code | code_sememe | token_type | code_dependency
---|---|---|---
stringlengths 75–104k | stringlengths 47–309k | stringlengths 215–214k | stringlengths 75–155k
def makeConstructor(self, originalConstructor, syntheticMemberList, doesConsumeArguments):
"""
:type syntheticMemberList: list(SyntheticMember)
:type doesConsumeArguments: bool
"""
# Original constructor's expected args.
originalConstructorExpectedArgList = []
doesExpectVariadicArgs = False
doesExpectKeywordedArgs = False
if inspect.isfunction(originalConstructor) or inspect.ismethod(originalConstructor):
argSpec = inspect.getargspec(originalConstructor)
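# Note: inspect.getargspec was removed in Python 3.11; inspect.getfullargspec is the modern replacement.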
# originalConstructorExpectedArgList = expected args - self.
originalConstructorExpectedArgList = argSpec.args[1:]
doesExpectVariadicArgs = (argSpec.varargs is not None)
doesExpectKeywordedArgs = (argSpec.keywords is not None)
def init(instance, *args, **kwargs):
if doesConsumeArguments:
# Merge original constructor's args specification with member list and make an args dict.
positionalArgumentKeyValueList = self._positionalArgumentKeyValueList(
originalConstructorExpectedArgList,
syntheticMemberList,
args)
# Set members values.
for syntheticMember in syntheticMemberList:
memberName = syntheticMember.memberName()
# Default value.
value = syntheticMember.default()
# Constructor is synthesized.
if doesConsumeArguments:
value = self._consumeArgument(memberName,
positionalArgumentKeyValueList,
kwargs,
value)
# Checking that the contract is respected.
syntheticMember.checkContract(memberName, value)
# Initializing the member with a value.
setattr(instance,
syntheticMember.privateMemberName(),
value)
if doesConsumeArguments:
# Remove superfluous arguments that were consumed for member synthesis but are not expected by the original constructor.
args, kwargs = self._filterArgsAndKwargs(
originalConstructorExpectedArgList=originalConstructorExpectedArgList,
syntheticMemberList=syntheticMemberList,
positionalArgumentKeyValueList=positionalArgumentKeyValueList,
keywordedArgDict=kwargs
)
# Call original constructor.
if originalConstructor is not None:
originalConstructor(instance, *args, **kwargs)
return init | def function[makeConstructor, parameter[self, originalConstructor, syntheticMemberList, doesConsumeArguments]]:
constant[
:type syntheticMemberList: list(SyntheticMember)
:type doesConsumeArguments: bool
]
variable[originalConstructorExpectedArgList] assign[=] list[[]]
variable[doesExpectVariadicArgs] assign[=] constant[False]
variable[doesExpectKeywordedArgs] assign[=] constant[False]
if <ast.BoolOp object at 0x7da20e954f10> begin[:]
variable[argSpec] assign[=] call[name[inspect].getargspec, parameter[name[originalConstructor]]]
variable[originalConstructorExpectedArgList] assign[=] call[name[argSpec].args][<ast.Slice object at 0x7da20e954c10>]
variable[doesExpectVariadicArgs] assign[=] compare[name[argSpec].varargs is_not constant[None]]
variable[doesExpectKeywordedArgs] assign[=] compare[name[argSpec].keywords is_not constant[None]]
def function[init, parameter[instance]]:
if name[doesConsumeArguments] begin[:]
variable[positionalArgumentKeyValueList] assign[=] call[name[self]._positionalArgumentKeyValueList, parameter[name[originalConstructorExpectedArgList], name[syntheticMemberList], name[args]]]
for taget[name[syntheticMember]] in starred[name[syntheticMemberList]] begin[:]
variable[memberName] assign[=] call[name[syntheticMember].memberName, parameter[]]
variable[value] assign[=] call[name[syntheticMember].default, parameter[]]
if name[doesConsumeArguments] begin[:]
variable[value] assign[=] call[name[self]._consumeArgument, parameter[name[memberName], name[positionalArgumentKeyValueList], name[kwargs], name[value]]]
call[name[syntheticMember].checkContract, parameter[name[memberName], name[value]]]
call[name[setattr], parameter[name[instance], call[name[syntheticMember].privateMemberName, parameter[]], name[value]]]
if name[doesConsumeArguments] begin[:]
<ast.Tuple object at 0x7da18bcca4a0> assign[=] call[name[self]._filterArgsAndKwargs, parameter[]]
if compare[name[originalConstructor] is_not constant[None]] begin[:]
call[name[originalConstructor], parameter[name[instance], <ast.Starred object at 0x7da18dc99f60>]]
return[name[init]] | keyword[def] identifier[makeConstructor] ( identifier[self] , identifier[originalConstructor] , identifier[syntheticMemberList] , identifier[doesConsumeArguments] ):
literal[string]
identifier[originalConstructorExpectedArgList] =[]
identifier[doesExpectVariadicArgs] = keyword[False]
identifier[doesExpectKeywordedArgs] = keyword[False]
keyword[if] identifier[inspect] . identifier[isfunction] ( identifier[originalConstructor] ) keyword[or] identifier[inspect] . identifier[ismethod] ( identifier[originalConstructor] ):
identifier[argSpec] = identifier[inspect] . identifier[getargspec] ( identifier[originalConstructor] )
identifier[originalConstructorExpectedArgList] = identifier[argSpec] . identifier[args] [ literal[int] :]
identifier[doesExpectVariadicArgs] =( identifier[argSpec] . identifier[varargs] keyword[is] keyword[not] keyword[None] )
identifier[doesExpectKeywordedArgs] =( identifier[argSpec] . identifier[keywords] keyword[is] keyword[not] keyword[None] )
keyword[def] identifier[init] ( identifier[instance] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[doesConsumeArguments] :
identifier[positionalArgumentKeyValueList] = identifier[self] . identifier[_positionalArgumentKeyValueList] (
identifier[originalConstructorExpectedArgList] ,
identifier[syntheticMemberList] ,
identifier[args] )
keyword[for] identifier[syntheticMember] keyword[in] identifier[syntheticMemberList] :
identifier[memberName] = identifier[syntheticMember] . identifier[memberName] ()
identifier[value] = identifier[syntheticMember] . identifier[default] ()
keyword[if] identifier[doesConsumeArguments] :
identifier[value] = identifier[self] . identifier[_consumeArgument] ( identifier[memberName] ,
identifier[positionalArgumentKeyValueList] ,
identifier[kwargs] ,
identifier[value] )
identifier[syntheticMember] . identifier[checkContract] ( identifier[memberName] , identifier[value] )
identifier[setattr] ( identifier[instance] ,
identifier[syntheticMember] . identifier[privateMemberName] (),
identifier[value] )
keyword[if] identifier[doesConsumeArguments] :
identifier[args] , identifier[kwargs] = identifier[self] . identifier[_filterArgsAndKwargs] (
identifier[originalConstructorExpectedArgList] = identifier[originalConstructorExpectedArgList] ,
identifier[syntheticMemberList] = identifier[syntheticMemberList] ,
identifier[positionalArgumentKeyValueList] = identifier[positionalArgumentKeyValueList] ,
identifier[keywordedArgDict] = identifier[kwargs]
)
keyword[if] identifier[originalConstructor] keyword[is] keyword[not] keyword[None] :
identifier[originalConstructor] ( identifier[instance] ,* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[init] | def makeConstructor(self, originalConstructor, syntheticMemberList, doesConsumeArguments):
"""
:type syntheticMemberList: list(SyntheticMember)
:type doesConsumeArguments: bool
"""
# Original constructor's expected args.
originalConstructorExpectedArgList = []
doesExpectVariadicArgs = False
doesExpectKeywordedArgs = False
if inspect.isfunction(originalConstructor) or inspect.ismethod(originalConstructor):
argSpec = inspect.getargspec(originalConstructor)
# originalConstructorExpectedArgList = expected args - self.
originalConstructorExpectedArgList = argSpec.args[1:]
doesExpectVariadicArgs = argSpec.varargs is not None
doesExpectKeywordedArgs = argSpec.keywords is not None # depends on [control=['if'], data=[]]
def init(instance, *args, **kwargs):
if doesConsumeArguments:
# Merge original constructor's args specification with member list and make an args dict.
positionalArgumentKeyValueList = self._positionalArgumentKeyValueList(originalConstructorExpectedArgList, syntheticMemberList, args) # depends on [control=['if'], data=[]]
# Set members values.
for syntheticMember in syntheticMemberList:
memberName = syntheticMember.memberName()
# Default value.
value = syntheticMember.default()
# Constructor is synthesized.
if doesConsumeArguments:
value = self._consumeArgument(memberName, positionalArgumentKeyValueList, kwargs, value)
# Checking that the contract is respected.
syntheticMember.checkContract(memberName, value) # depends on [control=['if'], data=[]]
# Initializing the member with a value.
setattr(instance, syntheticMember.privateMemberName(), value) # depends on [control=['for'], data=['syntheticMember']]
if doesConsumeArguments:
# Remove superfluous arguments that were consumed for member synthesis but are not expected by the original constructor.
(args, kwargs) = self._filterArgsAndKwargs(originalConstructorExpectedArgList=originalConstructorExpectedArgList, syntheticMemberList=syntheticMemberList, positionalArgumentKeyValueList=positionalArgumentKeyValueList, keywordedArgDict=kwargs) # depends on [control=['if'], data=[]]
# Call original constructor.
if originalConstructor is not None:
originalConstructor(instance, *args, **kwargs) # depends on [control=['if'], data=['originalConstructor']]
return init |
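The row above synthesizes an `__init__` that consumes constructor arguments to initialize members before delegating to the original constructor. The `SyntheticMember` and helper machinery (`_positionalArgumentKeyValueList`, `_consumeArgument`, `_filterArgsAndKwargs`) are not shown in this row, so the following is only a minimal, self-contained sketch of the same wrapping pattern, with all names invented for illustration:

```python
import functools

def synthesize(members):
    """Hypothetical class decorator: pop the listed kwargs, store them as
    private attributes (falling back to defaults), then call the original
    __init__ with whatever remains."""
    def decorate(cls):
        original_init = cls.__init__

        @functools.wraps(original_init)
        def init(self, *args, **kwargs):
            for name, default in members.items():
                # Consume the argument if supplied, otherwise use the default.
                setattr(self, '_' + name, kwargs.pop(name, default))
            original_init(self, *args, **kwargs)

        cls.__init__ = init
        return cls
    return decorate

@synthesize({'host': 'localhost', 'port': 8080})
class Server:
    def __init__(self):
        pass

s = Server(port=9000)
print(s._host, s._port)  # localhost 9000
```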
def symbols(self):
"""List of the coded symbols as strings, with special characters included."""
def _iter_symbols(symbol_values):
# The initial charset doesn't matter, as the start codes have the same symbol values in all charsets.
charset = 'A'
shift_charset = None
for symbol_value in symbol_values:
if shift_charset:
symbol = self._val2sym[shift_charset][symbol_value]
shift_charset = None
else:
symbol = self._val2sym[charset][symbol_value]
if symbol in (self.Special.START_A, self.Special.CODE_A):
charset = 'A'
elif symbol in (self.Special.START_B, self.Special.CODE_B):
charset = 'B'
elif symbol in (self.Special.START_C, self.Special.CODE_C):
charset = 'C'
elif symbol in (self.Special.SHIFT_A,):
shift_charset = 'A'
elif symbol in (self.Special.SHIFT_B,):
shift_charset = 'B'
yield symbol
return list(_iter_symbols(self.symbol_values)) | def function[symbols, parameter[self]]:
constant[List of the coded symbols as strings, with special characters included.]
def function[_iter_symbols, parameter[symbol_values]]:
variable[charset] assign[=] constant[A]
variable[shift_charset] assign[=] constant[None]
for taget[name[symbol_value]] in starred[name[symbol_values]] begin[:]
if name[shift_charset] begin[:]
variable[symbol] assign[=] call[call[name[self]._val2sym][name[shift_charset]]][name[symbol_value]]
variable[shift_charset] assign[=] constant[None]
if compare[name[symbol] in tuple[[<ast.Attribute object at 0x7da1b2346f50>, <ast.Attribute object at 0x7da1b2345ae0>]]] begin[:]
variable[charset] assign[=] constant[A]
<ast.Yield object at 0x7da1b23462f0>
return[call[name[list], parameter[call[name[_iter_symbols], parameter[name[self].symbol_values]]]]] | keyword[def] identifier[symbols] ( identifier[self] ):
literal[string]
keyword[def] identifier[_iter_symbols] ( identifier[symbol_values] ):
identifier[charset] = literal[string]
identifier[shift_charset] = keyword[None]
keyword[for] identifier[symbol_value] keyword[in] identifier[symbol_values] :
keyword[if] identifier[shift_charset] :
identifier[symbol] = identifier[self] . identifier[_val2sym] [ identifier[shift_charset] ][ identifier[symbol_value] ]
identifier[shift_charset] = keyword[None]
keyword[else] :
identifier[symbol] = identifier[self] . identifier[_val2sym] [ identifier[charset] ][ identifier[symbol_value] ]
keyword[if] identifier[symbol] keyword[in] ( identifier[self] . identifier[Special] . identifier[START_A] , identifier[self] . identifier[Special] . identifier[CODE_A] ):
identifier[charset] = literal[string]
keyword[elif] identifier[symbol] keyword[in] ( identifier[self] . identifier[Special] . identifier[START_B] , identifier[self] . identifier[Special] . identifier[CODE_B] ):
identifier[charset] = literal[string]
keyword[elif] identifier[symbol] keyword[in] ( identifier[self] . identifier[Special] . identifier[START_C] , identifier[self] . identifier[Special] . identifier[CODE_C] ):
identifier[charset] = literal[string]
keyword[elif] identifier[symbol] keyword[in] ( identifier[self] . identifier[Special] . identifier[SHIFT_A] ,):
identifier[shift_charset] = literal[string]
keyword[elif] identifier[symbol] keyword[in] ( identifier[self] . identifier[Special] . identifier[SHIFT_B] ,):
identifier[shift_charset] = literal[string]
keyword[yield] identifier[symbol]
keyword[return] identifier[list] ( identifier[_iter_symbols] ( identifier[self] . identifier[symbol_values] )) | def symbols(self):
"""List of the coded symbols as strings, with special characters included."""
def _iter_symbols(symbol_values):
# The initial charset doesn't matter, as the start codes have the same symbol values in all charsets.
charset = 'A'
shift_charset = None
for symbol_value in symbol_values:
if shift_charset:
symbol = self._val2sym[shift_charset][symbol_value]
shift_charset = None # depends on [control=['if'], data=[]]
else:
symbol = self._val2sym[charset][symbol_value]
if symbol in (self.Special.START_A, self.Special.CODE_A):
charset = 'A' # depends on [control=['if'], data=[]]
elif symbol in (self.Special.START_B, self.Special.CODE_B):
charset = 'B' # depends on [control=['if'], data=[]]
elif symbol in (self.Special.START_C, self.Special.CODE_C):
charset = 'C' # depends on [control=['if'], data=[]]
elif symbol in (self.Special.SHIFT_A,):
shift_charset = 'A' # depends on [control=['if'], data=[]]
elif symbol in (self.Special.SHIFT_B,):
shift_charset = 'B' # depends on [control=['if'], data=[]]
yield symbol # depends on [control=['for'], data=['symbol_value']]
return list(_iter_symbols(self.symbol_values)) |
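`symbols` above walks Code 128 symbol values with two kinds of charset state: `START_*`/`CODE_*` switch the active charset until further notice, while `SHIFT_A`/`SHIFT_B` redirect only the next symbol. Below is a toy, runnable version of that state machine; the two tiny lookup tables stand in for the real `_val2sym` tables, which are not part of this row:

```python
# Hypothetical two-charset tables; real Code 128 tables have ~107 entries each.
VAL2SYM = {
    'A': {0: 'A0', 1: 'A1', 98: 'SHIFT_B', 100: 'CODE_B'},
    'B': {0: 'B0', 1: 'B1', 98: 'SHIFT_A', 101: 'CODE_A'},
}

def decode(symbol_values, charset='A'):
    shift = None
    out = []
    for v in symbol_values:
        # A pending shift overrides the active charset for exactly one symbol.
        sym = VAL2SYM[shift or charset][v]
        shift = None
        if sym == 'CODE_A':
            charset = 'A'
        elif sym == 'CODE_B':
            charset = 'B'
        elif sym == 'SHIFT_A':
            shift = 'A'
        elif sym == 'SHIFT_B':
            shift = 'B'
        out.append(sym)
    return out

print(decode([0, 100, 1, 98, 0, 1]))
# ['A0', 'CODE_B', 'B1', 'SHIFT_A', 'A0', 'B1'] — the shift affects one symbol only
```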
def get_dict(self, only_attributes=None, exclude_attributes=None, df_format=False):
"""Summarize the I-TASSER run in a dictionary containing modeling results and top predictions from COACH
Args:
only_attributes (str, list): Attributes that should be returned. If not provided, all are returned.
exclude_attributes (str, list): Attributes that should be excluded.
df_format (bool): If dictionary values should be formatted for a dataframe
(everything possible is transformed into strings, int, or float -
if something can't be transformed it is excluded)
Returns:
dict: Dictionary of attributes
"""
to_exclude = ['coach_bsites', 'coach_ec', 'coach_go_mf', 'coach_go_bp', 'coach_go_cc']
if not exclude_attributes:
excluder = to_exclude
else:
excluder = ssbio.utils.force_list(exclude_attributes)
excluder.extend(to_exclude)
summary_dict = StructProp.get_dict(self, only_attributes=only_attributes,
exclude_attributes=excluder,
df_format=df_format)
if self.coach_bsites:
tmp = {'top_bsite_' + k:v for k, v in self.coach_bsites[0].items()}
summary_dict.update(tmp)
if self.coach_ec:
tmp = {'top_ec_' + k: v for k, v in self.coach_ec[0].items()}
summary_dict.update(tmp)
if self.coach_go_mf:
tmp = {'top_go_mf_' + k: v for k, v in self.coach_go_mf[0].items()}
summary_dict.update(tmp)
if self.coach_go_bp:
tmp = {'top_go_bp_' + k: v for k, v in self.coach_go_bp[0].items()}
summary_dict.update(tmp)
if self.coach_go_cc:
tmp = {'top_go_cc_' + k: v for k, v in self.coach_go_cc[0].items()}
summary_dict.update(tmp)
return summary_dict | def function[get_dict, parameter[self, only_attributes, exclude_attributes, df_format]]:
constant[Summarize the I-TASSER run in a dictionary containing modeling results and top predictions from COACH
Args:
only_attributes (str, list): Attributes that should be returned. If not provided, all are returned.
exclude_attributes (str, list): Attributes that should be excluded.
df_format (bool): If dictionary values should be formatted for a dataframe
(everything possible is transformed into strings, int, or float -
if something can't be transformed it is excluded)
Returns:
dict: Dictionary of attributes
]
variable[to_exclude] assign[=] list[[<ast.Constant object at 0x7da1b0e47550>, <ast.Constant object at 0x7da1b0e46500>, <ast.Constant object at 0x7da1b0e469e0>, <ast.Constant object at 0x7da1b0e45420>, <ast.Constant object at 0x7da1b0e45ab0>]]
if <ast.UnaryOp object at 0x7da1b0e47580> begin[:]
variable[excluder] assign[=] name[to_exclude]
variable[summary_dict] assign[=] call[name[StructProp].get_dict, parameter[name[self]]]
if name[self].coach_bsites begin[:]
variable[tmp] assign[=] <ast.DictComp object at 0x7da1b0e440a0>
call[name[summary_dict].update, parameter[name[tmp]]]
if name[self].coach_ec begin[:]
variable[tmp] assign[=] <ast.DictComp object at 0x7da1b0e47bb0>
call[name[summary_dict].update, parameter[name[tmp]]]
if name[self].coach_go_mf begin[:]
variable[tmp] assign[=] <ast.DictComp object at 0x7da1b0e46680>
call[name[summary_dict].update, parameter[name[tmp]]]
if name[self].coach_go_bp begin[:]
variable[tmp] assign[=] <ast.DictComp object at 0x7da1b0e6e230>
call[name[summary_dict].update, parameter[name[tmp]]]
if name[self].coach_go_cc begin[:]
variable[tmp] assign[=] <ast.DictComp object at 0x7da1b0e6e3e0>
call[name[summary_dict].update, parameter[name[tmp]]]
return[name[summary_dict]] | keyword[def] identifier[get_dict] ( identifier[self] , identifier[only_attributes] = keyword[None] , identifier[exclude_attributes] = keyword[None] , identifier[df_format] = keyword[False] ):
literal[string]
identifier[to_exclude] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] keyword[not] identifier[exclude_attributes] :
identifier[excluder] = identifier[to_exclude]
keyword[else] :
identifier[excluder] = identifier[ssbio] . identifier[utils] . identifier[force_list] ( identifier[exclude_attributes] )
identifier[excluder] . identifier[extend] ( identifier[to_exclude] )
identifier[summary_dict] = identifier[StructProp] . identifier[get_dict] ( identifier[self] , identifier[only_attributes] = identifier[only_attributes] ,
identifier[exclude_attributes] = identifier[excluder] ,
identifier[df_format] = identifier[df_format] )
keyword[if] identifier[self] . identifier[coach_bsites] :
identifier[tmp] ={ literal[string] + identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[coach_bsites] [ literal[int] ]. identifier[items] ()}
identifier[summary_dict] . identifier[update] ( identifier[tmp] )
keyword[if] identifier[self] . identifier[coach_ec] :
identifier[tmp] ={ literal[string] + identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[coach_ec] [ literal[int] ]. identifier[items] ()}
identifier[summary_dict] . identifier[update] ( identifier[tmp] )
keyword[if] identifier[self] . identifier[coach_go_mf] :
identifier[tmp] ={ literal[string] + identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[coach_go_mf] [ literal[int] ]. identifier[items] ()}
identifier[summary_dict] . identifier[update] ( identifier[tmp] )
keyword[if] identifier[self] . identifier[coach_go_bp] :
identifier[tmp] ={ literal[string] + identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[coach_go_bp] [ literal[int] ]. identifier[items] ()}
identifier[summary_dict] . identifier[update] ( identifier[tmp] )
keyword[if] identifier[self] . identifier[coach_go_cc] :
identifier[tmp] ={ literal[string] + identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[coach_go_cc] [ literal[int] ]. identifier[items] ()}
identifier[summary_dict] . identifier[update] ( identifier[tmp] )
keyword[return] identifier[summary_dict] | def get_dict(self, only_attributes=None, exclude_attributes=None, df_format=False):
"""Summarize the I-TASSER run in a dictionary containing modeling results and top predictions from COACH
Args:
only_attributes (str, list): Attributes that should be returned. If not provided, all are returned.
exclude_attributes (str, list): Attributes that should be excluded.
df_format (bool): If dictionary values should be formatted for a dataframe
(everything possible is transformed into strings, int, or float -
if something can't be transformed it is excluded)
Returns:
dict: Dictionary of attributes
"""
to_exclude = ['coach_bsites', 'coach_ec', 'coach_go_mf', 'coach_go_bp', 'coach_go_cc']
if not exclude_attributes:
excluder = to_exclude # depends on [control=['if'], data=[]]
else:
excluder = ssbio.utils.force_list(exclude_attributes)
excluder.extend(to_exclude)
summary_dict = StructProp.get_dict(self, only_attributes=only_attributes, exclude_attributes=excluder, df_format=df_format)
if self.coach_bsites:
tmp = {'top_bsite_' + k: v for (k, v) in self.coach_bsites[0].items()}
summary_dict.update(tmp) # depends on [control=['if'], data=[]]
if self.coach_ec:
tmp = {'top_ec_' + k: v for (k, v) in self.coach_ec[0].items()}
summary_dict.update(tmp) # depends on [control=['if'], data=[]]
if self.coach_go_mf:
tmp = {'top_go_mf_' + k: v for (k, v) in self.coach_go_mf[0].items()}
summary_dict.update(tmp) # depends on [control=['if'], data=[]]
if self.coach_go_bp:
tmp = {'top_go_bp_' + k: v for (k, v) in self.coach_go_bp[0].items()}
summary_dict.update(tmp) # depends on [control=['if'], data=[]]
if self.coach_go_cc:
tmp = {'top_go_cc_' + k: v for (k, v) in self.coach_go_cc[0].items()}
summary_dict.update(tmp) # depends on [control=['if'], data=[]]
return summary_dict |
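`get_dict` above flattens only the top-ranked COACH prediction from each results list into prefixed summary keys (index `[0]`, so the lists are assumed sorted best-first). The prefixing idiom in isolation, with invented field names:

```python
coach_bsites = [
    {'site_num': 1, 'c_score': 0.42, 'pdb_template': '3f2bA'},  # top-ranked entry
    {'site_num': 2, 'c_score': 0.11, 'pdb_template': '1xyzB'},
]

summary = {}
if coach_bsites:
    # Only index [0] is used: the result lists are assumed sorted best-first.
    summary.update({'top_bsite_' + k: v for k, v in coach_bsites[0].items()})

print(summary)
# {'top_bsite_site_num': 1, 'top_bsite_c_score': 0.42, 'top_bsite_pdb_template': '3f2bA'}
```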
def autoclean_cv(training_dataframe, testing_dataframe, drop_nans=False, copy=False,
encoder=None, encoder_kwargs=None, ignore_update_check=False):
"""Performs a series of automated data cleaning transformations on the provided training and testing data sets
Unlike `autoclean()`, this function takes cross-validation into account by learning the data transformations
from only the training set, then applying those transformations to both the training and testing set.
By doing so, this function will prevent information leak from the training set into the testing set.
Parameters
----------
training_dataframe: pandas.DataFrame
Training data set
testing_dataframe: pandas.DataFrame
Testing data set
drop_nans: bool
Drop all rows that have a NaN in any column (default: False)
copy: bool
Make a copy of the data set (default: False)
encoder: category_encoders transformer
A valid category_encoders transformer, which is passed an inferred cols list. Default (None: LabelEncoder)
encoder_kwargs: dict
Keyword arguments passed to the encoder. Default (None)
ignore_update_check: bool
Do not check for the latest version of datacleaner
Returns
----------
output_training_dataframe: pandas.DataFrame
Cleaned training data set
output_testing_dataframe: pandas.DataFrame
Cleaned testing data set
"""
global update_checked
if ignore_update_check:
update_checked = True
if not update_checked:
update_check('datacleaner', __version__)
update_checked = True
if set(training_dataframe.columns.values) != set(testing_dataframe.columns.values):
raise ValueError('The training and testing DataFrames do not have the same columns. '
'Make sure that you are providing the same columns.')
if copy:
training_dataframe = training_dataframe.copy()
testing_dataframe = testing_dataframe.copy()
if drop_nans:
training_dataframe.dropna(inplace=True)
testing_dataframe.dropna(inplace=True)
if encoder_kwargs is None:
encoder_kwargs = {}
for column in training_dataframe.columns.values:
# Replace NaNs with the median or mode of the column depending on the column type
try:
column_median = training_dataframe[column].median()
training_dataframe[column].fillna(column_median, inplace=True)
testing_dataframe[column].fillna(column_median, inplace=True)
except TypeError:
column_mode = training_dataframe[column].mode()[0]
training_dataframe[column].fillna(column_mode, inplace=True)
testing_dataframe[column].fillna(column_mode, inplace=True)
# Encode all strings with numerical equivalents
if str(training_dataframe[column].values.dtype) == 'object':
if encoder is not None:
column_encoder = encoder(**encoder_kwargs).fit(training_dataframe[column].values)
else:
column_encoder = LabelEncoder().fit(training_dataframe[column].values)
training_dataframe[column] = column_encoder.transform(training_dataframe[column].values)
testing_dataframe[column] = column_encoder.transform(testing_dataframe[column].values)
return training_dataframe, testing_dataframe | def function[autoclean_cv, parameter[training_dataframe, testing_dataframe, drop_nans, copy, encoder, encoder_kwargs, ignore_update_check]]:
constant[Performs a series of automated data cleaning transformations on the provided training and testing data sets
Unlike `autoclean()`, this function takes cross-validation into account by learning the data transformations
from only the training set, then applying those transformations to both the training and testing set.
By doing so, this function will prevent information leak from the training set into the testing set.
Parameters
----------
training_dataframe: pandas.DataFrame
Training data set
testing_dataframe: pandas.DataFrame
Testing data set
drop_nans: bool
Drop all rows that have a NaN in any column (default: False)
copy: bool
Make a copy of the data set (default: False)
encoder: category_encoders transformer
A valid category_encoders transformer, which is passed an inferred cols list. Default (None: LabelEncoder)
encoder_kwargs: dict
Keyword arguments passed to the encoder. Default (None)
ignore_update_check: bool
Do not check for the latest version of datacleaner
Returns
----------
output_training_dataframe: pandas.DataFrame
Cleaned training data set
output_testing_dataframe: pandas.DataFrame
Cleaned testing data set
]
<ast.Global object at 0x7da1b20b4d90>
if name[ignore_update_check] begin[:]
variable[update_checked] assign[=] constant[True]
if <ast.UnaryOp object at 0x7da1b20b5d50> begin[:]
call[name[update_check], parameter[constant[datacleaner], name[__version__]]]
variable[update_checked] assign[=] constant[True]
if compare[call[name[set], parameter[name[training_dataframe].columns.values]] not_equal[!=] call[name[set], parameter[name[testing_dataframe].columns.values]]] begin[:]
<ast.Raise object at 0x7da1b20b5780>
if name[copy] begin[:]
variable[training_dataframe] assign[=] call[name[training_dataframe].copy, parameter[]]
variable[testing_dataframe] assign[=] call[name[testing_dataframe].copy, parameter[]]
if name[drop_nans] begin[:]
call[name[training_dataframe].dropna, parameter[]]
call[name[testing_dataframe].dropna, parameter[]]
if compare[name[encoder_kwargs] is constant[None]] begin[:]
variable[encoder_kwargs] assign[=] dictionary[[], []]
for taget[name[column]] in starred[name[training_dataframe].columns.values] begin[:]
<ast.Try object at 0x7da1b20b4c10>
if compare[call[name[str], parameter[call[name[training_dataframe]][name[column]].values.dtype]] equal[==] constant[object]] begin[:]
if compare[name[encoder] is_not constant[None]] begin[:]
variable[column_encoder] assign[=] call[call[name[encoder], parameter[]].fit, parameter[call[name[training_dataframe]][name[column]].values]]
call[name[training_dataframe]][name[column]] assign[=] call[name[column_encoder].transform, parameter[call[name[training_dataframe]][name[column]].values]]
call[name[testing_dataframe]][name[column]] assign[=] call[name[column_encoder].transform, parameter[call[name[testing_dataframe]][name[column]].values]]
return[tuple[[<ast.Name object at 0x7da18bc72c80>, <ast.Name object at 0x7da18bc71bd0>]]] | keyword[def] identifier[autoclean_cv] ( identifier[training_dataframe] , identifier[testing_dataframe] , identifier[drop_nans] = keyword[False] , identifier[copy] = keyword[False] ,
identifier[encoder] = keyword[None] , identifier[encoder_kwargs] = keyword[None] , identifier[ignore_update_check] = keyword[False] ):
literal[string]
keyword[global] identifier[update_checked]
keyword[if] identifier[ignore_update_check] :
identifier[update_checked] = keyword[True]
keyword[if] keyword[not] identifier[update_checked] :
identifier[update_check] ( literal[string] , identifier[__version__] )
identifier[update_checked] = keyword[True]
keyword[if] identifier[set] ( identifier[training_dataframe] . identifier[columns] . identifier[values] )!= identifier[set] ( identifier[testing_dataframe] . identifier[columns] . identifier[values] ):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[if] identifier[copy] :
identifier[training_dataframe] = identifier[training_dataframe] . identifier[copy] ()
identifier[testing_dataframe] = identifier[testing_dataframe] . identifier[copy] ()
keyword[if] identifier[drop_nans] :
identifier[training_dataframe] . identifier[dropna] ( identifier[inplace] = keyword[True] )
identifier[testing_dataframe] . identifier[dropna] ( identifier[inplace] = keyword[True] )
keyword[if] identifier[encoder_kwargs] keyword[is] keyword[None] :
identifier[encoder_kwargs] ={}
keyword[for] identifier[column] keyword[in] identifier[training_dataframe] . identifier[columns] . identifier[values] :
keyword[try] :
identifier[column_median] = identifier[training_dataframe] [ identifier[column] ]. identifier[median] ()
identifier[training_dataframe] [ identifier[column] ]. identifier[fillna] ( identifier[column_median] , identifier[inplace] = keyword[True] )
identifier[testing_dataframe] [ identifier[column] ]. identifier[fillna] ( identifier[column_median] , identifier[inplace] = keyword[True] )
keyword[except] identifier[TypeError] :
identifier[column_mode] = identifier[training_dataframe] [ identifier[column] ]. identifier[mode] ()[ literal[int] ]
identifier[training_dataframe] [ identifier[column] ]. identifier[fillna] ( identifier[column_mode] , identifier[inplace] = keyword[True] )
identifier[testing_dataframe] [ identifier[column] ]. identifier[fillna] ( identifier[column_mode] , identifier[inplace] = keyword[True] )
keyword[if] identifier[str] ( identifier[training_dataframe] [ identifier[column] ]. identifier[values] . identifier[dtype] )== literal[string] :
keyword[if] identifier[encoder] keyword[is] keyword[not] keyword[None] :
identifier[column_encoder] = identifier[encoder] (** identifier[encoder_kwargs] ). identifier[fit] ( identifier[training_dataframe] [ identifier[column] ]. identifier[values] )
keyword[else] :
identifier[column_encoder] = identifier[LabelEncoder] (). identifier[fit] ( identifier[training_dataframe] [ identifier[column] ]. identifier[values] )
identifier[training_dataframe] [ identifier[column] ]= identifier[column_encoder] . identifier[transform] ( identifier[training_dataframe] [ identifier[column] ]. identifier[values] )
identifier[testing_dataframe] [ identifier[column] ]= identifier[column_encoder] . identifier[transform] ( identifier[testing_dataframe] [ identifier[column] ]. identifier[values] )
keyword[return] identifier[training_dataframe] , identifier[testing_dataframe] | def autoclean_cv(training_dataframe, testing_dataframe, drop_nans=False, copy=False, encoder=None, encoder_kwargs=None, ignore_update_check=False):
"""Performs a series of automated data cleaning transformations on the provided training and testing data sets
Unlike `autoclean()`, this function takes cross-validation into account by learning the data transformations
from only the training set, then applying those transformations to both the training and testing set.
By doing so, this function will prevent information leak from the training set into the testing set.
Parameters
----------
training_dataframe: pandas.DataFrame
Training data set
testing_dataframe: pandas.DataFrame
Testing data set
drop_nans: bool
Drop all rows that have a NaN in any column (default: False)
copy: bool
Make a copy of the data set (default: False)
encoder: category_encoders transformer
A valid category_encoders transformer, which is passed an inferred cols list. Default (None: LabelEncoder)
encoder_kwargs: dict
Keyword arguments passed to the encoder. Default (None)
ignore_update_check: bool
Do not check for the latest version of datacleaner
Returns
----------
output_training_dataframe: pandas.DataFrame
Cleaned training data set
output_testing_dataframe: pandas.DataFrame
Cleaned testing data set
"""
global update_checked
if ignore_update_check:
update_checked = True # depends on [control=['if'], data=[]]
if not update_checked:
update_check('datacleaner', __version__)
update_checked = True # depends on [control=['if'], data=[]]
if set(training_dataframe.columns.values) != set(testing_dataframe.columns.values):
raise ValueError('The training and testing DataFrames do not have the same columns. Make sure that you are providing the same columns.') # depends on [control=['if'], data=[]]
if copy:
training_dataframe = training_dataframe.copy()
testing_dataframe = testing_dataframe.copy() # depends on [control=['if'], data=[]]
if drop_nans:
training_dataframe.dropna(inplace=True)
testing_dataframe.dropna(inplace=True) # depends on [control=['if'], data=[]]
if encoder_kwargs is None:
encoder_kwargs = {} # depends on [control=['if'], data=['encoder_kwargs']]
for column in training_dataframe.columns.values:
# Replace NaNs with the median or mode of the column depending on the column type
try:
column_median = training_dataframe[column].median()
training_dataframe[column].fillna(column_median, inplace=True)
testing_dataframe[column].fillna(column_median, inplace=True) # depends on [control=['try'], data=[]]
except TypeError:
column_mode = training_dataframe[column].mode()[0]
training_dataframe[column].fillna(column_mode, inplace=True)
testing_dataframe[column].fillna(column_mode, inplace=True) # depends on [control=['except'], data=[]]
# Encode all strings with numerical equivalents
if str(training_dataframe[column].values.dtype) == 'object':
if encoder is not None:
column_encoder = encoder(**encoder_kwargs).fit(training_dataframe[column].values) # depends on [control=['if'], data=['encoder']]
else:
column_encoder = LabelEncoder().fit(training_dataframe[column].values)
training_dataframe[column] = column_encoder.transform(training_dataframe[column].values)
testing_dataframe[column] = column_encoder.transform(testing_dataframe[column].values) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['column']]
return (training_dataframe, testing_dataframe) |
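A hedged usage sketch for `autoclean_cv` above, assuming the function is importable from a `datacleaner` module and that the CSV paths (invented here) exist. The point of the cross-validation-aware variant is that medians, modes, and encoders are fit on the training frame only, then applied to both frames:

```python
import pandas as pd
from datacleaner import autoclean_cv  # assumes this module layout

train = pd.read_csv('train.csv')  # hypothetical paths
test = pd.read_csv('test.csv')

# copy=True leaves the originals untouched; all statistics and encoders
# are learned from `train` only, preventing leakage into `test`.
clean_train, clean_test = autoclean_cv(train, test, drop_nans=True, copy=True)
clean_train.to_csv('train_clean.csv', index=False)
```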
def read_plain(file_obj, type_, count):
"""Read `count` items `type` from the fo using the plain encoding."""
if count == 0:
return []
conv = DECODE_PLAIN[type_]
return conv(file_obj, count) | def function[read_plain, parameter[file_obj, type_, count]]:
constant[Read `count` items of `type_` from `file_obj` using the plain encoding.]
if compare[name[count] equal[==] constant[0]] begin[:]
return[list[[]]]
variable[conv] assign[=] call[name[DECODE_PLAIN]][name[type_]]
return[call[name[conv], parameter[name[file_obj], name[count]]]] | keyword[def] identifier[read_plain] ( identifier[file_obj] , identifier[type_] , identifier[count] ):
literal[string]
keyword[if] identifier[count] == literal[int] :
keyword[return] []
identifier[conv] = identifier[DECODE_PLAIN] [ identifier[type_] ]
keyword[return] identifier[conv] ( identifier[file_obj] , identifier[count] ) | def read_plain(file_obj, type_, count):
"""Read `count` items `type` from the fo using the plain encoding."""
if count == 0:
return [] # depends on [control=['if'], data=[]]
conv = DECODE_PLAIN[type_]
return conv(file_obj, count) |
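`read_plain` above is a thin dispatcher over a `DECODE_PLAIN` table keyed by the Parquet type code. A self-contained imitation using `struct`-based readers; the table contents and string keys here are illustrative, not the real Parquet type enum:

```python
import struct
from io import BytesIO

# Illustrative dispatch table in the spirit of DECODE_PLAIN (keys are hypothetical).
DECODE_PLAIN_DEMO = {
    'int32': lambda fo, n: list(struct.unpack('<%di' % n, fo.read(4 * n))),
    'double': lambda fo, n: list(struct.unpack('<%dd' % n, fo.read(8 * n))),
}

def read_plain_demo(file_obj, type_, count):
    # Same shape as the row above: short-circuit on zero, then dispatch.
    if count == 0:
        return []
    return DECODE_PLAIN_DEMO[type_](file_obj, count)

buf = BytesIO(struct.pack('<3i', 1, 2, 3))
print(read_plain_demo(buf, 'int32', 3))  # [1, 2, 3]
```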
def deconv2d(self, filter_size, output_channels, stride=1, padding='SAME', stoch=False, ladder=None,
activation_fn=tf.nn.relu, b_value=0.0, s_value=1.0, bn=True):
"""
2D Deconvolutional Layer
:param filter_size: int. assumes square filter
:param output_channels: int
:param stride: int
:param padding: 'VALID' or 'SAME'
:param activation_fn: tf.nn function
:param b_value: float
:param s_value: float
"""
self.count['deconv'] += 1
scope = 'deconv_' + str(self.count['deconv'])
with tf.variable_scope(scope):
# Calculate the dimensions for deconv function
batch_size = tf.shape(self.input)[0]
input_height = tf.shape(self.input)[1]
input_width = tf.shape(self.input)[2]
if padding == "VALID":
out_rows = (input_height - 1) * stride + filter_size
out_cols = (input_width - 1) * stride + filter_size
else: # padding == "SAME":
out_rows = input_height * stride
out_cols = input_width * stride
# Deconv function
input_channels = self.input.get_shape()[3]
output_shape = [filter_size, filter_size, output_channels, input_channels]
w = self.weight_variable(name='weights', shape=output_shape)
deconv_out_shape = tf.pack([batch_size, out_rows, out_cols, output_channels])
self.input = tf.nn.conv2d_transpose(self.input, w, deconv_out_shape, [1, stride, stride, 1], padding)
# Additional functions
if ladder is not None:
stoch = False
enc_mean = self.dec_ladder[self.stoch_count_dec][0]
enc_std = self.dec_ladder[self.stoch_count_dec][1]
self.stoch_count_dec += 1
with tf.variable_scope("ladder"):
input_shape = [enc_mean.get_shape()[1], enc_mean.get_shape()[2], enc_mean.get_shape()[3]]
w_std = self.weight_variable(name='weights_std', shape=input_shape)
w_mean = self.weight_variable(name='weights_mean', shape=input_shape)
mean = self.input * w_mean
std = tf.nn.softplus(self.input * w_std)
if ladder == 1: # LVAE Implementation
eps = 1e-10
new_std = 1 / ((enc_std + eps) ** 2 + (std + eps) ** 2)
new_mean = new_std * (enc_mean * (1 / (enc_std + eps) ** 2) + mean * (1 / (std + eps) ** 2))
self.input = new_mean + tf.random_normal(tf.shape(self.input)) * new_std
elif ladder == 2: # BLN Implementation
raise NotImplementedError
else:
self.input = mean + tf.random_normal(tf.shape(self.input)) * std
if stoch is True: # Draw sample from Normal Layer
mean, std = tf.split(3, 2, self.input)
self.input = mean + tf.random_normal(tf.shape(mean)) * std
output_channels = int(output_channels/2)
if bn is True: # batch normalization
self.input = self.batch_norm(self.input)
if b_value is not None: # bias value
b = self.const_variable(name='bias', shape=[output_channels], value=b_value)
self.input = tf.add(self.input, b)
if s_value is not None: # scale value
s = self.const_variable(name='scale', shape=[output_channels], value=s_value)
self.input = tf.multiply(self.input, s)
if activation_fn is not None: # non-linear activation function
self.input = activation_fn(self.input)
self.print_log(scope + ' output: ' + str(self.input.get_shape())) | def function[deconv2d, parameter[self, filter_size, output_channels, stride, padding, stoch, ladder, activation_fn, b_value, s_value, bn]]:
constant[
2D Deconvolutional Layer
:param filter_size: int. assumes square filter
:param output_channels: int
:param stride: int
:param padding: 'VALID' or 'SAME'
:param activation_fn: tf.nn function
:param b_value: float
:param s_value: float
]
<ast.AugAssign object at 0x7da207f03040>
variable[scope] assign[=] binary_operation[constant[deconv_] + call[name[str], parameter[call[name[self].count][constant[deconv]]]]]
with call[name[tf].variable_scope, parameter[name[scope]]] begin[:]
variable[batch_size] assign[=] call[call[name[tf].shape, parameter[name[self].input]]][constant[0]]
variable[input_height] assign[=] call[call[name[tf].shape, parameter[name[self].input]]][constant[1]]
variable[input_width] assign[=] call[call[name[tf].shape, parameter[name[self].input]]][constant[2]]
if compare[name[padding] equal[==] constant[VALID]] begin[:]
variable[out_rows] assign[=] binary_operation[binary_operation[binary_operation[name[input_height] - constant[1]] * name[stride]] + name[filter_size]]
variable[out_cols] assign[=] binary_operation[binary_operation[binary_operation[name[input_width] - constant[1]] * name[stride]] + name[filter_size]]
variable[input_channels] assign[=] call[call[name[self].input.get_shape, parameter[]]][constant[3]]
variable[output_shape] assign[=] list[[<ast.Name object at 0x7da207f03340>, <ast.Name object at 0x7da207f03f40>, <ast.Name object at 0x7da207f022c0>, <ast.Name object at 0x7da207f03f70>]]
variable[w] assign[=] call[name[self].weight_variable, parameter[]]
variable[deconv_out_shape] assign[=] call[name[tf].pack, parameter[list[[<ast.Name object at 0x7da207f00730>, <ast.Name object at 0x7da207f03e50>, <ast.Name object at 0x7da207f02170>, <ast.Name object at 0x7da207f01900>]]]]
name[self].input assign[=] call[name[tf].nn.conv2d_transpose, parameter[name[self].input, name[w], name[deconv_out_shape], list[[<ast.Constant object at 0x7da207f03580>, <ast.Name object at 0x7da207f02890>, <ast.Name object at 0x7da207f00610>, <ast.Constant object at 0x7da207f039d0>]], name[padding]]]
if compare[name[ladder] is_not constant[None]] begin[:]
variable[stoch] assign[=] constant[False]
variable[enc_mean] assign[=] call[call[name[self].dec_ladder][name[self].stoch_count_dec]][constant[0]]
variable[enc_std] assign[=] call[call[name[self].dec_ladder][name[self].stoch_count_dec]][constant[1]]
<ast.AugAssign object at 0x7da207f01fc0>
with call[name[tf].variable_scope, parameter[constant[ladder]]] begin[:]
variable[input_shape] assign[=] list[[<ast.Subscript object at 0x7da207f02e00>, <ast.Subscript object at 0x7da207f029b0>, <ast.Subscript object at 0x7da207f03670>]]
variable[w_std] assign[=] call[name[self].weight_variable, parameter[]]
variable[w_mean] assign[=] call[name[self].weight_variable, parameter[]]
variable[mean] assign[=] binary_operation[name[self].input * name[w_mean]]
variable[std] assign[=] call[name[tf].nn.softplus, parameter[binary_operation[name[self].input * name[w_std]]]]
if compare[name[ladder] equal[==] constant[1]] begin[:]
variable[eps] assign[=] constant[1e-10]
variable[new_std] assign[=] binary_operation[constant[1] / binary_operation[binary_operation[binary_operation[name[enc_std] + name[eps]] ** constant[2]] + binary_operation[binary_operation[name[std] + name[eps]] ** constant[2]]]]
variable[new_mean] assign[=] binary_operation[name[new_std] * binary_operation[binary_operation[name[enc_mean] * binary_operation[constant[1] / binary_operation[binary_operation[name[enc_std] + name[eps]] ** constant[2]]]] + binary_operation[name[mean] * binary_operation[constant[1] / binary_operation[binary_operation[name[std] + name[eps]] ** constant[2]]]]]]
name[self].input assign[=] binary_operation[name[new_mean] + binary_operation[call[name[tf].random_normal, parameter[call[name[tf].shape, parameter[name[self].input]]]] * name[new_std]]]
if compare[name[stoch] is constant[True]] begin[:]
<ast.Tuple object at 0x7da20c990b20> assign[=] call[name[tf].split, parameter[constant[3], constant[2], name[self].input]]
name[self].input assign[=] binary_operation[name[mean] + binary_operation[call[name[tf].random_normal, parameter[call[name[tf].shape, parameter[name[mean]]]]] * name[std]]]
variable[output_channels] assign[=] call[name[int], parameter[binary_operation[name[output_channels] / constant[2]]]]
if compare[name[bn] is constant[True]] begin[:]
name[self].input assign[=] call[name[self].batch_norm, parameter[name[self].input]]
if compare[name[b_value] is_not constant[None]] begin[:]
variable[b] assign[=] call[name[self].const_variable, parameter[]]
name[self].input assign[=] call[name[tf].add, parameter[name[self].input, name[b]]]
if compare[name[s_value] is_not constant[None]] begin[:]
variable[s] assign[=] call[name[self].const_variable, parameter[]]
name[self].input assign[=] call[name[tf].multiply, parameter[name[self].input, name[s]]]
if compare[name[activation_fn] is_not constant[None]] begin[:]
name[self].input assign[=] call[name[activation_fn], parameter[name[self].input]]
call[name[self].print_log, parameter[binary_operation[binary_operation[name[scope] + constant[ output: ]] + call[name[str], parameter[call[name[self].input.get_shape, parameter[]]]]]]] | keyword[def] identifier[deconv2d] ( identifier[self] , identifier[filter_size] , identifier[output_channels] , identifier[stride] = literal[int] , identifier[padding] = literal[string] , identifier[stoch] = keyword[False] , identifier[ladder] = keyword[None] ,
identifier[activation_fn] = identifier[tf] . identifier[nn] . identifier[relu] , identifier[b_value] = literal[int] , identifier[s_value] = literal[int] , identifier[bn] = keyword[True] ):
literal[string]
identifier[self] . identifier[count] [ literal[string] ]+= literal[int]
identifier[scope] = literal[string] + identifier[str] ( identifier[self] . identifier[count] [ literal[string] ])
keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[scope] ):
identifier[batch_size] = identifier[tf] . identifier[shape] ( identifier[self] . identifier[input] )[ literal[int] ]
identifier[input_height] = identifier[tf] . identifier[shape] ( identifier[self] . identifier[input] )[ literal[int] ]
identifier[input_width] = identifier[tf] . identifier[shape] ( identifier[self] . identifier[input] )[ literal[int] ]
keyword[if] identifier[padding] == literal[string] :
identifier[out_rows] =( identifier[input_height] - literal[int] )* identifier[stride] + identifier[filter_size]
identifier[out_cols] =( identifier[input_width] - literal[int] )* identifier[stride] + identifier[filter_size]
keyword[else] :
identifier[out_rows] = identifier[input_height] * identifier[stride]
identifier[out_cols] = identifier[input_width] * identifier[stride]
identifier[input_channels] = identifier[self] . identifier[input] . identifier[get_shape] ()[ literal[int] ]
identifier[output_shape] =[ identifier[filter_size] , identifier[filter_size] , identifier[output_channels] , identifier[input_channels] ]
identifier[w] = identifier[self] . identifier[weight_variable] ( identifier[name] = literal[string] , identifier[shape] = identifier[output_shape] )
identifier[deconv_out_shape] = identifier[tf] . identifier[pack] ([ identifier[batch_size] , identifier[out_rows] , identifier[out_cols] , identifier[output_channels] ])
identifier[self] . identifier[input] = identifier[tf] . identifier[nn] . identifier[conv2d_transpose] ( identifier[self] . identifier[input] , identifier[w] , identifier[deconv_out_shape] ,[ literal[int] , identifier[stride] , identifier[stride] , literal[int] ], identifier[padding] )
keyword[if] identifier[ladder] keyword[is] keyword[not] keyword[None] :
identifier[stoch] = keyword[False]
identifier[enc_mean] = identifier[self] . identifier[dec_ladder] [ identifier[self] . identifier[stoch_count_dec] ][ literal[int] ]
identifier[enc_std] = identifier[self] . identifier[dec_ladder] [ identifier[self] . identifier[stoch_count_dec] ][ literal[int] ]
identifier[self] . identifier[stoch_count_dec] += literal[int]
keyword[with] identifier[tf] . identifier[variable_scope] ( literal[string] ):
identifier[input_shape] =[ identifier[enc_mean] . identifier[get_shape] ()[ literal[int] ], identifier[enc_mean] . identifier[get_shape] ()[ literal[int] ], identifier[enc_mean] . identifier[get_shape] ()[ literal[int] ]]
identifier[w_std] = identifier[self] . identifier[weight_variable] ( identifier[name] = literal[string] , identifier[shape] = identifier[input_shape] )
identifier[w_mean] = identifier[self] . identifier[weight_variable] ( identifier[name] = literal[string] , identifier[shape] = identifier[input_shape] )
identifier[mean] = identifier[self] . identifier[input] * identifier[w_mean]
identifier[std] = identifier[tf] . identifier[nn] . identifier[softplus] ( identifier[self] . identifier[input] * identifier[w_std] )
keyword[if] identifier[ladder] == literal[int] :
identifier[eps] = literal[int]
identifier[new_std] = literal[int] /(( identifier[enc_std] + identifier[eps] )** literal[int] +( identifier[std] + identifier[eps] )** literal[int] )
identifier[new_mean] = identifier[new_std] *( identifier[enc_mean] *( literal[int] /( identifier[enc_std] + identifier[eps] )** literal[int] )+ identifier[mean] *( literal[int] /( identifier[std] + identifier[eps] )** literal[int] ))
identifier[self] . identifier[input] = identifier[new_mean] + identifier[tf] . identifier[random_normal] ( identifier[tf] . identifier[shape] ( identifier[self] . identifier[input] ))* identifier[new_std]
keyword[elif] identifier[ladder] == literal[int] :
keyword[raise] identifier[NotImplementedError]
keyword[else] :
identifier[self] . identifier[input] = identifier[mean] + identifier[tf] . identifier[random_normal] ( identifier[tf] . identifier[shape] ( identifier[self] . identifier[input] ))* identifier[std]
keyword[if] identifier[stoch] keyword[is] keyword[True] :
identifier[mean] , identifier[std] = identifier[tf] . identifier[split] ( literal[int] , literal[int] , identifier[self] . identifier[input] )
identifier[self] . identifier[input] = identifier[mean] + identifier[tf] . identifier[random_normal] ( identifier[tf] . identifier[shape] ( identifier[mean] ))* identifier[std]
identifier[output_channels] = identifier[int] ( identifier[output_channels] / literal[int] )
keyword[if] identifier[bn] keyword[is] keyword[True] :
identifier[self] . identifier[input] = identifier[self] . identifier[batch_norm] ( identifier[self] . identifier[input] )
keyword[if] identifier[b_value] keyword[is] keyword[not] keyword[None] :
identifier[b] = identifier[self] . identifier[const_variable] ( identifier[name] = literal[string] , identifier[shape] =[ identifier[output_channels] ], identifier[value] = identifier[b_value] )
identifier[self] . identifier[input] = identifier[tf] . identifier[add] ( identifier[self] . identifier[input] , identifier[b] )
keyword[if] identifier[s_value] keyword[is] keyword[not] keyword[None] :
identifier[s] = identifier[self] . identifier[const_variable] ( identifier[name] = literal[string] , identifier[shape] =[ identifier[output_channels] ], identifier[value] = identifier[s_value] )
identifier[self] . identifier[input] = identifier[tf] . identifier[multiply] ( identifier[self] . identifier[input] , identifier[s] )
keyword[if] identifier[activation_fn] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[input] = identifier[activation_fn] ( identifier[self] . identifier[input] )
identifier[self] . identifier[print_log] ( identifier[scope] + literal[string] + identifier[str] ( identifier[self] . identifier[input] . identifier[get_shape] ())) | def deconv2d(self, filter_size, output_channels, stride=1, padding='SAME', stoch=False, ladder=None, activation_fn=tf.nn.relu, b_value=0.0, s_value=1.0, bn=True):
"""
2D Deconvolutional Layer
:param filter_size: int. assumes square filter
:param output_channels: int
:param stride: int
:param padding: 'VALID' or 'SAME'
:param activation_fn: tf.nn function
:param b_value: float
:param s_value: float
"""
self.count['deconv'] += 1
scope = 'deconv_' + str(self.count['deconv'])
with tf.variable_scope(scope):
# Calculate the dimensions for deconv function
batch_size = tf.shape(self.input)[0]
input_height = tf.shape(self.input)[1]
input_width = tf.shape(self.input)[2]
if padding == 'VALID':
out_rows = (input_height - 1) * stride + filter_size
out_cols = (input_width - 1) * stride + filter_size # depends on [control=['if'], data=[]]
else: # padding == "SAME":
out_rows = input_height * stride
out_cols = input_width * stride
# Deconv function
input_channels = self.input.get_shape()[3]
output_shape = [filter_size, filter_size, output_channels, input_channels]
w = self.weight_variable(name='weights', shape=output_shape)
deconv_out_shape = tf.pack([batch_size, out_rows, out_cols, output_channels])
self.input = tf.nn.conv2d_transpose(self.input, w, deconv_out_shape, [1, stride, stride, 1], padding)
# Additional functions
if ladder is not None:
stoch = False
enc_mean = self.dec_ladder[self.stoch_count_dec][0]
enc_std = self.dec_ladder[self.stoch_count_dec][1]
self.stoch_count_dec += 1
with tf.variable_scope('ladder'):
input_shape = [enc_mean.get_shape()[1], enc_mean.get_shape()[2], enc_mean.get_shape()[3]]
w_std = self.weight_variable(name='weights_std', shape=input_shape)
w_mean = self.weight_variable(name='weights_mean', shape=input_shape)
mean = self.input * w_mean
std = tf.nn.softplus(self.input * w_std)
if ladder == 1: # LVAE Implementation
eps = 1e-10
new_std = 1 / ((enc_std + eps) ** 2 + (std + eps) ** 2)
new_mean = new_std * (enc_mean * (1 / (enc_std + eps) ** 2) + mean * (1 / (std + eps) ** 2))
self.input = new_mean + tf.random_normal(tf.shape(self.input)) * new_std # depends on [control=['if'], data=[]]
elif ladder == 2: # BLN Implementation
raise NotImplementedError # depends on [control=['if'], data=[]]
else:
self.input = mean + tf.random_normal(tf.shape(self.input)) * std # depends on [control=['with'], data=[]] # depends on [control=['if'], data=['ladder']]
if stoch is True: # Draw sample from Normal Layer
(mean, std) = tf.split(3, 2, self.input)
self.input = mean + tf.random_normal(tf.shape(mean)) * std
output_channels = int(output_channels / 2) # depends on [control=['if'], data=[]]
if bn is True: # batch normalization
self.input = self.batch_norm(self.input) # depends on [control=['if'], data=[]]
if b_value is not None: # bias value
b = self.const_variable(name='bias', shape=[output_channels], value=b_value)
self.input = tf.add(self.input, b) # depends on [control=['if'], data=['b_value']]
if s_value is not None: # scale value
s = self.const_variable(name='scale', shape=[output_channels], value=s_value)
self.input = tf.multiply(self.input, s) # depends on [control=['if'], data=['s_value']]
if activation_fn is not None: # non-linear activation function
self.input = activation_fn(self.input) # depends on [control=['if'], data=['activation_fn']] # depends on [control=['with'], data=[]]
self.print_log(scope + ' output: ' + str(self.input.get_shape())) |
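The output-size arithmetic at the top of `deconv2d` inverts the usual convolution sizing: VALID padding adds the filter overhang back, while SAME simply scales by the stride. A quick TensorFlow-free check of those two formulas:

```python
def deconv_out_size(in_size, filter_size, stride, padding):
    # Inverse of conv output sizing: VALID adds the filter overhang back,
    # SAME scales by the stride.
    if padding == 'VALID':
        return (in_size - 1) * stride + filter_size
    return in_size * stride

print(deconv_out_size(7, 4, 2, 'VALID'))  # 16: (7 - 1) * 2 + 4
print(deconv_out_size(7, 4, 2, 'SAME'))   # 14: 7 * 2
```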
def update(self, statement):
"""
Update the provided statement.
"""
Statement = self.get_model('statement')
Tag = self.get_model('tag')
if hasattr(statement, 'id'):
statement.save()
else:
statement = Statement.objects.create(
text=statement.text,
search_text=self.tagger.get_bigram_pair_string(statement.text),
conversation=statement.conversation,
in_response_to=statement.in_response_to,
search_in_response_to=self.tagger.get_bigram_pair_string(statement.in_response_to),
created_at=statement.created_at
)
for _tag in statement.tags.all():
tag, _ = Tag.objects.get_or_create(name=_tag)
statement.tags.add(tag)
return statement | def function[update, parameter[self, statement]]:
constant[
Update the provided statement.
]
variable[Statement] assign[=] call[name[self].get_model, parameter[constant[statement]]]
variable[Tag] assign[=] call[name[self].get_model, parameter[constant[tag]]]
if call[name[hasattr], parameter[name[statement], constant[id]]] begin[:]
call[name[statement].save, parameter[]]
for taget[name[_tag]] in starred[call[name[statement].tags.all, parameter[]]] begin[:]
<ast.Tuple object at 0x7da1b1f46770> assign[=] call[name[Tag].objects.get_or_create, parameter[]]
call[name[statement].tags.add, parameter[name[tag]]]
return[name[statement]] | keyword[def] identifier[update] ( identifier[self] , identifier[statement] ):
literal[string]
identifier[Statement] = identifier[self] . identifier[get_model] ( literal[string] )
identifier[Tag] = identifier[self] . identifier[get_model] ( literal[string] )
keyword[if] identifier[hasattr] ( identifier[statement] , literal[string] ):
identifier[statement] . identifier[save] ()
keyword[else] :
identifier[statement] = identifier[Statement] . identifier[objects] . identifier[create] (
identifier[text] = identifier[statement] . identifier[text] ,
identifier[search_text] = identifier[self] . identifier[tagger] . identifier[get_bigram_pair_string] ( identifier[statement] . identifier[text] ),
identifier[conversation] = identifier[statement] . identifier[conversation] ,
identifier[in_response_to] = identifier[statement] . identifier[in_response_to] ,
identifier[search_in_response_to] = identifier[self] . identifier[tagger] . identifier[get_bigram_pair_string] ( identifier[statement] . identifier[in_response_to] ),
identifier[created_at] = identifier[statement] . identifier[created_at]
)
keyword[for] identifier[_tag] keyword[in] identifier[statement] . identifier[tags] . identifier[all] ():
identifier[tag] , identifier[_] = identifier[Tag] . identifier[objects] . identifier[get_or_create] ( identifier[name] = identifier[_tag] )
identifier[statement] . identifier[tags] . identifier[add] ( identifier[tag] )
keyword[return] identifier[statement] | def update(self, statement):
"""
Update the provided statement.
"""
Statement = self.get_model('statement')
Tag = self.get_model('tag')
if hasattr(statement, 'id'):
statement.save() # depends on [control=['if'], data=[]]
else:
statement = Statement.objects.create(text=statement.text, search_text=self.tagger.get_bigram_pair_string(statement.text), conversation=statement.conversation, in_response_to=statement.in_response_to, search_in_response_to=self.tagger.get_bigram_pair_string(statement.in_response_to), created_at=statement.created_at)
for _tag in statement.tags.all():
(tag, _) = Tag.objects.get_or_create(name=_tag)
statement.tags.add(tag) # depends on [control=['for'], data=['_tag']]
return statement |
def _create_archive(
self,
archive_name,
metadata):
'''
    This adds an item in a DynamoDB table corresponding to an S3 object
Args
----
    archive_name: str
corresponds to the name of the Archive (e.g. )
Returns
-------
Dictionary with confirmation of upload
'''
archive_exists = False
try:
self.get_archive(archive_name)
archive_exists = True
except KeyError:
pass
if archive_exists:
raise KeyError(
"{} already exists. Use get_archive() to view".format(
archive_name))
self._table.put_item(Item=metadata) | def function[_create_archive, parameter[self, archive_name, metadata]]:
constant[
    This adds an item in a DynamoDB table corresponding to an S3 object
Args
----
    archive_name: str
corresponds to the name of the Archive (e.g. )
Returns
-------
Dictionary with confirmation of upload
]
variable[archive_exists] assign[=] constant[False]
<ast.Try object at 0x7da1b0b38fa0>
if name[archive_exists] begin[:]
<ast.Raise object at 0x7da1b0bceb90>
call[name[self]._table.put_item, parameter[]] | keyword[def] identifier[_create_archive] (
identifier[self] ,
identifier[archive_name] ,
identifier[metadata] ):
literal[string]
identifier[archive_exists] = keyword[False]
keyword[try] :
identifier[self] . identifier[get_archive] ( identifier[archive_name] )
identifier[archive_exists] = keyword[True]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[if] identifier[archive_exists] :
keyword[raise] identifier[KeyError] (
literal[string] . identifier[format] (
identifier[archive_name] ))
identifier[self] . identifier[_table] . identifier[put_item] ( identifier[Item] = identifier[metadata] ) | def _create_archive(self, archive_name, metadata):
"""
    This adds an item in a DynamoDB table corresponding to an S3 object
Args
----
    archive_name: str
corresponds to the name of the Archive (e.g. )
Returns
-------
Dictionary with confirmation of upload
"""
archive_exists = False
try:
self.get_archive(archive_name)
archive_exists = True # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
if archive_exists:
raise KeyError('{} already exists. Use get_archive() to view'.format(archive_name)) # depends on [control=['if'], data=[]]
self._table.put_item(Item=metadata) |
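A hedged usage sketch for the method above; `manager` and the metadata keys are illustrative placeholders, since the snippet does not pin down the DynamoDB item schema.

# Hypothetical call; `manager` stands in for the object that owns
# _create_archive and its DynamoDB table handle.
manager._create_archive(
    archive_name='project/results.csv',
    metadata={'_id': 'project/results.csv', 'versioned': True})
# A second call with the same name raises KeyError, because get_archive()
# now succeeds and archive_exists flips to True.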
def refkeys(self,fields):
"returns {ModelClass:list_of_pkey_tuples}. see syncschema.RefKey. Don't use this yet."
# todo doc: better explanation of what refkeys are and how fields plays in
dd=collections.defaultdict(list)
if any(f not in self.REFKEYS for f in fields): raise ValueError(fields,'not all in',self.REFKEYS.keys())
for f in fields:
rk=self.REFKEYS[f]
for model in rk.refmodels: dd[model].extend(rk.pkeys(self,f))
return dd | def function[refkeys, parameter[self, fields]]:
constant[returns {ModelClass:list_of_pkey_tuples}. see syncschema.RefKey. Don't use this yet.]
variable[dd] assign[=] call[name[collections].defaultdict, parameter[name[list]]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b15b3bb0>]] begin[:]
<ast.Raise object at 0x7da1b1304100>
for taget[name[f]] in starred[name[fields]] begin[:]
variable[rk] assign[=] call[name[self].REFKEYS][name[f]]
for taget[name[model]] in starred[name[rk].refmodels] begin[:]
call[call[name[dd]][name[model]].extend, parameter[call[name[rk].pkeys, parameter[name[self], name[f]]]]]
return[name[dd]] | keyword[def] identifier[refkeys] ( identifier[self] , identifier[fields] ):
literal[string]
identifier[dd] = identifier[collections] . identifier[defaultdict] ( identifier[list] )
keyword[if] identifier[any] ( identifier[f] keyword[not] keyword[in] identifier[self] . identifier[REFKEYS] keyword[for] identifier[f] keyword[in] identifier[fields] ): keyword[raise] identifier[ValueError] ( identifier[fields] , literal[string] , identifier[self] . identifier[REFKEYS] . identifier[keys] ())
keyword[for] identifier[f] keyword[in] identifier[fields] :
identifier[rk] = identifier[self] . identifier[REFKEYS] [ identifier[f] ]
keyword[for] identifier[model] keyword[in] identifier[rk] . identifier[refmodels] : identifier[dd] [ identifier[model] ]. identifier[extend] ( identifier[rk] . identifier[pkeys] ( identifier[self] , identifier[f] ))
keyword[return] identifier[dd] | def refkeys(self, fields):
"""returns {ModelClass:list_of_pkey_tuples}. see syncschema.RefKey. Don't use this yet.""" # todo doc: better explanation of what refkeys are and how fields plays in
dd = collections.defaultdict(list)
if any((f not in self.REFKEYS for f in fields)):
raise ValueError(fields, 'not all in', self.REFKEYS.keys()) # depends on [control=['if'], data=[]]
for f in fields:
rk = self.REFKEYS[f]
for model in rk.refmodels:
dd[model].extend(rk.pkeys(self, f)) # depends on [control=['for'], data=['model']] # depends on [control=['for'], data=['f']]
return dd |
def start(self, stages=(1, 2)):
"""
Starts the processes or threads in the internal pool and the threads,
which manage the **worker pool** input and output queues. The
**starting mode** is split into **two stages**, which can be initiated
    separately. After the first stage the **worker pool** processes or
threads are started and the ``NuMap._started`` event is set ``True``.
A call to the ``NuMap.next`` method **will** block. After the **second
stage** the ``NuMap._pool_putter`` and ``NuMap._pool_getter`` threads
will be running. The ``NuMap.next`` method should only be called **after**
this method returns.
Arguments:
- stages (``tuple``) [default: ``(1, 2)``] Specifies which stages of
the start process to execute, by default both stages.
"""
if 1 in stages:
if not self._started.isSet():
self._start_workers()
self._started.set()
if 2 in stages:
if not hasattr(self, '_pool_getter'):
self._start_managers() | def function[start, parameter[self, stages]]:
constant[
Starts the processes or threads in the internal pool and the threads,
which manage the **worker pool** input and output queues. The
**starting mode** is split into **two stages**, which can be initiated
    separately. After the first stage the **worker pool** processes or
threads are started and the ``NuMap._started`` event is set ``True``.
A call to the ``NuMap.next`` method **will** block. After the **second
stage** the ``NuMap._pool_putter`` and ``NuMap._pool_getter`` threads
will be running. The ``NuMap.next`` method should only be called **after**
this method returns.
Arguments:
- stages (``tuple``) [default: ``(1, 2)``] Specifies which stages of
the start process to execute, by default both stages.
]
if compare[constant[1] in name[stages]] begin[:]
if <ast.UnaryOp object at 0x7da1b25643d0> begin[:]
call[name[self]._start_workers, parameter[]]
call[name[self]._started.set, parameter[]]
if compare[constant[2] in name[stages]] begin[:]
if <ast.UnaryOp object at 0x7da1b25663b0> begin[:]
call[name[self]._start_managers, parameter[]] | keyword[def] identifier[start] ( identifier[self] , identifier[stages] =( literal[int] , literal[int] )):
literal[string]
keyword[if] literal[int] keyword[in] identifier[stages] :
keyword[if] keyword[not] identifier[self] . identifier[_started] . identifier[isSet] ():
identifier[self] . identifier[_start_workers] ()
identifier[self] . identifier[_started] . identifier[set] ()
keyword[if] literal[int] keyword[in] identifier[stages] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_start_managers] () | def start(self, stages=(1, 2)):
"""
Starts the processes or threads in the internal pool and the threads,
which manage the **worker pool** input and output queues. The
**starting mode** is split into **two stages**, which can be initiated
    separately. After the first stage the **worker pool** processes or
threads are started and the ``NuMap._started`` event is set ``True``.
A call to the ``NuMap.next`` method **will** block. After the **second
stage** the ``NuMap._pool_putter`` and ``NuMap._pool_getter`` threads
will be running. The ``NuMap.next`` method should only be called **after**
this method returns.
Arguments:
- stages (``tuple``) [default: ``(1, 2)``] Specifies which stages of
the start process to execute, by default both stages.
"""
if 1 in stages:
if not self._started.isSet():
self._start_workers()
self._started.set() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if 2 in stages:
if not hasattr(self, '_pool_getter'):
self._start_managers() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
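A minimal sketch of the two-stage start-up the docstring describes, assuming `numap` is a NuMap instance constructed elsewhere:

numap.start(stages=(1,))   # stage 1: worker pool runs, _started event set
# ... other setup may happen here; NuMap.next would still block ...
numap.start(stages=(2,))   # stage 2: pool putter/getter manager threads run
result = numap.next()      # per the docstring, only safe after stage 2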
def xml_to_json(element, definition, required=False):
    # TODO document tuple - it looks a little too complex
"""Convert XML (ElementTree) to dictionary from a definition schema.
Definition schema can be a simple string - XPath or @attribute for
direct extraction or a complex one described by
* dictionary ``{key: 'xpath or @attribute', second: 'complex definition'}`` \
required parameters can be marked with * at the end
* list ``[xpath, [definition]]`` - create a list of all elements found by \
xpath, parse the parts with given definition if provided as second \
argument
* Callable - parse the element by given function, can be handy as a part \
of complex definition
:param element: ElementTree element
:type element: ElementTree.Element
:param definition: schema for the json
:type definition: Union[str, tuple, dict, list, Callable]
:param required: parsed value should be not None
:type required: bool
:return: parsed xml
:rtype: Union[dict, str, list]
"""
# handle simple definition
if isinstance(definition, str) and len(definition) > 0:
if definition[0] == '@': # test for attribute
return element.get(definition[1:])
# get tag text
else:
sub_element = element.find(definition)
if sub_element is None:
if required:
raise NotCompleteXmlException('Expecting {0} in element {1}'.format(definition, element.tag))
return None
return sub_element.text.strip() if sub_element.text else None
# handle tuple
elif isinstance(definition, tuple):
return _parse_tuple(element, definition, required)
# handle dict
elif isinstance(definition, dict):
return _parse_dict(element, definition)
# handle list
elif isinstance(definition, list):
return _parse_list(element, definition)
elif hasattr(definition, '__call__'):
return definition(element)
# default
else:
return element.text.strip() if element.text else None | def function[xml_to_json, parameter[element, definition, required]]:
constant[Convert XML (ElementTree) to dictionary from a definition schema.
Definition schema can be a simple string - XPath or @attribute for
direct extraction or a complex one described by
* dictionary ``{key: 'xpath or @attribute', second: 'complex definition'}`` required parameters can be marked with * at the end
* list ``[xpath, [definition]]`` - create a list of all elements found by xpath, parse the parts with given definition if provided as second argument
* Callable - parse the element by given function, can be handy as a part of complex definition
:param element: ElementTree element
:type element: ElementTree.Element
:param definition: schema for the json
:type definition: Union[str, tuple, dict, list, Callable]
:param required: parsed value should be not None
:type required: bool
:return: parsed xml
:rtype: Union[dict, str, list]
]
if <ast.BoolOp object at 0x7da207f983a0> begin[:]
if compare[call[name[definition]][constant[0]] equal[==] constant[@]] begin[:]
return[call[name[element].get, parameter[call[name[definition]][<ast.Slice object at 0x7da207f996f0>]]]] | keyword[def] identifier[xml_to_json] ( identifier[element] , identifier[definition] , identifier[required] = keyword[False] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[definition] , identifier[str] ) keyword[and] identifier[len] ( identifier[definition] )> literal[int] :
keyword[if] identifier[definition] [ literal[int] ]== literal[string] :
keyword[return] identifier[element] . identifier[get] ( identifier[definition] [ literal[int] :])
keyword[else] :
identifier[sub_element] = identifier[element] . identifier[find] ( identifier[definition] )
keyword[if] identifier[sub_element] keyword[is] keyword[None] :
keyword[if] identifier[required] :
keyword[raise] identifier[NotCompleteXmlException] ( literal[string] . identifier[format] ( identifier[definition] , identifier[element] . identifier[tag] ))
keyword[return] keyword[None]
keyword[return] identifier[sub_element] . identifier[text] . identifier[strip] () keyword[if] identifier[sub_element] . identifier[text] keyword[else] keyword[None]
keyword[elif] identifier[isinstance] ( identifier[definition] , identifier[tuple] ):
keyword[return] identifier[_parse_tuple] ( identifier[element] , identifier[definition] , identifier[required] )
keyword[elif] identifier[isinstance] ( identifier[definition] , identifier[dict] ):
keyword[return] identifier[_parse_dict] ( identifier[element] , identifier[definition] )
keyword[elif] identifier[isinstance] ( identifier[definition] , identifier[list] ):
keyword[return] identifier[_parse_list] ( identifier[element] , identifier[definition] )
keyword[elif] identifier[hasattr] ( identifier[definition] , literal[string] ):
keyword[return] identifier[definition] ( identifier[element] )
keyword[else] :
keyword[return] identifier[element] . identifier[text] . identifier[strip] () keyword[if] identifier[element] . identifier[text] keyword[else] keyword[None] | def xml_to_json(element, definition, required=False):
    # TODO document tuple - it looks a little too complex
"Convert XML (ElementTree) to dictionary from a definition schema.\n\n Definition schema can be a simple string - XPath or @attribute for\n direct extraction or a complex one described by\n\n * dictionary ``{key: 'xpath or @attribute', second: 'complex definition'}`` required parameters can be marked with * at the end\n * list ``[xpath, [definition]]`` - create a list of all elements found by xpath, parse the parts with given definition if provided as second argument\n * Callable - parse the element by given function, can be handy as a part of complex definition\n\n :param element: ElementTree element\n :type element: ElementTree.Element\n :param definition: schema for the json\n :type definition: Union[str, tuple, dict, list, Callable]\n :param required: parsed value should be not None\n :type required: bool\n :return: parsed xml\n :rtype: Union[dict, str, list]\n "
# handle simple definition
if isinstance(definition, str) and len(definition) > 0:
if definition[0] == '@': # test for attribute
return element.get(definition[1:]) # depends on [control=['if'], data=[]]
else:
# get tag text
sub_element = element.find(definition)
if sub_element is None:
if required:
raise NotCompleteXmlException('Expecting {0} in element {1}'.format(definition, element.tag)) # depends on [control=['if'], data=[]]
return None # depends on [control=['if'], data=[]]
return sub_element.text.strip() if sub_element.text else None # depends on [control=['if'], data=[]]
# handle tuple
elif isinstance(definition, tuple):
return _parse_tuple(element, definition, required) # depends on [control=['if'], data=[]]
# handle dict
elif isinstance(definition, dict):
return _parse_dict(element, definition) # depends on [control=['if'], data=[]]
# handle list
elif isinstance(definition, list):
return _parse_list(element, definition) # depends on [control=['if'], data=[]]
elif hasattr(definition, '__call__'):
return definition(element) # depends on [control=['if'], data=[]]
else:
# default
return element.text.strip() if element.text else None |
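A small self-contained demo of the definition kinds this snippet handles end to end (XPath string, @attribute, and a callable); the XML payload is invented for illustration, and `xml_to_json` as defined above is assumed to be in scope.

import xml.etree.ElementTree as ET

book = ET.fromstring('<book id="42"><title> Dune </title></book>')
print(xml_to_json(book, 'title'))          # 'Dune'  -- XPath, text stripped
print(xml_to_json(book, '@id'))            # '42'    -- attribute lookup
print(xml_to_json(book, lambda e: e.tag))  # 'book'  -- callable definition
print(xml_to_json(book, 'isbn'))           # None    -- missing, not required
# xml_to_json(book, 'isbn', required=True) would raise NotCompleteXmlException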
def Sum(*args: BitVec) -> BitVec:
"""Create sum expression.
:return:
"""
raw = z3.Sum([a.raw for a in args])
annotations = [] # type: Annotations
bitvecfuncs = []
for bv in args:
annotations += bv.annotations
if isinstance(bv, BitVecFunc):
bitvecfuncs.append(bv)
if len(bitvecfuncs) >= 2:
return BitVecFunc(raw=raw, func_name=None, input_=None, annotations=annotations)
elif len(bitvecfuncs) == 1:
return BitVecFunc(
raw=raw,
func_name=bitvecfuncs[0].func_name,
input_=bitvecfuncs[0].input_,
annotations=annotations,
)
return BitVec(raw, annotations) | def function[Sum, parameter[]]:
constant[Create sum expression.
:return:
]
variable[raw] assign[=] call[name[z3].Sum, parameter[<ast.ListComp object at 0x7da1b1ddcc10>]]
variable[annotations] assign[=] list[[]]
variable[bitvecfuncs] assign[=] list[[]]
for taget[name[bv]] in starred[name[args]] begin[:]
<ast.AugAssign object at 0x7da1b1ddfc70>
if call[name[isinstance], parameter[name[bv], name[BitVecFunc]]] begin[:]
call[name[bitvecfuncs].append, parameter[name[bv]]]
if compare[call[name[len], parameter[name[bitvecfuncs]]] greater_or_equal[>=] constant[2]] begin[:]
return[call[name[BitVecFunc], parameter[]]]
return[call[name[BitVec], parameter[name[raw], name[annotations]]]] | keyword[def] identifier[Sum] (* identifier[args] : identifier[BitVec] )-> identifier[BitVec] :
literal[string]
identifier[raw] = identifier[z3] . identifier[Sum] ([ identifier[a] . identifier[raw] keyword[for] identifier[a] keyword[in] identifier[args] ])
identifier[annotations] =[]
identifier[bitvecfuncs] =[]
keyword[for] identifier[bv] keyword[in] identifier[args] :
identifier[annotations] += identifier[bv] . identifier[annotations]
keyword[if] identifier[isinstance] ( identifier[bv] , identifier[BitVecFunc] ):
identifier[bitvecfuncs] . identifier[append] ( identifier[bv] )
keyword[if] identifier[len] ( identifier[bitvecfuncs] )>= literal[int] :
keyword[return] identifier[BitVecFunc] ( identifier[raw] = identifier[raw] , identifier[func_name] = keyword[None] , identifier[input_] = keyword[None] , identifier[annotations] = identifier[annotations] )
keyword[elif] identifier[len] ( identifier[bitvecfuncs] )== literal[int] :
keyword[return] identifier[BitVecFunc] (
identifier[raw] = identifier[raw] ,
identifier[func_name] = identifier[bitvecfuncs] [ literal[int] ]. identifier[func_name] ,
identifier[input_] = identifier[bitvecfuncs] [ literal[int] ]. identifier[input_] ,
identifier[annotations] = identifier[annotations] ,
)
keyword[return] identifier[BitVec] ( identifier[raw] , identifier[annotations] ) | def Sum(*args: BitVec) -> BitVec:
"""Create sum expression.
:return:
"""
raw = z3.Sum([a.raw for a in args])
annotations = [] # type: Annotations
bitvecfuncs = []
for bv in args:
annotations += bv.annotations
if isinstance(bv, BitVecFunc):
bitvecfuncs.append(bv) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bv']]
if len(bitvecfuncs) >= 2:
return BitVecFunc(raw=raw, func_name=None, input_=None, annotations=annotations) # depends on [control=['if'], data=[]]
elif len(bitvecfuncs) == 1:
return BitVecFunc(raw=raw, func_name=bitvecfuncs[0].func_name, input_=bitvecfuncs[0].input_, annotations=annotations) # depends on [control=['if'], data=[]]
return BitVec(raw, annotations) |
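The arithmetic underneath the wrapper is plain z3; a minimal sketch (variable names invented) of what `raw` holds before it is re-wrapped into a BitVec or BitVecFunc:

import z3

a = z3.BitVec('a', 256)
b = z3.BitVec('b', 256)
raw = z3.Sum([a, b])      # same reduction applied above to the .raw terms
print(z3.simplify(raw))   # a + b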
def samples(dataset='imagenet', index=0, batchsize=1, shape=(224, 224),
data_format='channels_last'):
''' Returns a batch of example images and the corresponding labels
Parameters
----------
dataset : string
The data set to load (options: imagenet, mnist, cifar10,
cifar100, fashionMNIST)
index : int
For each data set 20 example images exist. The returned batch
contains the images with index [index, index + 1, index + 2, ...]
batchsize : int
Size of batch.
shape : list of integers
The shape of the returned image (only relevant for Imagenet).
data_format : str
"channels_first" or "channels_last"
Returns
-------
images : array_like
The batch of example images
labels : array of int
The labels associated with the images.
'''
from PIL import Image
images, labels = [], []
basepath = os.path.dirname(__file__)
samplepath = os.path.join(basepath, 'data')
files = os.listdir(samplepath)
for idx in range(index, index + batchsize):
i = idx % 20
# get filename and label
file = [n for n in files if '{}_{:02d}_'.format(dataset, i) in n][0]
label = int(file.split('.')[0].split('_')[-1])
# open file
path = os.path.join(samplepath, file)
image = Image.open(path)
if dataset == 'imagenet':
image = image.resize(shape)
image = np.asarray(image, dtype=np.float32)
if dataset != 'mnist' and data_format == 'channels_first':
image = np.transpose(image, (2, 0, 1))
images.append(image)
labels.append(label)
labels = np.array(labels)
images = np.stack(images)
return images, labels | def function[samples, parameter[dataset, index, batchsize, shape, data_format]]:
constant[ Returns a batch of example images and the corresponding labels
Parameters
----------
dataset : string
The data set to load (options: imagenet, mnist, cifar10,
cifar100, fashionMNIST)
index : int
For each data set 20 example images exist. The returned batch
contains the images with index [index, index + 1, index + 2, ...]
batchsize : int
Size of batch.
shape : list of integers
The shape of the returned image (only relevant for Imagenet).
data_format : str
"channels_first" or "channels_last"
Returns
-------
images : array_like
The batch of example images
labels : array of int
The labels associated with the images.
]
from relative_module[PIL] import module[Image]
<ast.Tuple object at 0x7da1b23449a0> assign[=] tuple[[<ast.List object at 0x7da1b2346080>, <ast.List object at 0x7da1b2344ee0>]]
variable[basepath] assign[=] call[name[os].path.dirname, parameter[name[__file__]]]
variable[samplepath] assign[=] call[name[os].path.join, parameter[name[basepath], constant[data]]]
variable[files] assign[=] call[name[os].listdir, parameter[name[samplepath]]]
for taget[name[idx]] in starred[call[name[range], parameter[name[index], binary_operation[name[index] + name[batchsize]]]]] begin[:]
variable[i] assign[=] binary_operation[name[idx] <ast.Mod object at 0x7da2590d6920> constant[20]]
variable[file] assign[=] call[<ast.ListComp object at 0x7da20c992140>][constant[0]]
variable[label] assign[=] call[name[int], parameter[call[call[call[call[name[file].split, parameter[constant[.]]]][constant[0]].split, parameter[constant[_]]]][<ast.UnaryOp object at 0x7da18f58ec20>]]]
variable[path] assign[=] call[name[os].path.join, parameter[name[samplepath], name[file]]]
variable[image] assign[=] call[name[Image].open, parameter[name[path]]]
if compare[name[dataset] equal[==] constant[imagenet]] begin[:]
variable[image] assign[=] call[name[image].resize, parameter[name[shape]]]
variable[image] assign[=] call[name[np].asarray, parameter[name[image]]]
if <ast.BoolOp object at 0x7da20c6c58d0> begin[:]
variable[image] assign[=] call[name[np].transpose, parameter[name[image], tuple[[<ast.Constant object at 0x7da20c6c6dd0>, <ast.Constant object at 0x7da20c6c7ca0>, <ast.Constant object at 0x7da20c6c6950>]]]]
call[name[images].append, parameter[name[image]]]
call[name[labels].append, parameter[name[label]]]
variable[labels] assign[=] call[name[np].array, parameter[name[labels]]]
variable[images] assign[=] call[name[np].stack, parameter[name[images]]]
return[tuple[[<ast.Name object at 0x7da18dc9a2c0>, <ast.Name object at 0x7da18dc9abc0>]]] | keyword[def] identifier[samples] ( identifier[dataset] = literal[string] , identifier[index] = literal[int] , identifier[batchsize] = literal[int] , identifier[shape] =( literal[int] , literal[int] ),
identifier[data_format] = literal[string] ):
literal[string]
keyword[from] identifier[PIL] keyword[import] identifier[Image]
identifier[images] , identifier[labels] =[],[]
identifier[basepath] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] )
identifier[samplepath] = identifier[os] . identifier[path] . identifier[join] ( identifier[basepath] , literal[string] )
identifier[files] = identifier[os] . identifier[listdir] ( identifier[samplepath] )
keyword[for] identifier[idx] keyword[in] identifier[range] ( identifier[index] , identifier[index] + identifier[batchsize] ):
identifier[i] = identifier[idx] % literal[int]
identifier[file] =[ identifier[n] keyword[for] identifier[n] keyword[in] identifier[files] keyword[if] literal[string] . identifier[format] ( identifier[dataset] , identifier[i] ) keyword[in] identifier[n] ][ literal[int] ]
identifier[label] = identifier[int] ( identifier[file] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[- literal[int] ])
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[samplepath] , identifier[file] )
identifier[image] = identifier[Image] . identifier[open] ( identifier[path] )
keyword[if] identifier[dataset] == literal[string] :
identifier[image] = identifier[image] . identifier[resize] ( identifier[shape] )
identifier[image] = identifier[np] . identifier[asarray] ( identifier[image] , identifier[dtype] = identifier[np] . identifier[float32] )
keyword[if] identifier[dataset] != literal[string] keyword[and] identifier[data_format] == literal[string] :
identifier[image] = identifier[np] . identifier[transpose] ( identifier[image] ,( literal[int] , literal[int] , literal[int] ))
identifier[images] . identifier[append] ( identifier[image] )
identifier[labels] . identifier[append] ( identifier[label] )
identifier[labels] = identifier[np] . identifier[array] ( identifier[labels] )
identifier[images] = identifier[np] . identifier[stack] ( identifier[images] )
keyword[return] identifier[images] , identifier[labels] | def samples(dataset='imagenet', index=0, batchsize=1, shape=(224, 224), data_format='channels_last'):
""" Returns a batch of example images and the corresponding labels
Parameters
----------
dataset : string
The data set to load (options: imagenet, mnist, cifar10,
cifar100, fashionMNIST)
index : int
For each data set 20 example images exist. The returned batch
contains the images with index [index, index + 1, index + 2, ...]
batchsize : int
Size of batch.
shape : list of integers
The shape of the returned image (only relevant for Imagenet).
data_format : str
"channels_first" or "channels_last"
Returns
-------
images : array_like
The batch of example images
labels : array of int
The labels associated with the images.
"""
from PIL import Image
(images, labels) = ([], [])
basepath = os.path.dirname(__file__)
samplepath = os.path.join(basepath, 'data')
files = os.listdir(samplepath)
for idx in range(index, index + batchsize):
i = idx % 20
# get filename and label
file = [n for n in files if '{}_{:02d}_'.format(dataset, i) in n][0]
label = int(file.split('.')[0].split('_')[-1])
# open file
path = os.path.join(samplepath, file)
image = Image.open(path)
if dataset == 'imagenet':
image = image.resize(shape) # depends on [control=['if'], data=[]]
image = np.asarray(image, dtype=np.float32)
if dataset != 'mnist' and data_format == 'channels_first':
image = np.transpose(image, (2, 0, 1)) # depends on [control=['if'], data=[]]
images.append(image)
labels.append(label) # depends on [control=['for'], data=['idx']]
labels = np.array(labels)
images = np.stack(images)
return (images, labels) |
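Hypothetical usage; this only runs where the package's bundled example images are present, since the function reads them from its own data directory.

images, labels = samples(dataset='mnist', index=0, batchsize=4)
print(images.shape)   # (4, 28, 28) for mnist; only imagenet gets resized
print(labels)         # four integer class labels parsed from the filenames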
def _resolve_registered(self, event_handler, full_config) -> typing.Generator:
"""
Resolve registered filters
:param event_handler:
:param full_config:
:return:
"""
for record in self._registered:
filter_ = record.resolve(self._dispatcher, event_handler, full_config)
if filter_:
yield filter_
if full_config:
        raise NameError('Invalid filter name(s): \'' + '\', \''.join(full_config.keys()) + '\'')
constant[
Resolve registered filters
:param event_handler:
:param full_config:
:return:
]
for taget[name[record]] in starred[name[self]._registered] begin[:]
variable[filter_] assign[=] call[name[record].resolve, parameter[name[self]._dispatcher, name[event_handler], name[full_config]]]
if name[filter_] begin[:]
<ast.Yield object at 0x7da1b18310c0>
if name[full_config] begin[:]
<ast.Raise object at 0x7da1b1831a50> | keyword[def] identifier[_resolve_registered] ( identifier[self] , identifier[event_handler] , identifier[full_config] )-> identifier[typing] . identifier[Generator] :
literal[string]
keyword[for] identifier[record] keyword[in] identifier[self] . identifier[_registered] :
identifier[filter_] = identifier[record] . identifier[resolve] ( identifier[self] . identifier[_dispatcher] , identifier[event_handler] , identifier[full_config] )
keyword[if] identifier[filter_] :
keyword[yield] identifier[filter_]
keyword[if] identifier[full_config] :
keyword[raise] identifier[NameError] ( literal[string] + literal[string] . identifier[join] ( identifier[full_config] . identifier[keys] ())+ literal[string] ) | def _resolve_registered(self, event_handler, full_config) -> typing.Generator:
"""
Resolve registered filters
:param event_handler:
:param full_config:
:return:
"""
for record in self._registered:
filter_ = record.resolve(self._dispatcher, event_handler, full_config)
if filter_:
yield filter_ # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['record']]
if full_config:
        raise NameError("Invalid filter name(s): '" + "', '".join(full_config.keys()) + "'") # depends on [control=['if'], data=[]]
def _match_data_sets(x,y):
"""
Makes sure everything is the same shape. "Intelligently".
"""
# Handle the None for x or y
if x is None:
# If x is none, y can be either [1,2] or [[1,2],[1,2]]
if _fun.is_iterable(y[0]):
# make an array of arrays to match
x = []
for n in range(len(y)):
x.append(list(range(len(y[n]))))
else: x = list(range(len(y)))
if y is None:
        # If y is none, x can be either [1,2] or [[1,2],[1,2]]
if _fun.is_iterable(x[0]):
# make an array of arrays to match
y = []
for n in range(len(x)):
y.append(list(range(len(x[n]))))
else: y = list(range(len(x)))
# At this point they should be matched, but may still be 1D
# Default behavior: if all elements are numbers in both, assume they match
if _fun.elements_are_numbers(x) and _fun.elements_are_numbers(y):
x = [x]
y = [y]
    # Second default behavior: shared array [1,2,3], [[1,2,1],[1,2,1]] or vice versa
if _fun.elements_are_numbers(x) and not _fun.elements_are_numbers(y): x = [x]*len(y)
if _fun.elements_are_numbers(y) and not _fun.elements_are_numbers(x): y = [y]*len(x)
# Clean up any remaining Nones
for n in range(len(x)):
if x[n] is None: x[n] = list(range(len(y[n])))
if y[n] is None: y[n] = list(range(len(x[n])))
return x, y | def function[_match_data_sets, parameter[x, y]]:
constant[
Makes sure everything is the same shape. "Intelligently".
]
if compare[name[x] is constant[None]] begin[:]
if call[name[_fun].is_iterable, parameter[call[name[y]][constant[0]]]] begin[:]
variable[x] assign[=] list[[]]
for taget[name[n]] in starred[call[name[range], parameter[call[name[len], parameter[name[y]]]]]] begin[:]
call[name[x].append, parameter[call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[call[name[y]][name[n]]]]]]]]]]
if compare[name[y] is constant[None]] begin[:]
if call[name[_fun].is_iterable, parameter[call[name[x]][constant[0]]]] begin[:]
variable[y] assign[=] list[[]]
for taget[name[n]] in starred[call[name[range], parameter[call[name[len], parameter[name[x]]]]]] begin[:]
call[name[y].append, parameter[call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[call[name[x]][name[n]]]]]]]]]]
if <ast.BoolOp object at 0x7da2047e8640> begin[:]
variable[x] assign[=] list[[<ast.Name object at 0x7da2047eb640>]]
variable[y] assign[=] list[[<ast.Name object at 0x7da2047eb1c0>]]
if <ast.BoolOp object at 0x7da2047ea440> begin[:]
variable[x] assign[=] binary_operation[list[[<ast.Name object at 0x7da2047eacb0>]] * call[name[len], parameter[name[y]]]]
if <ast.BoolOp object at 0x7da2047e9f90> begin[:]
variable[y] assign[=] binary_operation[list[[<ast.Name object at 0x7da18f09cf70>]] * call[name[len], parameter[name[x]]]]
for taget[name[n]] in starred[call[name[range], parameter[call[name[len], parameter[name[x]]]]]] begin[:]
if compare[call[name[x]][name[n]] is constant[None]] begin[:]
call[name[x]][name[n]] assign[=] call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[call[name[y]][name[n]]]]]]]]
if compare[call[name[y]][name[n]] is constant[None]] begin[:]
call[name[y]][name[n]] assign[=] call[name[list], parameter[call[name[range], parameter[call[name[len], parameter[call[name[x]][name[n]]]]]]]]
return[tuple[[<ast.Name object at 0x7da18ede7f40>, <ast.Name object at 0x7da18ede4130>]]] | keyword[def] identifier[_match_data_sets] ( identifier[x] , identifier[y] ):
literal[string]
keyword[if] identifier[x] keyword[is] keyword[None] :
keyword[if] identifier[_fun] . identifier[is_iterable] ( identifier[y] [ literal[int] ]):
identifier[x] =[]
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[y] )):
identifier[x] . identifier[append] ( identifier[list] ( identifier[range] ( identifier[len] ( identifier[y] [ identifier[n] ]))))
keyword[else] : identifier[x] = identifier[list] ( identifier[range] ( identifier[len] ( identifier[y] )))
keyword[if] identifier[y] keyword[is] keyword[None] :
keyword[if] identifier[_fun] . identifier[is_iterable] ( identifier[x] [ literal[int] ]):
identifier[y] =[]
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[x] )):
identifier[y] . identifier[append] ( identifier[list] ( identifier[range] ( identifier[len] ( identifier[x] [ identifier[n] ]))))
keyword[else] : identifier[y] = identifier[list] ( identifier[range] ( identifier[len] ( identifier[x] )))
keyword[if] identifier[_fun] . identifier[elements_are_numbers] ( identifier[x] ) keyword[and] identifier[_fun] . identifier[elements_are_numbers] ( identifier[y] ):
identifier[x] =[ identifier[x] ]
identifier[y] =[ identifier[y] ]
keyword[if] identifier[_fun] . identifier[elements_are_numbers] ( identifier[x] ) keyword[and] keyword[not] identifier[_fun] . identifier[elements_are_numbers] ( identifier[y] ): identifier[x] =[ identifier[x] ]* identifier[len] ( identifier[y] )
keyword[if] identifier[_fun] . identifier[elements_are_numbers] ( identifier[y] ) keyword[and] keyword[not] identifier[_fun] . identifier[elements_are_numbers] ( identifier[x] ): identifier[y] =[ identifier[y] ]* identifier[len] ( identifier[x] )
keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[x] )):
keyword[if] identifier[x] [ identifier[n] ] keyword[is] keyword[None] : identifier[x] [ identifier[n] ]= identifier[list] ( identifier[range] ( identifier[len] ( identifier[y] [ identifier[n] ])))
keyword[if] identifier[y] [ identifier[n] ] keyword[is] keyword[None] : identifier[y] [ identifier[n] ]= identifier[list] ( identifier[range] ( identifier[len] ( identifier[x] [ identifier[n] ])))
keyword[return] identifier[x] , identifier[y] | def _match_data_sets(x, y):
"""
Makes sure everything is the same shape. "Intelligently".
"""
# Handle the None for x or y
if x is None:
# If x is none, y can be either [1,2] or [[1,2],[1,2]]
if _fun.is_iterable(y[0]):
# make an array of arrays to match
x = []
for n in range(len(y)):
x.append(list(range(len(y[n])))) # depends on [control=['for'], data=['n']] # depends on [control=['if'], data=[]]
else:
x = list(range(len(y))) # depends on [control=['if'], data=['x']]
if y is None:
        # If y is none, x can be either [1,2] or [[1,2],[1,2]]
if _fun.is_iterable(x[0]):
# make an array of arrays to match
y = []
for n in range(len(x)):
y.append(list(range(len(x[n])))) # depends on [control=['for'], data=['n']] # depends on [control=['if'], data=[]]
else:
y = list(range(len(x))) # depends on [control=['if'], data=['y']]
# At this point they should be matched, but may still be 1D
# Default behavior: if all elements are numbers in both, assume they match
if _fun.elements_are_numbers(x) and _fun.elements_are_numbers(y):
x = [x]
y = [y] # depends on [control=['if'], data=[]]
    # Second default behavior: shared array [1,2,3], [[1,2,1],[1,2,1]] or vice versa
if _fun.elements_are_numbers(x) and (not _fun.elements_are_numbers(y)):
x = [x] * len(y) # depends on [control=['if'], data=[]]
if _fun.elements_are_numbers(y) and (not _fun.elements_are_numbers(x)):
y = [y] * len(x) # depends on [control=['if'], data=[]]
# Clean up any remaining Nones
for n in range(len(x)):
if x[n] is None:
x[n] = list(range(len(y[n]))) # depends on [control=['if'], data=[]]
if y[n] is None:
y[n] = list(range(len(x[n]))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
return (x, y) |
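Two behavior checks with invented inputs, assuming the function and its `_fun` helpers are importable from the same module:

x, y = _match_data_sets([1, 2, 3], None)
# x == [[1, 2, 3]], y == [[0, 1, 2]]   -- missing y synthesized as indices
x, y = _match_data_sets([1, 2], [[5, 6], [7, 8]])
# x == [[1, 2], [1, 2]]                -- shared x broadcast to both curves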
def fromJSON(value):
"""loads the GP object from a JSON string """
j = json.loads(value)
v = GPDate()
if "defaultValue" in j:
v.value = j['defaultValue']
else:
v.value = j['value']
if 'paramName' in j:
v.paramName = j['paramName']
elif 'name' in j:
v.paramName = j['name']
#from datetime import datetime
#if isinstance(value, datetime):
#v.value = local_time_to_online(value)
#else:
#v.value = value
#v.paramName = j['paramName']
return v | def function[fromJSON, parameter[value]]:
constant[loads the GP object from a JSON string ]
variable[j] assign[=] call[name[json].loads, parameter[name[value]]]
variable[v] assign[=] call[name[GPDate], parameter[]]
if compare[constant[defaultValue] in name[j]] begin[:]
name[v].value assign[=] call[name[j]][constant[defaultValue]]
if compare[constant[paramName] in name[j]] begin[:]
name[v].paramName assign[=] call[name[j]][constant[paramName]]
return[name[v]] | keyword[def] identifier[fromJSON] ( identifier[value] ):
literal[string]
identifier[j] = identifier[json] . identifier[loads] ( identifier[value] )
identifier[v] = identifier[GPDate] ()
keyword[if] literal[string] keyword[in] identifier[j] :
identifier[v] . identifier[value] = identifier[j] [ literal[string] ]
keyword[else] :
identifier[v] . identifier[value] = identifier[j] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[j] :
identifier[v] . identifier[paramName] = identifier[j] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[j] :
identifier[v] . identifier[paramName] = identifier[j] [ literal[string] ]
keyword[return] identifier[v] | def fromJSON(value):
"""loads the GP object from a JSON string """
j = json.loads(value)
v = GPDate()
if 'defaultValue' in j:
v.value = j['defaultValue'] # depends on [control=['if'], data=['j']]
else:
v.value = j['value']
if 'paramName' in j:
v.paramName = j['paramName'] # depends on [control=['if'], data=['j']]
elif 'name' in j:
v.paramName = j['name'] # depends on [control=['if'], data=['j']]
#from datetime import datetime
#if isinstance(value, datetime):
#v.value = local_time_to_online(value)
#else:
#v.value = value
#v.paramName = j['paramName']
return v |
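Both key spellings the parser accepts, as a hedged sketch; the timestamp value is invented, and since the function takes no `self` it is presumably registered as a @staticmethod on GPDate.

GPDate.fromJSON('{"paramName": "start_date", "value": 1199145600000}')
GPDate.fromJSON('{"name": "start_date", "defaultValue": 1199145600000}')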
def plot_file_distances(dist_matrix):
"""
Plots dist_matrix
Parameters
----------
dist_matrix: np.ndarray
"""
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(111)
ax.matshow(dist_matrix, interpolation='nearest',
cmap=plt.cm.get_cmap('PuBu')) | def function[plot_file_distances, parameter[dist_matrix]]:
constant[
Plots dist_matrix
Parameters
----------
dist_matrix: np.ndarray
]
import module[matplotlib.pyplot] as alias[plt]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
call[name[ax].matshow, parameter[name[dist_matrix]]] | keyword[def] identifier[plot_file_distances] ( identifier[dist_matrix] ):
literal[string]
keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt]
identifier[fig] = identifier[plt] . identifier[figure] ()
identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] )
identifier[ax] . identifier[matshow] ( identifier[dist_matrix] , identifier[interpolation] = literal[string] ,
identifier[cmap] = identifier[plt] . identifier[cm] . identifier[get_cmap] ( literal[string] )) | def plot_file_distances(dist_matrix):
"""
Plots dist_matrix
Parameters
----------
dist_matrix: np.ndarray
"""
import matplotlib.pyplot as plt
fig = plt.figure()
ax = fig.add_subplot(111)
ax.matshow(dist_matrix, interpolation='nearest', cmap=plt.cm.get_cmap('PuBu')) |
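Minimal usage with a random symmetric matrix standing in for real file distances (purely illustrative):

import numpy as np
import matplotlib.pyplot as plt

d = np.random.rand(8, 8)
plot_file_distances((d + d.T) / 2)   # symmetrize so it reads as distances
plt.show()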
def stddev(x, sample_axis=0, keepdims=False, name=None):
"""Estimate standard deviation using samples.
Given `N` samples of scalar valued random variable `X`, standard deviation may
be estimated as
```none
Stddev[X] := Sqrt[Var[X]],
Var[X] := N^{-1} sum_{n=1}^N (X_n - Xbar) Conj{(X_n - Xbar)},
Xbar := N^{-1} sum_{n=1}^N X_n
```
```python
x = tf.random_normal(shape=(100, 2, 3))
# stddev[i, j] is the sample standard deviation of the (i, j) batch member.
stddev = tfp.stats.stddev(x, sample_axis=0)
```
Scaling a unit normal by a standard deviation produces normal samples
with that standard deviation.
```python
observed_data = read_data_samples(...)
stddev = tfp.stats.stddev(observed_data)
# Make fake_data with the same standard deviation as observed_data.
fake_data = stddev * tf.random_normal(shape=(100,))
```
Notice we divide by `N` (the numpy default), which does not create `NaN`
when `N = 1`, but is slightly biased.
Args:
x: A numeric `Tensor` holding samples.
sample_axis: Scalar or vector `Tensor` designating axis holding samples, or
      `None` (meaning all axes hold samples).
Default value: `0` (leftmost dimension).
keepdims: Boolean. Whether to keep the sample axis as singletons.
name: Python `str` name prefixed to Ops created by this function.
Default value: `None` (i.e., `'stddev'`).
Returns:
stddev: A `Tensor` of same `dtype` as the `x`, and rank equal to
`rank(x) - len(sample_axis)`
"""
with tf.compat.v1.name_scope(name, 'stddev', values=[x, sample_axis]):
return tf.sqrt(variance(x, sample_axis=sample_axis, keepdims=keepdims)) | def function[stddev, parameter[x, sample_axis, keepdims, name]]:
constant[Estimate standard deviation using samples.
Given `N` samples of scalar valued random variable `X`, standard deviation may
be estimated as
```none
Stddev[X] := Sqrt[Var[X]],
Var[X] := N^{-1} sum_{n=1}^N (X_n - Xbar) Conj{(X_n - Xbar)},
Xbar := N^{-1} sum_{n=1}^N X_n
```
```python
x = tf.random_normal(shape=(100, 2, 3))
# stddev[i, j] is the sample standard deviation of the (i, j) batch member.
stddev = tfp.stats.stddev(x, sample_axis=0)
```
Scaling a unit normal by a standard deviation produces normal samples
with that standard deviation.
```python
observed_data = read_data_samples(...)
stddev = tfp.stats.stddev(observed_data)
# Make fake_data with the same standard deviation as observed_data.
fake_data = stddev * tf.random_normal(shape=(100,))
```
Notice we divide by `N` (the numpy default), which does not create `NaN`
when `N = 1`, but is slightly biased.
Args:
x: A numeric `Tensor` holding samples.
sample_axis: Scalar or vector `Tensor` designating axis holding samples, or
      `None` (meaning all axes hold samples).
Default value: `0` (leftmost dimension).
keepdims: Boolean. Whether to keep the sample axis as singletons.
name: Python `str` name prefixed to Ops created by this function.
Default value: `None` (i.e., `'stddev'`).
Returns:
stddev: A `Tensor` of same `dtype` as the `x`, and rank equal to
`rank(x) - len(sample_axis)`
]
with call[name[tf].compat.v1.name_scope, parameter[name[name], constant[stddev]]] begin[:]
return[call[name[tf].sqrt, parameter[call[name[variance], parameter[name[x]]]]]] | keyword[def] identifier[stddev] ( identifier[x] , identifier[sample_axis] = literal[int] , identifier[keepdims] = keyword[False] , identifier[name] = keyword[None] ):
literal[string]
keyword[with] identifier[tf] . identifier[compat] . identifier[v1] . identifier[name_scope] ( identifier[name] , literal[string] , identifier[values] =[ identifier[x] , identifier[sample_axis] ]):
keyword[return] identifier[tf] . identifier[sqrt] ( identifier[variance] ( identifier[x] , identifier[sample_axis] = identifier[sample_axis] , identifier[keepdims] = identifier[keepdims] )) | def stddev(x, sample_axis=0, keepdims=False, name=None):
"""Estimate standard deviation using samples.
Given `N` samples of scalar valued random variable `X`, standard deviation may
be estimated as
```none
Stddev[X] := Sqrt[Var[X]],
Var[X] := N^{-1} sum_{n=1}^N (X_n - Xbar) Conj{(X_n - Xbar)},
Xbar := N^{-1} sum_{n=1}^N X_n
```
```python
x = tf.random_normal(shape=(100, 2, 3))
# stddev[i, j] is the sample standard deviation of the (i, j) batch member.
stddev = tfp.stats.stddev(x, sample_axis=0)
```
Scaling a unit normal by a standard deviation produces normal samples
with that standard deviation.
```python
observed_data = read_data_samples(...)
stddev = tfp.stats.stddev(observed_data)
# Make fake_data with the same standard deviation as observed_data.
fake_data = stddev * tf.random_normal(shape=(100,))
```
Notice we divide by `N` (the numpy default), which does not create `NaN`
when `N = 1`, but is slightly biased.
Args:
x: A numeric `Tensor` holding samples.
sample_axis: Scalar or vector `Tensor` designating axis holding samples, or
      `None` (meaning all axes hold samples).
Default value: `0` (leftmost dimension).
keepdims: Boolean. Whether to keep the sample axis as singletons.
name: Python `str` name prefixed to Ops created by this function.
Default value: `None` (i.e., `'stddev'`).
Returns:
stddev: A `Tensor` of same `dtype` as the `x`, and rank equal to
`rank(x) - len(sample_axis)`
"""
with tf.compat.v1.name_scope(name, 'stddev', values=[x, sample_axis]):
return tf.sqrt(variance(x, sample_axis=sample_axis, keepdims=keepdims)) # depends on [control=['with'], data=[]] |
def OnClearGlobals(self, event):
"""Clear globals event handler"""
msg = _("Deleting globals and reloading modules cannot be undone."
" Proceed?")
short_msg = _("Really delete globals and modules?")
choice = self.main_window.interfaces.get_warning_choice(msg, short_msg)
if choice:
self.main_window.grid.actions.clear_globals_reload_modules()
statustext = _("Globals cleared and base modules reloaded.")
post_command_event(self.main_window, self.main_window.StatusBarMsg,
text=statustext) | def function[OnClearGlobals, parameter[self, event]]:
constant[Clear globals event handler]
variable[msg] assign[=] call[name[_], parameter[constant[Deleting globals and reloading modules cannot be undone. Proceed?]]]
variable[short_msg] assign[=] call[name[_], parameter[constant[Really delete globals and modules?]]]
variable[choice] assign[=] call[name[self].main_window.interfaces.get_warning_choice, parameter[name[msg], name[short_msg]]]
if name[choice] begin[:]
call[name[self].main_window.grid.actions.clear_globals_reload_modules, parameter[]]
variable[statustext] assign[=] call[name[_], parameter[constant[Globals cleared and base modules reloaded.]]]
call[name[post_command_event], parameter[name[self].main_window, name[self].main_window.StatusBarMsg]] | keyword[def] identifier[OnClearGlobals] ( identifier[self] , identifier[event] ):
literal[string]
identifier[msg] = identifier[_] ( literal[string]
literal[string] )
identifier[short_msg] = identifier[_] ( literal[string] )
identifier[choice] = identifier[self] . identifier[main_window] . identifier[interfaces] . identifier[get_warning_choice] ( identifier[msg] , identifier[short_msg] )
keyword[if] identifier[choice] :
identifier[self] . identifier[main_window] . identifier[grid] . identifier[actions] . identifier[clear_globals_reload_modules] ()
identifier[statustext] = identifier[_] ( literal[string] )
identifier[post_command_event] ( identifier[self] . identifier[main_window] , identifier[self] . identifier[main_window] . identifier[StatusBarMsg] ,
identifier[text] = identifier[statustext] ) | def OnClearGlobals(self, event):
"""Clear globals event handler"""
msg = _('Deleting globals and reloading modules cannot be undone. Proceed?')
short_msg = _('Really delete globals and modules?')
choice = self.main_window.interfaces.get_warning_choice(msg, short_msg)
if choice:
self.main_window.grid.actions.clear_globals_reload_modules()
statustext = _('Globals cleared and base modules reloaded.')
post_command_event(self.main_window, self.main_window.StatusBarMsg, text=statustext) # depends on [control=['if'], data=[]] |
def search():
""" Search a movie on TMDB.
"""
redis_key = 's_%s' % request.args['query'].lower()
cached = redis_ro_conn.get(redis_key)
if cached:
return Response(cached)
else:
try:
found = get_on_tmdb(u'/search/movie', query=request.args['query'])
movies = []
for movie in found['results']:
cast = get_on_tmdb(u'/movie/%s/casts' % movie['id'])
year = datetime.strptime(movie['release_date'], '%Y-%m-%d').year if movie['release_date'] else None
movies.append({'title': movie['original_title'],
'directors': [x['name'] for x in cast['crew'] if x['department'] == 'Directing' and x['job'] == 'Director'],
'year': year,
'_tmdb_id': movie['id']})
except requests.HTTPError as err:
return Response('TMDB API error: %s' % str(err), status=err.response.status_code)
json_response = json.dumps({'movies': movies})
redis_conn.setex(redis_key, app.config['CACHE_TTL'], json_response)
return Response(json_response) | def function[search, parameter[]]:
constant[ Search a movie on TMDB.
]
variable[redis_key] assign[=] binary_operation[constant[s_%s] <ast.Mod object at 0x7da2590d6920> call[call[name[request].args][constant[query]].lower, parameter[]]]
variable[cached] assign[=] call[name[redis_ro_conn].get, parameter[name[redis_key]]]
if name[cached] begin[:]
return[call[name[Response], parameter[name[cached]]]] | keyword[def] identifier[search] ():
literal[string]
identifier[redis_key] = literal[string] % identifier[request] . identifier[args] [ literal[string] ]. identifier[lower] ()
identifier[cached] = identifier[redis_ro_conn] . identifier[get] ( identifier[redis_key] )
keyword[if] identifier[cached] :
keyword[return] identifier[Response] ( identifier[cached] )
keyword[else] :
keyword[try] :
identifier[found] = identifier[get_on_tmdb] ( literal[string] , identifier[query] = identifier[request] . identifier[args] [ literal[string] ])
identifier[movies] =[]
keyword[for] identifier[movie] keyword[in] identifier[found] [ literal[string] ]:
identifier[cast] = identifier[get_on_tmdb] ( literal[string] % identifier[movie] [ literal[string] ])
identifier[year] = identifier[datetime] . identifier[strptime] ( identifier[movie] [ literal[string] ], literal[string] ). identifier[year] keyword[if] identifier[movie] [ literal[string] ] keyword[else] keyword[None]
identifier[movies] . identifier[append] ({ literal[string] : identifier[movie] [ literal[string] ],
literal[string] :[ identifier[x] [ literal[string] ] keyword[for] identifier[x] keyword[in] identifier[cast] [ literal[string] ] keyword[if] identifier[x] [ literal[string] ]== literal[string] keyword[and] identifier[x] [ literal[string] ]== literal[string] ],
literal[string] : identifier[year] ,
literal[string] : identifier[movie] [ literal[string] ]})
keyword[except] identifier[requests] . identifier[HTTPError] keyword[as] identifier[err] :
keyword[return] identifier[Response] ( literal[string] % identifier[str] ( identifier[err] ), identifier[status] = identifier[err] . identifier[response] . identifier[status_code] )
identifier[json_response] = identifier[json] . identifier[dumps] ({ literal[string] : identifier[movies] })
identifier[redis_conn] . identifier[setex] ( identifier[redis_key] , identifier[app] . identifier[config] [ literal[string] ], identifier[json_response] )
keyword[return] identifier[Response] ( identifier[json_response] ) | def search():
""" Search a movie on TMDB.
"""
redis_key = 's_%s' % request.args['query'].lower()
cached = redis_ro_conn.get(redis_key)
if cached:
return Response(cached) # depends on [control=['if'], data=[]]
else:
try:
found = get_on_tmdb(u'/search/movie', query=request.args['query'])
movies = []
for movie in found['results']:
cast = get_on_tmdb(u'/movie/%s/casts' % movie['id'])
year = datetime.strptime(movie['release_date'], '%Y-%m-%d').year if movie['release_date'] else None
movies.append({'title': movie['original_title'], 'directors': [x['name'] for x in cast['crew'] if x['department'] == 'Directing' and x['job'] == 'Director'], 'year': year, '_tmdb_id': movie['id']}) # depends on [control=['for'], data=['movie']] # depends on [control=['try'], data=[]]
except requests.HTTPError as err:
return Response('TMDB API error: %s' % str(err), status=err.response.status_code) # depends on [control=['except'], data=['err']]
json_response = json.dumps({'movies': movies})
redis_conn.setex(redis_key, app.config['CACHE_TTL'], json_response)
return Response(json_response) |
def next(self) -> mx.io.DataBatch:
"""
Returns the next batch from the data iterator.
"""
if not self.iter_next():
raise StopIteration
i, j = self.batch_indices[self.curr_batch_index]
self.curr_batch_index += 1
batch_size = self.bucket_batch_sizes[i].batch_size
source = self.data.source[i][j:j + batch_size]
target = self.data.target[i][j:j + batch_size]
data = [source, target]
label = [self.data.label[i][j:j + batch_size]]
provide_data = [mx.io.DataDesc(name=n, shape=x.shape, layout=C.BATCH_MAJOR) for n, x in
zip(self.data_names, data)]
provide_label = [mx.io.DataDesc(name=n, shape=x.shape, layout=C.BATCH_MAJOR) for n, x in
zip(self.label_names, label)]
    # TODO: the number of pad examples is not set here if the fill-up policy is padding
return mx.io.DataBatch(data, label,
pad=0, index=None, bucket_key=self.buckets[i],
provide_data=provide_data, provide_label=provide_label) | def function[next, parameter[self]]:
constant[
Returns the next batch from the data iterator.
]
if <ast.UnaryOp object at 0x7da1b1db3850> begin[:]
<ast.Raise object at 0x7da1b1db38e0>
<ast.Tuple object at 0x7da1b1db3100> assign[=] call[name[self].batch_indices][name[self].curr_batch_index]
<ast.AugAssign object at 0x7da1b1db2f50>
variable[batch_size] assign[=] call[name[self].bucket_batch_sizes][name[i]].batch_size
variable[source] assign[=] call[call[name[self].data.source][name[i]]][<ast.Slice object at 0x7da1b1db3340>]
variable[target] assign[=] call[call[name[self].data.target][name[i]]][<ast.Slice object at 0x7da1b1db2e90>]
variable[data] assign[=] list[[<ast.Name object at 0x7da1b1d99750>, <ast.Name object at 0x7da1b1d99930>]]
variable[label] assign[=] list[[<ast.Subscript object at 0x7da1b1d992a0>]]
variable[provide_data] assign[=] <ast.ListComp object at 0x7da1b1d99870>
variable[provide_label] assign[=] <ast.ListComp object at 0x7da1b1d998a0>
return[call[name[mx].io.DataBatch, parameter[name[data], name[label]]]] | keyword[def] identifier[next] ( identifier[self] )-> identifier[mx] . identifier[io] . identifier[DataBatch] :
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[iter_next] ():
keyword[raise] identifier[StopIteration]
identifier[i] , identifier[j] = identifier[self] . identifier[batch_indices] [ identifier[self] . identifier[curr_batch_index] ]
identifier[self] . identifier[curr_batch_index] += literal[int]
identifier[batch_size] = identifier[self] . identifier[bucket_batch_sizes] [ identifier[i] ]. identifier[batch_size]
identifier[source] = identifier[self] . identifier[data] . identifier[source] [ identifier[i] ][ identifier[j] : identifier[j] + identifier[batch_size] ]
identifier[target] = identifier[self] . identifier[data] . identifier[target] [ identifier[i] ][ identifier[j] : identifier[j] + identifier[batch_size] ]
identifier[data] =[ identifier[source] , identifier[target] ]
identifier[label] =[ identifier[self] . identifier[data] . identifier[label] [ identifier[i] ][ identifier[j] : identifier[j] + identifier[batch_size] ]]
identifier[provide_data] =[ identifier[mx] . identifier[io] . identifier[DataDesc] ( identifier[name] = identifier[n] , identifier[shape] = identifier[x] . identifier[shape] , identifier[layout] = identifier[C] . identifier[BATCH_MAJOR] ) keyword[for] identifier[n] , identifier[x] keyword[in]
identifier[zip] ( identifier[self] . identifier[data_names] , identifier[data] )]
identifier[provide_label] =[ identifier[mx] . identifier[io] . identifier[DataDesc] ( identifier[name] = identifier[n] , identifier[shape] = identifier[x] . identifier[shape] , identifier[layout] = identifier[C] . identifier[BATCH_MAJOR] ) keyword[for] identifier[n] , identifier[x] keyword[in]
identifier[zip] ( identifier[self] . identifier[label_names] , identifier[label] )]
keyword[return] identifier[mx] . identifier[io] . identifier[DataBatch] ( identifier[data] , identifier[label] ,
identifier[pad] = literal[int] , identifier[index] = keyword[None] , identifier[bucket_key] = identifier[self] . identifier[buckets] [ identifier[i] ],
identifier[provide_data] = identifier[provide_data] , identifier[provide_label] = identifier[provide_label] ) | def next(self) -> mx.io.DataBatch:
"""
Returns the next batch from the data iterator.
"""
if not self.iter_next():
raise StopIteration # depends on [control=['if'], data=[]]
(i, j) = self.batch_indices[self.curr_batch_index]
self.curr_batch_index += 1
batch_size = self.bucket_batch_sizes[i].batch_size
source = self.data.source[i][j:j + batch_size]
target = self.data.target[i][j:j + batch_size]
data = [source, target]
label = [self.data.label[i][j:j + batch_size]]
provide_data = [mx.io.DataDesc(name=n, shape=x.shape, layout=C.BATCH_MAJOR) for (n, x) in zip(self.data_names, data)]
provide_label = [mx.io.DataDesc(name=n, shape=x.shape, layout=C.BATCH_MAJOR) for (n, x) in zip(self.label_names, label)]
# TODO: num pad examples is not set here if fillup policy would be padding
return mx.io.DataBatch(data, label, pad=0, index=None, bucket_key=self.buckets[i], provide_data=provide_data, provide_label=provide_label) |
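The batch selection above is driven entirely by precomputed (bucket, offset) pairs. A minimal pure-Python sketch of that bookkeeping, with made-up sample counts and the mxnet/sockeye pieces left out (names mirror the attributes used above):

# Hypothetical bucket state standing in for the iterator's fields.
buckets = [(10, 10), (20, 20)]     # (source_len, target_len) per bucket
bucket_batch_sizes = [4, 2]        # batch size chosen for each bucket
samples_per_bucket = [9, 5]        # sentences that landed in each bucket

# Every batch is a (bucket_index, start_offset) pair -- exactly what
# self.batch_indices holds and what next() unpacks as i, j.
batch_indices = [(i, j)
                 for i, n in enumerate(samples_per_bucket)
                 for j in range(0, n - bucket_batch_sizes[i] + 1,
                                bucket_batch_sizes[i])]

for i, j in batch_indices:
    size = bucket_batch_sizes[i]
    print("bucket key %s -> rows %d:%d" % (buckets[i], j, j + size))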
def get_password_gui(device, options):
"""Get the password to unlock a device from GUI."""
text = _('Enter password for {0.device_presentation}: ', device)
try:
return password_dialog(device.id_uuid, 'udiskie', text, options)
except RuntimeError:
return None | def function[get_password_gui, parameter[device, options]]:
constant[Get the password to unlock a device from GUI.]
variable[text] assign[=] call[name[_], parameter[constant[Enter password for {0.device_presentation}: ], name[device]]]
<ast.Try object at 0x7da207f02bc0> | keyword[def] identifier[get_password_gui] ( identifier[device] , identifier[options] ):
literal[string]
identifier[text] = identifier[_] ( literal[string] , identifier[device] )
keyword[try] :
keyword[return] identifier[password_dialog] ( identifier[device] . identifier[id_uuid] , literal[string] , identifier[text] , identifier[options] )
keyword[except] identifier[RuntimeError] :
keyword[return] keyword[None] | def get_password_gui(device, options):
"""Get the password to unlock a device from GUI."""
text = _('Enter password for {0.device_presentation}: ', device)
try:
return password_dialog(device.id_uuid, 'udiskie', text, options) # depends on [control=['try'], data=[]]
except RuntimeError:
return None # depends on [control=['except'], data=[]] |
def _children(self):
"""Yield all direct children of this object."""
if self.declarations:
yield self.declarations
if isinstance(self.condition, CodeExpression):
yield self.condition
if self.increment:
yield self.increment
for codeobj in self.body._children():
yield codeobj | def function[_children, parameter[self]]:
constant[Yield all direct children of this object.]
if name[self].declarations begin[:]
<ast.Yield object at 0x7da18ede5ab0>
if call[name[isinstance], parameter[name[self].condition, name[CodeExpression]]] begin[:]
<ast.Yield object at 0x7da18ede7220>
if name[self].increment begin[:]
<ast.Yield object at 0x7da18ede5e10>
for taget[name[codeobj]] in starred[call[name[self].body._children, parameter[]]] begin[:]
<ast.Yield object at 0x7da18ede7460> | keyword[def] identifier[_children] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[declarations] :
keyword[yield] identifier[self] . identifier[declarations]
keyword[if] identifier[isinstance] ( identifier[self] . identifier[condition] , identifier[CodeExpression] ):
keyword[yield] identifier[self] . identifier[condition]
keyword[if] identifier[self] . identifier[increment] :
keyword[yield] identifier[self] . identifier[increment]
keyword[for] identifier[codeobj] keyword[in] identifier[self] . identifier[body] . identifier[_children] ():
keyword[yield] identifier[codeobj] | def _children(self):
"""Yield all direct children of this object."""
if self.declarations:
yield self.declarations # depends on [control=['if'], data=[]]
if isinstance(self.condition, CodeExpression):
yield self.condition # depends on [control=['if'], data=[]]
if self.increment:
yield self.increment # depends on [control=['if'], data=[]]
for codeobj in self.body._children():
yield codeobj # depends on [control=['for'], data=['codeobj']] |
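_children() is the standard yield-your-direct-children generator; composed recursively it gives a depth-first walk. A self-contained toy (not the real CodeExpression hierarchy) showing the pattern:

class Node:
    def __init__(self, name, children=()):
        self.name = name
        self.children = list(children)

    def _children(self):
        # Yield all direct children of this object.
        for child in self.children:
            yield child

def walk(node):
    # Depth-first traversal built on top of _children().
    for child in node._children():
        yield child
        yield from walk(child)

tree = Node("for", [Node("cond"), Node("body", [Node("stmt")])])
print([n.name for n in walk(tree)])  # ['cond', 'body', 'stmt']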
def circle(radius=None, center=None, **kwargs):
"""
    Create a Path2D containing a single circle with the
    specified radius and center.
    Parameters
    --------------
    radius : float
      Radius of the circle
    center : (2,) float
      Center of the circle in XY
    Returns
    -------------
    circle : Path2D
      Path containing the specified circle
"""
from .path import Path2D
if center is None:
center = [0.0, 0.0]
else:
center = np.asanyarray(center, dtype=np.float64)
if radius is None:
radius = 1.0
else:
radius = float(radius)
# (3, 2) float, points on arc
three = arc.to_threepoint(angles=[0, np.pi],
center=center,
radius=radius) + center
result = Path2D(entities=[entities.Arc(points=np.arange(3), closed=True)],
vertices=three,
**kwargs)
return result | def function[circle, parameter[radius, center]]:
constant[
    Create a Path2D containing a single circle with the
    specified radius and center.
    Parameters
    --------------
    radius : float
      Radius of the circle
    center : (2,) float
      Center of the circle in XY
    Returns
    -------------
    circle : Path2D
      Path containing the specified circle
]
from relative_module[path] import module[Path2D]
if compare[name[center] is constant[None]] begin[:]
variable[center] assign[=] list[[<ast.Constant object at 0x7da1b22d4910>, <ast.Constant object at 0x7da1b22d58d0>]]
if compare[name[radius] is constant[None]] begin[:]
variable[radius] assign[=] constant[1.0]
variable[three] assign[=] binary_operation[call[name[arc].to_threepoint, parameter[]] + name[center]]
variable[result] assign[=] call[name[Path2D], parameter[]]
return[name[result]] | keyword[def] identifier[circle] ( identifier[radius] = keyword[None] , identifier[center] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[from] . identifier[path] keyword[import] identifier[Path2D]
keyword[if] identifier[center] keyword[is] keyword[None] :
identifier[center] =[ literal[int] , literal[int] ]
keyword[else] :
identifier[center] = identifier[np] . identifier[asanyarray] ( identifier[center] , identifier[dtype] = identifier[np] . identifier[float64] )
keyword[if] identifier[radius] keyword[is] keyword[None] :
identifier[radius] = literal[int]
keyword[else] :
identifier[radius] = identifier[float] ( identifier[radius] )
identifier[three] = identifier[arc] . identifier[to_threepoint] ( identifier[angles] =[ literal[int] , identifier[np] . identifier[pi] ],
identifier[center] = identifier[center] ,
identifier[radius] = identifier[radius] )+ identifier[center]
identifier[result] = identifier[Path2D] ( identifier[entities] =[ identifier[entities] . identifier[Arc] ( identifier[points] = identifier[np] . identifier[arange] ( literal[int] ), identifier[closed] = keyword[True] )],
identifier[vertices] = identifier[three] ,
** identifier[kwargs] )
keyword[return] identifier[result] | def circle(radius=None, center=None, **kwargs):
"""
    Create a Path2D containing a single circle with the
    specified radius and center.
    Parameters
    --------------
    radius : float
      Radius of the circle
    center : (2,) float
      Center of the circle in XY
    Returns
    -------------
    circle : Path2D
      Path containing the specified circle
"""
from .path import Path2D
if center is None:
center = [0.0, 0.0] # depends on [control=['if'], data=['center']]
else:
center = np.asanyarray(center, dtype=np.float64)
if radius is None:
radius = 1.0 # depends on [control=['if'], data=['radius']]
else:
radius = float(radius)
# (3, 2) float, points on arc
three = arc.to_threepoint(angles=[0, np.pi], center=center, radius=radius) + center
result = Path2D(entities=[entities.Arc(points=np.arange(3), closed=True)], vertices=three, **kwargs)
return result |
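If this is the trimesh path helper it appears to be, usage is a one-liner; an untested sketch assuming the function is exposed as trimesh.path.creation.circle and that trimesh (with numpy) is installed:

from trimesh.path import creation

path = creation.circle(radius=2.0, center=[1.0, 1.0])
print(path.length)  # expect roughly 2 * pi * 2.0 for the closed circle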
def add_cell_params(self, params, pos=None):
"""
Add cell of Python parameters
:param params: parameters to add
:return:
"""
self.params = params
cell_str = '# Parameters:\n'
for k, v in params.items():
cell_str += "{} = {}\n".format(k, repr(v))
self.add_cell_code(cell_str, pos) | def function[add_cell_params, parameter[self, params, pos]]:
constant[
Add cell of Python parameters
:param params: parameters to add
:return:
]
name[self].params assign[=] name[params]
variable[cell_str] assign[=] constant[# Parameters:
]
for taget[tuple[[<ast.Name object at 0x7da1b0cf63b0>, <ast.Name object at 0x7da1b0cf6bf0>]]] in starred[call[name[params].items, parameter[]]] begin[:]
<ast.AugAssign object at 0x7da1b0b37250>
call[name[self].add_cell_code, parameter[name[cell_str], name[pos]]] | keyword[def] identifier[add_cell_params] ( identifier[self] , identifier[params] , identifier[pos] = keyword[None] ):
literal[string]
identifier[self] . identifier[params] = identifier[params]
identifier[cell_str] = literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[params] . identifier[items] ():
identifier[cell_str] += literal[string] . identifier[format] ( identifier[k] , identifier[repr] ( identifier[v] ))
identifier[self] . identifier[add_cell_code] ( identifier[cell_str] , identifier[pos] ) | def add_cell_params(self, params, pos=None):
"""
Add cell of Python parameters
:param params: parameters to add
:return:
"""
self.params = params
cell_str = '# Parameters:\n'
for (k, v) in params.items():
cell_str += '{} = {}\n'.format(k, repr(v)) # depends on [control=['for'], data=[]]
self.add_cell_code(cell_str, pos) |
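The generated cell is nothing more than a repr()-formatted parameter block; the formatting step reproduced standalone, without any notebook machinery:

params = {"alpha": 0.1, "name": "run-1", "debug": True}
cell_str = '# Parameters:\n'
for k, v in params.items():
    cell_str += "{} = {}\n".format(k, repr(v))
print(cell_str)
# # Parameters:
# alpha = 0.1
# name = 'run-1'
# debug = True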
def make_key(table_name, objid):
"""Create an object key for storage."""
key = datastore.Key()
path = key.path_element.add()
path.kind = table_name
path.name = str(objid)
return key | def function[make_key, parameter[table_name, objid]]:
constant[Create an object key for storage.]
variable[key] assign[=] call[name[datastore].Key, parameter[]]
variable[path] assign[=] call[name[key].path_element.add, parameter[]]
name[path].kind assign[=] name[table_name]
name[path].name assign[=] call[name[str], parameter[name[objid]]]
return[name[key]] | keyword[def] identifier[make_key] ( identifier[table_name] , identifier[objid] ):
literal[string]
identifier[key] = identifier[datastore] . identifier[Key] ()
identifier[path] = identifier[key] . identifier[path_element] . identifier[add] ()
identifier[path] . identifier[kind] = identifier[table_name]
identifier[path] . identifier[name] = identifier[str] ( identifier[objid] )
keyword[return] identifier[key] | def make_key(table_name, objid):
"""Create an object key for storage."""
key = datastore.Key()
path = key.path_element.add()
path.kind = table_name
path.name = str(objid)
return key |
def add_auth_attempt(self, auth_type, successful, **kwargs):
"""
:param username:
:param password:
:param auth_type: possible values:
plain: plaintext username/password
:return:
"""
entry = {'timestamp': datetime.utcnow(),
'auth': auth_type,
'id': uuid.uuid4(),
'successful': successful}
log_string = ''
for key, value in kwargs.iteritems():
if key == 'challenge' or key == 'response':
entry[key] = repr(value)
else:
entry[key] = value
log_string += '{0}:{1}, '.format(key, value)
self.login_attempts.append(entry) | def function[add_auth_attempt, parameter[self, auth_type, successful]]:
constant[
:param username:
:param password:
:param auth_type: possible values:
plain: plaintext username/password
:return:
]
variable[entry] assign[=] dictionary[[<ast.Constant object at 0x7da1b11ed780>, <ast.Constant object at 0x7da1b11eda20>, <ast.Constant object at 0x7da1b11ec280>, <ast.Constant object at 0x7da1b11ec2b0>], [<ast.Call object at 0x7da1b11ec1c0>, <ast.Name object at 0x7da1b11ec2e0>, <ast.Call object at 0x7da1b11ec160>, <ast.Name object at 0x7da1b11ed8d0>]]
variable[log_string] assign[=] constant[]
for taget[tuple[[<ast.Name object at 0x7da1b11ee110>, <ast.Name object at 0x7da1b11ee080>]]] in starred[call[name[kwargs].iteritems, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b11ee230> begin[:]
call[name[entry]][name[key]] assign[=] call[name[repr], parameter[name[value]]]
call[name[self].login_attempts.append, parameter[name[entry]]] | keyword[def] identifier[add_auth_attempt] ( identifier[self] , identifier[auth_type] , identifier[successful] ,** identifier[kwargs] ):
literal[string]
identifier[entry] ={ literal[string] : identifier[datetime] . identifier[utcnow] (),
literal[string] : identifier[auth_type] ,
literal[string] : identifier[uuid] . identifier[uuid4] (),
literal[string] : identifier[successful] }
identifier[log_string] = literal[string]
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[kwargs] . identifier[iteritems] ():
keyword[if] identifier[key] == literal[string] keyword[or] identifier[key] == literal[string] :
identifier[entry] [ identifier[key] ]= identifier[repr] ( identifier[value] )
keyword[else] :
identifier[entry] [ identifier[key] ]= identifier[value]
identifier[log_string] += literal[string] . identifier[format] ( identifier[key] , identifier[value] )
identifier[self] . identifier[login_attempts] . identifier[append] ( identifier[entry] ) | def add_auth_attempt(self, auth_type, successful, **kwargs):
"""
:param username:
:param password:
:param auth_type: possible values:
plain: plaintext username/password
:return:
"""
entry = {'timestamp': datetime.utcnow(), 'auth': auth_type, 'id': uuid.uuid4(), 'successful': successful}
log_string = ''
for (key, value) in kwargs.iteritems():
if key == 'challenge' or key == 'response':
entry[key] = repr(value) # depends on [control=['if'], data=[]]
else:
entry[key] = value
log_string += '{0}:{1}, '.format(key, value) # depends on [control=['for'], data=[]]
self.login_attempts.append(entry) |
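A standalone sketch of the log-entry construction, with the honeypot session reduced to a module-level list; challenge/response values get repr()'d while everything else passes through (Python 3 spelling, so .items() instead of the original's .iteritems()):

from datetime import datetime
import uuid

login_attempts = []

def add_auth_attempt(auth_type, successful, **kwargs):
    entry = {'timestamp': datetime.utcnow(),
             'auth': auth_type,
             'id': uuid.uuid4(),
             'successful': successful}
    for key, value in kwargs.items():
        entry[key] = repr(value) if key in ('challenge', 'response') else value
    login_attempts.append(entry)

add_auth_attempt('plain', False, username='root', password='toor')
print(login_attempts[0]['username'], login_attempts[0]['successful'])  # root False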
def _raw_request(self, method_name, region, url, query_params):
"""
Sends a request through the BaseApi instance provided, injecting the provided endpoint_name
into the method call, so the caller doesn't have to.
:param string method_name: The name of the calling method
:param string region: The region to execute this request on
:param string url: The full URL to the method being requested.
:param dict query_params: Query parameters to be provided in the HTTP request
"""
return self._base_api.raw_request(
self._endpoint_name, method_name, region, url, query_params
) | def function[_raw_request, parameter[self, method_name, region, url, query_params]]:
constant[
Sends a request through the BaseApi instance provided, injecting the provided endpoint_name
into the method call, so the caller doesn't have to.
:param string method_name: The name of the calling method
:param string region: The region to execute this request on
:param string url: The full URL to the method being requested.
:param dict query_params: Query parameters to be provided in the HTTP request
]
return[call[name[self]._base_api.raw_request, parameter[name[self]._endpoint_name, name[method_name], name[region], name[url], name[query_params]]]] | keyword[def] identifier[_raw_request] ( identifier[self] , identifier[method_name] , identifier[region] , identifier[url] , identifier[query_params] ):
literal[string]
keyword[return] identifier[self] . identifier[_base_api] . identifier[raw_request] (
identifier[self] . identifier[_endpoint_name] , identifier[method_name] , identifier[region] , identifier[url] , identifier[query_params]
) | def _raw_request(self, method_name, region, url, query_params):
"""
Sends a request through the BaseApi instance provided, injecting the provided endpoint_name
into the method call, so the caller doesn't have to.
:param string method_name: The name of the calling method
:param string region: The region to execute this request on
:param string url: The full URL to the method being requested.
:param dict query_params: Query parameters to be provided in the HTTP request
"""
return self._base_api.raw_request(self._endpoint_name, method_name, region, url, query_params) |
def pipe_createrss(context=None, _INPUT=None, conf=None, **kwargs):
"""An operator that converts a source into an RSS stream. Not loopable.
"""
conf = DotDict(conf)
for item in _INPUT:
item = DotDict(item)
yield {
value: item.get(conf.get(key, **kwargs))
for key, value in RSS_FIELDS.items()} | def function[pipe_createrss, parameter[context, _INPUT, conf]]:
constant[An operator that converts a source into an RSS stream. Not loopable.
]
variable[conf] assign[=] call[name[DotDict], parameter[name[conf]]]
for taget[name[item]] in starred[name[_INPUT]] begin[:]
variable[item] assign[=] call[name[DotDict], parameter[name[item]]]
<ast.Yield object at 0x7da1b045cbb0> | keyword[def] identifier[pipe_createrss] ( identifier[context] = keyword[None] , identifier[_INPUT] = keyword[None] , identifier[conf] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[conf] = identifier[DotDict] ( identifier[conf] )
keyword[for] identifier[item] keyword[in] identifier[_INPUT] :
identifier[item] = identifier[DotDict] ( identifier[item] )
keyword[yield] {
identifier[value] : identifier[item] . identifier[get] ( identifier[conf] . identifier[get] ( identifier[key] ,** identifier[kwargs] ))
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[RSS_FIELDS] . identifier[items] ()} | def pipe_createrss(context=None, _INPUT=None, conf=None, **kwargs):
"""An operator that converts a source into an RSS stream. Not loopable.
"""
conf = DotDict(conf)
for item in _INPUT:
item = DotDict(item)
yield {value: item.get(conf.get(key, **kwargs)) for (key, value) in RSS_FIELDS.items()} # depends on [control=['for'], data=['item']] |
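The operator is a per-item key remapping driven by RSS_FIELDS. A plain-dict sketch of the same idea (RSS_FIELDS and DotDict belong to the surrounding pipe2py library, so hypothetical stand-ins are used here):

# Hypothetical field map: output RSS field -> key in the source item.
FIELD_MAP = {'title': 'headline', 'link': 'url', 'description': 'summary'}

def create_rss(items):
    for item in items:
        yield {rss_key: item.get(src_key)
               for rss_key, src_key in FIELD_MAP.items()}

feed = list(create_rss([{'headline': 'Hi', 'url': 'http://x', 'summary': 'yo'}]))
print(feed[0])  # {'title': 'Hi', 'link': 'http://x', 'description': 'yo'}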
def send_success_response(self, msgid, methodname):
"""Send a CIM-XML response message back to the WBEM server that
indicates success."""
resp_xml = cim_xml.CIM(
cim_xml.MESSAGE(
cim_xml.SIMPLEEXPRSP(
cim_xml.EXPMETHODRESPONSE(
methodname),
), # noqa: E123
msgid, IMPLEMENTED_PROTOCOL_VERSION),
IMPLEMENTED_CIM_VERSION, IMPLEMENTED_DTD_VERSION)
resp_body = '<?xml version="1.0" encoding="utf-8" ?>\n' + \
resp_xml.toxml()
if isinstance(resp_body, six.text_type):
resp_body = resp_body.encode("utf-8")
http_code = 200
self.send_response(http_code, http_client.responses.get(http_code, ''))
self.send_header("Content-Type", "text/html")
self.send_header("Content-Length", str(len(resp_body)))
self.send_header("CIMExport", "MethodResponse")
self.end_headers()
self.wfile.write(resp_body) | def function[send_success_response, parameter[self, msgid, methodname]]:
constant[Send a CIM-XML response message back to the WBEM server that
indicates success.]
variable[resp_xml] assign[=] call[name[cim_xml].CIM, parameter[call[name[cim_xml].MESSAGE, parameter[call[name[cim_xml].SIMPLEEXPRSP, parameter[call[name[cim_xml].EXPMETHODRESPONSE, parameter[name[methodname]]]]], name[msgid], name[IMPLEMENTED_PROTOCOL_VERSION]]], name[IMPLEMENTED_CIM_VERSION], name[IMPLEMENTED_DTD_VERSION]]]
variable[resp_body] assign[=] binary_operation[constant[<?xml version="1.0" encoding="utf-8" ?>
] + call[name[resp_xml].toxml, parameter[]]]
if call[name[isinstance], parameter[name[resp_body], name[six].text_type]] begin[:]
variable[resp_body] assign[=] call[name[resp_body].encode, parameter[constant[utf-8]]]
variable[http_code] assign[=] constant[200]
call[name[self].send_response, parameter[name[http_code], call[name[http_client].responses.get, parameter[name[http_code], constant[]]]]]
call[name[self].send_header, parameter[constant[Content-Type], constant[text/html]]]
call[name[self].send_header, parameter[constant[Content-Length], call[name[str], parameter[call[name[len], parameter[name[resp_body]]]]]]]
call[name[self].send_header, parameter[constant[CIMExport], constant[MethodResponse]]]
call[name[self].end_headers, parameter[]]
call[name[self].wfile.write, parameter[name[resp_body]]] | keyword[def] identifier[send_success_response] ( identifier[self] , identifier[msgid] , identifier[methodname] ):
literal[string]
identifier[resp_xml] = identifier[cim_xml] . identifier[CIM] (
identifier[cim_xml] . identifier[MESSAGE] (
identifier[cim_xml] . identifier[SIMPLEEXPRSP] (
identifier[cim_xml] . identifier[EXPMETHODRESPONSE] (
identifier[methodname] ),
),
identifier[msgid] , identifier[IMPLEMENTED_PROTOCOL_VERSION] ),
identifier[IMPLEMENTED_CIM_VERSION] , identifier[IMPLEMENTED_DTD_VERSION] )
identifier[resp_body] = literal[string] + identifier[resp_xml] . identifier[toxml] ()
keyword[if] identifier[isinstance] ( identifier[resp_body] , identifier[six] . identifier[text_type] ):
identifier[resp_body] = identifier[resp_body] . identifier[encode] ( literal[string] )
identifier[http_code] = literal[int]
identifier[self] . identifier[send_response] ( identifier[http_code] , identifier[http_client] . identifier[responses] . identifier[get] ( identifier[http_code] , literal[string] ))
identifier[self] . identifier[send_header] ( literal[string] , literal[string] )
identifier[self] . identifier[send_header] ( literal[string] , identifier[str] ( identifier[len] ( identifier[resp_body] )))
identifier[self] . identifier[send_header] ( literal[string] , literal[string] )
identifier[self] . identifier[end_headers] ()
identifier[self] . identifier[wfile] . identifier[write] ( identifier[resp_body] ) | def send_success_response(self, msgid, methodname):
"""Send a CIM-XML response message back to the WBEM server that
indicates success.""" # noqa: E123
resp_xml = cim_xml.CIM(cim_xml.MESSAGE(cim_xml.SIMPLEEXPRSP(cim_xml.EXPMETHODRESPONSE(methodname)), msgid, IMPLEMENTED_PROTOCOL_VERSION), IMPLEMENTED_CIM_VERSION, IMPLEMENTED_DTD_VERSION)
resp_body = '<?xml version="1.0" encoding="utf-8" ?>\n' + resp_xml.toxml()
if isinstance(resp_body, six.text_type):
resp_body = resp_body.encode('utf-8') # depends on [control=['if'], data=[]]
http_code = 200
self.send_response(http_code, http_client.responses.get(http_code, ''))
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', str(len(resp_body)))
self.send_header('CIMExport', 'MethodResponse')
self.end_headers()
self.wfile.write(resp_body) |
def unsubscribe(self, code_list, subtype_list):
"""
        Cancel subscriptions.
        :param code_list: list of stock codes to unsubscribe
        :param subtype_list: subscription types to cancel, see SubType
        :return: (ret, err_message)
                ret == RET_OK: err_message is None
                ret != RET_OK: err_message is an error description string
"""
ret, msg, code_list, subtype_list = self._check_subscribe_param(code_list, subtype_list)
if ret != RET_OK:
return ret, msg
query_processor = self._get_sync_query_processor(SubscriptionQuery.pack_unsubscribe_req,
SubscriptionQuery.unpack_unsubscribe_rsp)
kargs = {
'code_list': code_list,
'subtype_list': subtype_list,
"conn_id": self.get_sync_conn_id()
}
for subtype in subtype_list:
if subtype not in self._ctx_subscribe:
continue
code_set = self._ctx_subscribe[subtype]
for code in code_list:
if code not in code_set:
continue
code_set.remove(code)
ret_code, msg, _ = query_processor(**kargs)
if ret_code != RET_OK:
return RET_ERROR, msg
ret_code, msg, unpush_req_str = SubscriptionQuery.pack_unpush_req(code_list, subtype_list, self.get_async_conn_id())
if ret_code != RET_OK:
return RET_ERROR, msg
ret_code, msg = self._send_async_req(unpush_req_str)
if ret_code != RET_OK:
return RET_ERROR, msg
return RET_OK, None | def function[unsubscribe, parameter[self, code_list, subtype_list]]:
constant[
        Cancel subscriptions.
        :param code_list: list of stock codes to unsubscribe
        :param subtype_list: subscription types to cancel, see SubType
        :return: (ret, err_message)
                ret == RET_OK: err_message is None
                ret != RET_OK: err_message is an error description string
]
<ast.Tuple object at 0x7da18bc724a0> assign[=] call[name[self]._check_subscribe_param, parameter[name[code_list], name[subtype_list]]]
if compare[name[ret] not_equal[!=] name[RET_OK]] begin[:]
return[tuple[[<ast.Name object at 0x7da18bc72710>, <ast.Name object at 0x7da18bc70790>]]]
variable[query_processor] assign[=] call[name[self]._get_sync_query_processor, parameter[name[SubscriptionQuery].pack_unsubscribe_req, name[SubscriptionQuery].unpack_unsubscribe_rsp]]
variable[kargs] assign[=] dictionary[[<ast.Constant object at 0x7da18bc70a60>, <ast.Constant object at 0x7da18bc70070>, <ast.Constant object at 0x7da18bc72a40>], [<ast.Name object at 0x7da18bc70100>, <ast.Name object at 0x7da18bc73f10>, <ast.Call object at 0x7da18bc71240>]]
for taget[name[subtype]] in starred[name[subtype_list]] begin[:]
if compare[name[subtype] <ast.NotIn object at 0x7da2590d7190> name[self]._ctx_subscribe] begin[:]
continue
variable[code_set] assign[=] call[name[self]._ctx_subscribe][name[subtype]]
for taget[name[code]] in starred[name[code_list]] begin[:]
if compare[name[code] <ast.NotIn object at 0x7da2590d7190> name[code_set]] begin[:]
continue
call[name[code_set].remove, parameter[name[code]]]
<ast.Tuple object at 0x7da1b07bf370> assign[=] call[name[query_processor], parameter[]]
if compare[name[ret_code] not_equal[!=] name[RET_OK]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b07bc910>, <ast.Name object at 0x7da1b07bd8a0>]]]
<ast.Tuple object at 0x7da1b07bdd80> assign[=] call[name[SubscriptionQuery].pack_unpush_req, parameter[name[code_list], name[subtype_list], call[name[self].get_async_conn_id, parameter[]]]]
if compare[name[ret_code] not_equal[!=] name[RET_OK]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b07bfa30>, <ast.Name object at 0x7da1b07bc5e0>]]]
<ast.Tuple object at 0x7da1b07bdc00> assign[=] call[name[self]._send_async_req, parameter[name[unpush_req_str]]]
if compare[name[ret_code] not_equal[!=] name[RET_OK]] begin[:]
return[tuple[[<ast.Name object at 0x7da1b07bd720>, <ast.Name object at 0x7da1b07bc6d0>]]]
return[tuple[[<ast.Name object at 0x7da1b07bdde0>, <ast.Constant object at 0x7da1b07bd6c0>]]] | keyword[def] identifier[unsubscribe] ( identifier[self] , identifier[code_list] , identifier[subtype_list] ):
literal[string]
identifier[ret] , identifier[msg] , identifier[code_list] , identifier[subtype_list] = identifier[self] . identifier[_check_subscribe_param] ( identifier[code_list] , identifier[subtype_list] )
keyword[if] identifier[ret] != identifier[RET_OK] :
keyword[return] identifier[ret] , identifier[msg]
identifier[query_processor] = identifier[self] . identifier[_get_sync_query_processor] ( identifier[SubscriptionQuery] . identifier[pack_unsubscribe_req] ,
identifier[SubscriptionQuery] . identifier[unpack_unsubscribe_rsp] )
identifier[kargs] ={
literal[string] : identifier[code_list] ,
literal[string] : identifier[subtype_list] ,
literal[string] : identifier[self] . identifier[get_sync_conn_id] ()
}
keyword[for] identifier[subtype] keyword[in] identifier[subtype_list] :
keyword[if] identifier[subtype] keyword[not] keyword[in] identifier[self] . identifier[_ctx_subscribe] :
keyword[continue]
identifier[code_set] = identifier[self] . identifier[_ctx_subscribe] [ identifier[subtype] ]
keyword[for] identifier[code] keyword[in] identifier[code_list] :
keyword[if] identifier[code] keyword[not] keyword[in] identifier[code_set] :
keyword[continue]
identifier[code_set] . identifier[remove] ( identifier[code] )
identifier[ret_code] , identifier[msg] , identifier[_] = identifier[query_processor] (** identifier[kargs] )
keyword[if] identifier[ret_code] != identifier[RET_OK] :
keyword[return] identifier[RET_ERROR] , identifier[msg]
identifier[ret_code] , identifier[msg] , identifier[unpush_req_str] = identifier[SubscriptionQuery] . identifier[pack_unpush_req] ( identifier[code_list] , identifier[subtype_list] , identifier[self] . identifier[get_async_conn_id] ())
keyword[if] identifier[ret_code] != identifier[RET_OK] :
keyword[return] identifier[RET_ERROR] , identifier[msg]
identifier[ret_code] , identifier[msg] = identifier[self] . identifier[_send_async_req] ( identifier[unpush_req_str] )
keyword[if] identifier[ret_code] != identifier[RET_OK] :
keyword[return] identifier[RET_ERROR] , identifier[msg]
keyword[return] identifier[RET_OK] , keyword[None] | def unsubscribe(self, code_list, subtype_list):
"""
        Cancel subscriptions.
        :param code_list: list of stock codes to unsubscribe
        :param subtype_list: subscription types to cancel, see SubType
        :return: (ret, err_message)
                ret == RET_OK: err_message is None
                ret != RET_OK: err_message is an error description string
"""
(ret, msg, code_list, subtype_list) = self._check_subscribe_param(code_list, subtype_list)
if ret != RET_OK:
return (ret, msg) # depends on [control=['if'], data=['ret']]
query_processor = self._get_sync_query_processor(SubscriptionQuery.pack_unsubscribe_req, SubscriptionQuery.unpack_unsubscribe_rsp)
kargs = {'code_list': code_list, 'subtype_list': subtype_list, 'conn_id': self.get_sync_conn_id()}
for subtype in subtype_list:
if subtype not in self._ctx_subscribe:
continue # depends on [control=['if'], data=[]]
code_set = self._ctx_subscribe[subtype]
for code in code_list:
if code not in code_set:
continue # depends on [control=['if'], data=[]]
code_set.remove(code) # depends on [control=['for'], data=['code']] # depends on [control=['for'], data=['subtype']]
(ret_code, msg, _) = query_processor(**kargs)
if ret_code != RET_OK:
return (RET_ERROR, msg) # depends on [control=['if'], data=[]]
(ret_code, msg, unpush_req_str) = SubscriptionQuery.pack_unpush_req(code_list, subtype_list, self.get_async_conn_id())
if ret_code != RET_OK:
return (RET_ERROR, msg) # depends on [control=['if'], data=[]]
(ret_code, msg) = self._send_async_req(unpush_req_str)
if ret_code != RET_OK:
return (RET_ERROR, msg) # depends on [control=['if'], data=[]]
return (RET_OK, None) |
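Call shape from the consumer side, assuming the futu-api package and a reachable OpenD gateway (sketch only; the stock code and subscription type are illustrative):

from futu import OpenQuoteContext, SubType, RET_OK

quote_ctx = OpenQuoteContext(host='127.0.0.1', port=11111)
quote_ctx.subscribe(['HK.00700'], [SubType.QUOTE])
ret, err = quote_ctx.unsubscribe(['HK.00700'], [SubType.QUOTE])
if ret != RET_OK:
    print('unsubscribe failed:', err)
quote_ctx.close()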
def _load(self, tree):
""" Run a LOAD statement """
filename = tree.load_file[0]
if filename[0] in ['"', "'"]:
filename = unwrap(filename)
if not os.path.exists(filename):
raise Exception("No such file %r" % filename)
batch = self.connection.batch_write(tree.table)
count = 0
with batch:
remainder, ext = os.path.splitext(filename)
if ext.lower() in [".gz", ".gzip"]:
ext = os.path.splitext(remainder)[1]
opened = gzip.open(filename, "rb")
else:
opened = open(filename, "r")
with opened as ifile:
if ext.lower() == ".csv":
reader = csv.DictReader(ifile)
for row in reader:
batch.put(row)
count += 1
elif ext.lower() == ".json":
for row in ifile:
batch.put(json.loads(row))
count += 1
else:
try:
while True:
batch.put(pickle.load(ifile))
count += 1
except EOFError:
pass
return count | def function[_load, parameter[self, tree]]:
constant[ Run a LOAD statement ]
variable[filename] assign[=] call[name[tree].load_file][constant[0]]
if compare[call[name[filename]][constant[0]] in list[[<ast.Constant object at 0x7da2046230a0>, <ast.Constant object at 0x7da204622350>]]] begin[:]
variable[filename] assign[=] call[name[unwrap], parameter[name[filename]]]
if <ast.UnaryOp object at 0x7da1b0cb4820> begin[:]
<ast.Raise object at 0x7da1b0cb75b0>
variable[batch] assign[=] call[name[self].connection.batch_write, parameter[name[tree].table]]
variable[count] assign[=] constant[0]
with name[batch] begin[:]
<ast.Tuple object at 0x7da1b0cb4430> assign[=] call[name[os].path.splitext, parameter[name[filename]]]
if compare[call[name[ext].lower, parameter[]] in list[[<ast.Constant object at 0x7da1b0cb7a00>, <ast.Constant object at 0x7da1b26ac670>]]] begin[:]
variable[ext] assign[=] call[call[name[os].path.splitext, parameter[name[remainder]]]][constant[1]]
variable[opened] assign[=] call[name[gzip].open, parameter[name[filename], constant[rb]]]
with name[opened] begin[:]
if compare[call[name[ext].lower, parameter[]] equal[==] constant[.csv]] begin[:]
variable[reader] assign[=] call[name[csv].DictReader, parameter[name[ifile]]]
for taget[name[row]] in starred[name[reader]] begin[:]
call[name[batch].put, parameter[name[row]]]
<ast.AugAssign object at 0x7da2046236d0>
return[name[count]] | keyword[def] identifier[_load] ( identifier[self] , identifier[tree] ):
literal[string]
identifier[filename] = identifier[tree] . identifier[load_file] [ literal[int] ]
keyword[if] identifier[filename] [ literal[int] ] keyword[in] [ literal[string] , literal[string] ]:
identifier[filename] = identifier[unwrap] ( identifier[filename] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ):
keyword[raise] identifier[Exception] ( literal[string] % identifier[filename] )
identifier[batch] = identifier[self] . identifier[connection] . identifier[batch_write] ( identifier[tree] . identifier[table] )
identifier[count] = literal[int]
keyword[with] identifier[batch] :
identifier[remainder] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[filename] )
keyword[if] identifier[ext] . identifier[lower] () keyword[in] [ literal[string] , literal[string] ]:
identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[remainder] )[ literal[int] ]
identifier[opened] = identifier[gzip] . identifier[open] ( identifier[filename] , literal[string] )
keyword[else] :
identifier[opened] = identifier[open] ( identifier[filename] , literal[string] )
keyword[with] identifier[opened] keyword[as] identifier[ifile] :
keyword[if] identifier[ext] . identifier[lower] ()== literal[string] :
identifier[reader] = identifier[csv] . identifier[DictReader] ( identifier[ifile] )
keyword[for] identifier[row] keyword[in] identifier[reader] :
identifier[batch] . identifier[put] ( identifier[row] )
identifier[count] += literal[int]
keyword[elif] identifier[ext] . identifier[lower] ()== literal[string] :
keyword[for] identifier[row] keyword[in] identifier[ifile] :
identifier[batch] . identifier[put] ( identifier[json] . identifier[loads] ( identifier[row] ))
identifier[count] += literal[int]
keyword[else] :
keyword[try] :
keyword[while] keyword[True] :
identifier[batch] . identifier[put] ( identifier[pickle] . identifier[load] ( identifier[ifile] ))
identifier[count] += literal[int]
keyword[except] identifier[EOFError] :
keyword[pass]
keyword[return] identifier[count] | def _load(self, tree):
""" Run a LOAD statement """
filename = tree.load_file[0]
if filename[0] in ['"', "'"]:
filename = unwrap(filename) # depends on [control=['if'], data=[]]
if not os.path.exists(filename):
raise Exception('No such file %r' % filename) # depends on [control=['if'], data=[]]
batch = self.connection.batch_write(tree.table)
count = 0
with batch:
(remainder, ext) = os.path.splitext(filename)
if ext.lower() in ['.gz', '.gzip']:
ext = os.path.splitext(remainder)[1]
opened = gzip.open(filename, 'rb') # depends on [control=['if'], data=[]]
else:
opened = open(filename, 'r')
with opened as ifile:
if ext.lower() == '.csv':
reader = csv.DictReader(ifile)
for row in reader:
batch.put(row)
count += 1 # depends on [control=['for'], data=['row']] # depends on [control=['if'], data=[]]
elif ext.lower() == '.json':
for row in ifile:
batch.put(json.loads(row))
count += 1 # depends on [control=['for'], data=['row']] # depends on [control=['if'], data=[]]
else:
try:
while True:
batch.put(pickle.load(ifile))
count += 1 # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except EOFError:
pass # depends on [control=['except'], data=[]] # depends on [control=['with'], data=['ifile']] # depends on [control=['with'], data=[]]
return count |
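A freestanding version of the same extension dispatch: gzip is unwrapped first, then the inner suffix picks CSV, JSON-lines, or repeated pickle loads (batch writes replaced with a plain list; text vs. binary modes handled explicitly, which the Python 2 original did not need to):

import csv, gzip, json, os, pickle

def load_rows(filename):
    remainder, ext = os.path.splitext(filename)
    gzipped = ext.lower() in ('.gz', '.gzip')
    if gzipped:
        ext = os.path.splitext(remainder)[1]
    text = ext.lower() in ('.csv', '.json')
    opener = gzip.open if gzipped else open
    rows = []
    with opener(filename, 'rt' if text else 'rb') as ifile:
        if ext.lower() == '.csv':
            rows.extend(csv.DictReader(ifile))
        elif ext.lower() == '.json':
            rows.extend(json.loads(line) for line in ifile)
        else:
            try:
                while True:  # a pickle file may hold many objects
                    rows.append(pickle.load(ifile))
            except EOFError:
                pass
    return rows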
def close(self):
""" Closes the connection by removing the user from all rooms """
logging.debug('Closing for user {user}'.format(user=self.id.name))
self.id.release_name()
for room in self._rooms.values():
room.disconnect(self) | def function[close, parameter[self]]:
constant[ Closes the connection by removing the user from all rooms ]
call[name[logging].debug, parameter[call[constant[Closing for user {user}].format, parameter[]]]]
call[name[self].id.release_name, parameter[]]
for taget[name[room]] in starred[call[name[self]._rooms.values, parameter[]]] begin[:]
call[name[room].disconnect, parameter[name[self]]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[user] = identifier[self] . identifier[id] . identifier[name] ))
identifier[self] . identifier[id] . identifier[release_name] ()
keyword[for] identifier[room] keyword[in] identifier[self] . identifier[_rooms] . identifier[values] ():
identifier[room] . identifier[disconnect] ( identifier[self] ) | def close(self):
""" Closes the connection by removing the user from all rooms """
logging.debug('Closing for user {user}'.format(user=self.id.name))
self.id.release_name()
for room in self._rooms.values():
room.disconnect(self) # depends on [control=['for'], data=['room']] |
def fuller(target, MA, MB, vA, vB, temperature='pore.temperature',
pressure='pore.pressure'):
r"""
    Uses the Fuller correlation to estimate the binary diffusion coefficient
    for gases at the conditions of interest
Parameters
----------
target : OpenPNM Object
The object for which these values are being calculated. This
controls the length of the calculated array, and also provides
access to other necessary thermofluid properties.
MA : float, array_like
Molecular weight of component A [kg/mol]
MB : float, array_like
Molecular weight of component B [kg/mol]
vA: float, array_like
Sum of atomic diffusion volumes for component A
vB: float, array_like
Sum of atomic diffusion volumes for component B
pressure : string
The dictionary key containing the pressure values in Pascals (Pa)
temperature : string
The dictionary key containing the temperature values in Kelvin (K)
"""
T = target[temperature]
P = target[pressure]
MAB = 2*(1.0/MA+1.0/MB)**(-1)
MAB = MAB*1e3
P = P*1e-5
value = 0.00143*T**1.75/(P*(MAB**0.5)*(vA**(1./3)+vB**(1./3))**2)*1e-4
return value | def function[fuller, parameter[target, MA, MB, vA, vB, temperature, pressure]]:
constant[
    Uses the Fuller correlation to estimate the binary diffusion coefficient
    for gases at the conditions of interest
Parameters
----------
target : OpenPNM Object
The object for which these values are being calculated. This
controls the length of the calculated array, and also provides
access to other necessary thermofluid properties.
MA : float, array_like
Molecular weight of component A [kg/mol]
MB : float, array_like
Molecular weight of component B [kg/mol]
vA: float, array_like
Sum of atomic diffusion volumes for component A
vB: float, array_like
Sum of atomic diffusion volumes for component B
pressure : string
The dictionary key containing the pressure values in Pascals (Pa)
temperature : string
The dictionary key containing the temperature values in Kelvin (K)
]
variable[T] assign[=] call[name[target]][name[temperature]]
variable[P] assign[=] call[name[target]][name[pressure]]
variable[MAB] assign[=] binary_operation[constant[2] * binary_operation[binary_operation[binary_operation[constant[1.0] / name[MA]] + binary_operation[constant[1.0] / name[MB]]] ** <ast.UnaryOp object at 0x7da18c4cc520>]]
variable[MAB] assign[=] binary_operation[name[MAB] * constant[1000.0]]
variable[P] assign[=] binary_operation[name[P] * constant[1e-05]]
variable[value] assign[=] binary_operation[binary_operation[binary_operation[constant[0.00143] * binary_operation[name[T] ** constant[1.75]]] / binary_operation[binary_operation[name[P] * binary_operation[name[MAB] ** constant[0.5]]] * binary_operation[binary_operation[binary_operation[name[vA] ** binary_operation[constant[1.0] / constant[3]]] + binary_operation[name[vB] ** binary_operation[constant[1.0] / constant[3]]]] ** constant[2]]]] * constant[0.0001]]
return[name[value]] | keyword[def] identifier[fuller] ( identifier[target] , identifier[MA] , identifier[MB] , identifier[vA] , identifier[vB] , identifier[temperature] = literal[string] ,
identifier[pressure] = literal[string] ):
literal[string]
identifier[T] = identifier[target] [ identifier[temperature] ]
identifier[P] = identifier[target] [ identifier[pressure] ]
identifier[MAB] = literal[int] *( literal[int] / identifier[MA] + literal[int] / identifier[MB] )**(- literal[int] )
identifier[MAB] = identifier[MAB] * literal[int]
identifier[P] = identifier[P] * literal[int]
identifier[value] = literal[int] * identifier[T] ** literal[int] /( identifier[P] *( identifier[MAB] ** literal[int] )*( identifier[vA] **( literal[int] / literal[int] )+ identifier[vB] **( literal[int] / literal[int] ))** literal[int] )* literal[int]
keyword[return] identifier[value] | def fuller(target, MA, MB, vA, vB, temperature='pore.temperature', pressure='pore.pressure'):
"""
    Uses the Fuller correlation to estimate the binary diffusion coefficient
    for gases at the conditions of interest
Parameters
----------
target : OpenPNM Object
The object for which these values are being calculated. This
controls the length of the calculated array, and also provides
access to other necessary thermofluid properties.
MA : float, array_like
Molecular weight of component A [kg/mol]
MB : float, array_like
Molecular weight of component B [kg/mol]
vA: float, array_like
Sum of atomic diffusion volumes for component A
vB: float, array_like
Sum of atomic diffusion volumes for component B
pressure : string
The dictionary key containing the pressure values in Pascals (Pa)
temperature : string
The dictionary key containing the temperature values in Kelvin (K)
"""
T = target[temperature]
P = target[pressure]
MAB = 2 * (1.0 / MA + 1.0 / MB) ** (-1)
MAB = MAB * 1000.0
P = P * 1e-05
value = 0.00143 * T ** 1.75 / (P * MAB ** 0.5 * (vA ** (1.0 / 3) + vB ** (1.0 / 3)) ** 2) * 0.0001
return value |
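Pulling the formula out for one worked number: N2 diffusing in O2 near ambient conditions, using representative Fuller diffusion volumes (18.5 and 16.3 here; tabulated values vary slightly by source):

T, P = 298.0, 101325.0        # K, Pa
MA, MB = 28.0e-3, 32.0e-3     # kg/mol (N2, O2)
vA, vB = 18.5, 16.3           # assumed Fuller diffusion volumes

MAB = 2 * (1.0 / MA + 1.0 / MB) ** -1 * 1e3   # -> g/mol, as above
P_bar = P * 1e-5
DAB = 0.00143 * T**1.75 / (P_bar * MAB**0.5 *
                           (vA**(1. / 3) + vB**(1. / 3))**2) * 1e-4
print(DAB)  # ~2e-5 m^2/s, the right order for gas pairs in air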
def pop_frame(self):
"""
        Remove the frame at the top of the stack and return the frame
        that becomes the new top.
        :returns: The new top frame
        :rtype: Frame
        :raises Exception: If no frames remain after the pop
"""
self.frames.pop(0)
if len(self.frames) == 0:
raise Exception("stack is exhausted")
return self.frames[0] | def function[pop_frame, parameter[self]]:
constant[
        Remove the frame at the top of the stack and return the frame
        that becomes the new top.
        :returns: The new top frame
        :rtype: Frame
        :raises Exception: If no frames remain after the pop
]
call[name[self].frames.pop, parameter[constant[0]]]
if compare[call[name[len], parameter[name[self].frames]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da1b14e50c0>
return[call[name[self].frames][constant[0]]] | keyword[def] identifier[pop_frame] ( identifier[self] ):
literal[string]
identifier[self] . identifier[frames] . identifier[pop] ( literal[int] )
keyword[if] identifier[len] ( identifier[self] . identifier[frames] )== literal[int] :
keyword[raise] identifier[Exception] ( literal[string] )
keyword[return] identifier[self] . identifier[frames] [ literal[int] ] | def pop_frame(self):
"""
        Remove the frame at the top of the stack and return the frame
        that becomes the new top.
        :returns: The new top frame
        :rtype: Frame
        :raises Exception: If no frames remain after the pop
"""
self.frames.pop(0)
if len(self.frames) == 0:
raise Exception('stack is exhausted') # depends on [control=['if'], data=[]]
return self.frames[0] |
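Behavior check with a throwaway stack: what comes back is the frame that is on top after the pop, not the frame that was removed:

frames = ['top', 'middle', 'bottom']

def pop_frame():
    frames.pop(0)
    if len(frames) == 0:
        raise Exception("stack is exhausted")
    return frames[0]

print(pop_frame())  # middle -- 'top' is gone
print(pop_frame())  # bottom
# a third call raises: stack is exhausted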
async def send(self, message: Message) -> None:
"""
Send ASGI websocket messages, ensuring valid state transitions.
"""
if self.application_state == WebSocketState.CONNECTING:
message_type = message["type"]
assert message_type in {"websocket.accept", "websocket.close"}
if message_type == "websocket.close":
self.application_state = WebSocketState.DISCONNECTED
else:
self.application_state = WebSocketState.CONNECTED
await self._send(message)
elif self.application_state == WebSocketState.CONNECTED:
message_type = message["type"]
assert message_type in {"websocket.send", "websocket.close"}
if message_type == "websocket.close":
self.application_state = WebSocketState.DISCONNECTED
await self._send(message)
else:
raise RuntimeError('Cannot call "send" once a close message has been sent.') | <ast.AsyncFunctionDef object at 0x7da1b000b8e0> | keyword[async] keyword[def] identifier[send] ( identifier[self] , identifier[message] : identifier[Message] )-> keyword[None] :
literal[string]
keyword[if] identifier[self] . identifier[application_state] == identifier[WebSocketState] . identifier[CONNECTING] :
identifier[message_type] = identifier[message] [ literal[string] ]
keyword[assert] identifier[message_type] keyword[in] { literal[string] , literal[string] }
keyword[if] identifier[message_type] == literal[string] :
identifier[self] . identifier[application_state] = identifier[WebSocketState] . identifier[DISCONNECTED]
keyword[else] :
identifier[self] . identifier[application_state] = identifier[WebSocketState] . identifier[CONNECTED]
keyword[await] identifier[self] . identifier[_send] ( identifier[message] )
keyword[elif] identifier[self] . identifier[application_state] == identifier[WebSocketState] . identifier[CONNECTED] :
identifier[message_type] = identifier[message] [ literal[string] ]
keyword[assert] identifier[message_type] keyword[in] { literal[string] , literal[string] }
keyword[if] identifier[message_type] == literal[string] :
identifier[self] . identifier[application_state] = identifier[WebSocketState] . identifier[DISCONNECTED]
keyword[await] identifier[self] . identifier[_send] ( identifier[message] )
keyword[else] :
keyword[raise] identifier[RuntimeError] ( literal[string] ) | async def send(self, message: Message) -> None:
"""
Send ASGI websocket messages, ensuring valid state transitions.
"""
if self.application_state == WebSocketState.CONNECTING:
message_type = message['type']
assert message_type in {'websocket.accept', 'websocket.close'}
if message_type == 'websocket.close':
self.application_state = WebSocketState.DISCONNECTED # depends on [control=['if'], data=[]]
else:
self.application_state = WebSocketState.CONNECTED
await self._send(message) # depends on [control=['if'], data=[]]
elif self.application_state == WebSocketState.CONNECTED:
message_type = message['type']
assert message_type in {'websocket.send', 'websocket.close'}
if message_type == 'websocket.close':
self.application_state = WebSocketState.DISCONNECTED # depends on [control=['if'], data=[]]
await self._send(message) # depends on [control=['if'], data=[]]
else:
raise RuntimeError('Cannot call "send" once a close message has been sent.') |
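A runnable miniature of the same state machine with _send stubbed to print, showing the legal accept -> send -> close sequence (class and enum names only mimic the Starlette-style originals):

import asyncio
import enum

class WebSocketState(enum.Enum):
    CONNECTING = 0
    CONNECTED = 1
    DISCONNECTED = 2

class ToyWebSocket:
    def __init__(self):
        self.application_state = WebSocketState.CONNECTING

    async def _send(self, message):
        print('->', message)

    async def send(self, message):
        if self.application_state == WebSocketState.CONNECTING:
            assert message['type'] in {'websocket.accept', 'websocket.close'}
            self.application_state = (WebSocketState.DISCONNECTED
                                      if message['type'] == 'websocket.close'
                                      else WebSocketState.CONNECTED)
            await self._send(message)
        elif self.application_state == WebSocketState.CONNECTED:
            assert message['type'] in {'websocket.send', 'websocket.close'}
            if message['type'] == 'websocket.close':
                self.application_state = WebSocketState.DISCONNECTED
            await self._send(message)
        else:
            raise RuntimeError('Cannot call "send" once a close message has been sent.')

async def demo():
    ws = ToyWebSocket()
    await ws.send({'type': 'websocket.accept'})
    await ws.send({'type': 'websocket.send', 'text': 'hello'})
    await ws.send({'type': 'websocket.close'})

asyncio.run(demo())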
def matplotlib_to_ginga_cmap(cm, name=None):
"""Convert matplotlib colormap to Ginga's."""
if name is None:
name = cm.name
arr = cm(np.arange(0, min_cmap_len) / np.float(min_cmap_len - 1))
clst = arr[:, 0:3]
return ColorMap(name, clst) | def function[matplotlib_to_ginga_cmap, parameter[cm, name]]:
constant[Convert matplotlib colormap to Ginga's.]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] name[cm].name
variable[arr] assign[=] call[name[cm], parameter[binary_operation[call[name[np].arange, parameter[constant[0], name[min_cmap_len]]] / call[name[np].float, parameter[binary_operation[name[min_cmap_len] - constant[1]]]]]]]
variable[clst] assign[=] call[name[arr]][tuple[[<ast.Slice object at 0x7da1b0d1bdc0>, <ast.Slice object at 0x7da1b0d1bbe0>]]]
return[call[name[ColorMap], parameter[name[name], name[clst]]]] | keyword[def] identifier[matplotlib_to_ginga_cmap] ( identifier[cm] , identifier[name] = keyword[None] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = identifier[cm] . identifier[name]
identifier[arr] = identifier[cm] ( identifier[np] . identifier[arange] ( literal[int] , identifier[min_cmap_len] )/ identifier[np] . identifier[float] ( identifier[min_cmap_len] - literal[int] ))
identifier[clst] = identifier[arr] [:, literal[int] : literal[int] ]
keyword[return] identifier[ColorMap] ( identifier[name] , identifier[clst] ) | def matplotlib_to_ginga_cmap(cm, name=None):
"""Convert matplotlib colormap to Ginga's."""
if name is None:
name = cm.name # depends on [control=['if'], data=['name']]
arr = cm(np.arange(0, min_cmap_len) / np.float(min_cmap_len - 1))
clst = arr[:, 0:3]
return ColorMap(name, clst) |
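The conversion is just sampling the matplotlib colormap at min_cmap_len evenly spaced points and keeping RGB. The sampling half, checked standalone (ginga's ColorMap wrapper omitted; note plain np.float was removed in NumPy 1.24, so np.float64 is used here):

import numpy as np
import matplotlib.cm as cm

min_cmap_len = 256                    # stand-in for ginga's constant
arr = cm.viridis(np.arange(0, min_cmap_len) / np.float64(min_cmap_len - 1))
clst = arr[:, 0:3]                    # keep RGB, drop alpha
print(clst.shape)                     # (256, 3)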
def update_fmt_with_notebook_options(self, metadata):
"""Update format options with the values in the notebook metadata, and record those
options in the notebook metadata"""
# format options in notebook have precedence over that in fmt
for opt in _VALID_FORMAT_OPTIONS:
if opt in metadata.get('jupytext', {}):
self.fmt.setdefault(opt, metadata['jupytext'][opt])
if opt in self.fmt:
metadata.setdefault('jupytext', {}).setdefault(opt, self.fmt[opt])
# rST to md conversion should happen only once
if metadata.get('jupytext', {}).get('rst2md') is True:
metadata['jupytext']['rst2md'] = False | def function[update_fmt_with_notebook_options, parameter[self, metadata]]:
constant[Update format options with the values in the notebook metadata, and record those
options in the notebook metadata]
for taget[name[opt]] in starred[name[_VALID_FORMAT_OPTIONS]] begin[:]
if compare[name[opt] in call[name[metadata].get, parameter[constant[jupytext], dictionary[[], []]]]] begin[:]
call[name[self].fmt.setdefault, parameter[name[opt], call[call[name[metadata]][constant[jupytext]]][name[opt]]]]
if compare[name[opt] in name[self].fmt] begin[:]
call[call[name[metadata].setdefault, parameter[constant[jupytext], dictionary[[], []]]].setdefault, parameter[name[opt], call[name[self].fmt][name[opt]]]]
if compare[call[call[name[metadata].get, parameter[constant[jupytext], dictionary[[], []]]].get, parameter[constant[rst2md]]] is constant[True]] begin[:]
call[call[name[metadata]][constant[jupytext]]][constant[rst2md]] assign[=] constant[False] | keyword[def] identifier[update_fmt_with_notebook_options] ( identifier[self] , identifier[metadata] ):
literal[string]
keyword[for] identifier[opt] keyword[in] identifier[_VALID_FORMAT_OPTIONS] :
keyword[if] identifier[opt] keyword[in] identifier[metadata] . identifier[get] ( literal[string] ,{}):
identifier[self] . identifier[fmt] . identifier[setdefault] ( identifier[opt] , identifier[metadata] [ literal[string] ][ identifier[opt] ])
keyword[if] identifier[opt] keyword[in] identifier[self] . identifier[fmt] :
identifier[metadata] . identifier[setdefault] ( literal[string] ,{}). identifier[setdefault] ( identifier[opt] , identifier[self] . identifier[fmt] [ identifier[opt] ])
keyword[if] identifier[metadata] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ) keyword[is] keyword[True] :
identifier[metadata] [ literal[string] ][ literal[string] ]= keyword[False] | def update_fmt_with_notebook_options(self, metadata):
"""Update format options with the values in the notebook metadata, and record those
options in the notebook metadata"""
# format options in notebook have precedence over that in fmt
for opt in _VALID_FORMAT_OPTIONS:
if opt in metadata.get('jupytext', {}):
self.fmt.setdefault(opt, metadata['jupytext'][opt]) # depends on [control=['if'], data=['opt']]
if opt in self.fmt:
metadata.setdefault('jupytext', {}).setdefault(opt, self.fmt[opt]) # depends on [control=['if'], data=['opt']] # depends on [control=['for'], data=['opt']]
# rST to md conversion should happen only once
if metadata.get('jupytext', {}).get('rst2md') is True:
metadata['jupytext']['rst2md'] = False # depends on [control=['if'], data=[]] |
def send_messages(cls, http_request, message_requests):
"""
Deduplicate any outgoing message requests, and send the remainder.
Args:
http_request: The HTTP request in whose response we want to embed the messages
message_requests: A list of undeduplicated messages in the form of tuples of message type
            and text, for example ('error', 'Something went wrong')
"""
deduplicated_messages = set(message_requests)
for msg_type, text in deduplicated_messages:
message_function = getattr(messages, msg_type)
message_function(http_request, text) | def function[send_messages, parameter[cls, http_request, message_requests]]:
constant[
Deduplicate any outgoing message requests, and send the remainder.
Args:
http_request: The HTTP request in whose response we want to embed the messages
message_requests: A list of undeduplicated messages in the form of tuples of message type
            and text, for example ('error', 'Something went wrong')
]
variable[deduplicated_messages] assign[=] call[name[set], parameter[name[message_requests]]]
for taget[tuple[[<ast.Name object at 0x7da1b01267d0>, <ast.Name object at 0x7da1b0124760>]]] in starred[name[deduplicated_messages]] begin[:]
variable[message_function] assign[=] call[name[getattr], parameter[name[messages], name[msg_type]]]
call[name[message_function], parameter[name[http_request], name[text]]] | keyword[def] identifier[send_messages] ( identifier[cls] , identifier[http_request] , identifier[message_requests] ):
literal[string]
identifier[deduplicated_messages] = identifier[set] ( identifier[message_requests] )
keyword[for] identifier[msg_type] , identifier[text] keyword[in] identifier[deduplicated_messages] :
identifier[message_function] = identifier[getattr] ( identifier[messages] , identifier[msg_type] )
identifier[message_function] ( identifier[http_request] , identifier[text] ) | def send_messages(cls, http_request, message_requests):
"""
Deduplicate any outgoing message requests, and send the remainder.
Args:
http_request: The HTTP request in whose response we want to embed the messages
message_requests: A list of undeduplicated messages in the form of tuples of message type
and text- for example, ('error', 'Something went wrong')
"""
deduplicated_messages = set(message_requests)
for (msg_type, text) in deduplicated_messages:
message_function = getattr(messages, msg_type)
message_function(http_request, text) # depends on [control=['for'], data=[]] |
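The deduplication is a plain set() over (level, text) tuples, with getattr dispatch doing the rest. A framework-free sketch using a stub in place of django.contrib.messages:

import types

# Stub: the real messages module exposes error(), warning(), etc.
messages = types.SimpleNamespace(
    error=lambda request, text: print('error:', text),
    warning=lambda request, text: print('warning:', text),
)

def send_messages(http_request, message_requests):
    for msg_type, text in set(message_requests):
        getattr(messages, msg_type)(http_request, text)

send_messages(None, [('error', 'Something went wrong'),
                     ('error', 'Something went wrong'),  # duplicate, sent once
                     ('warning', 'Check your input')])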
def produce_fake_hash(x):
"""
Produce random, binary features, totally irrespective of the content of
x, but in the same shape as x.
"""
h = np.random.binomial(1, 0.5, (x.shape[0], 1024))
packed = np.packbits(h, axis=-1).view(np.uint64)
return zounds.ArrayWithUnits(
packed, [x.dimensions[0], zounds.IdentityDimension()]) | def function[produce_fake_hash, parameter[x]]:
constant[
Produce random, binary features, totally irrespective of the content of
x, but in the same shape as x.
]
variable[h] assign[=] call[name[np].random.binomial, parameter[constant[1], constant[0.5], tuple[[<ast.Subscript object at 0x7da1b19d8fa0>, <ast.Constant object at 0x7da1b19da740>]]]]
variable[packed] assign[=] call[call[name[np].packbits, parameter[name[h]]].view, parameter[name[np].uint64]]
return[call[name[zounds].ArrayWithUnits, parameter[name[packed], list[[<ast.Subscript object at 0x7da1b191cbb0>, <ast.Call object at 0x7da1b191d360>]]]]] | keyword[def] identifier[produce_fake_hash] ( identifier[x] ):
literal[string]
identifier[h] = identifier[np] . identifier[random] . identifier[binomial] ( literal[int] , literal[int] ,( identifier[x] . identifier[shape] [ literal[int] ], literal[int] ))
identifier[packed] = identifier[np] . identifier[packbits] ( identifier[h] , identifier[axis] =- literal[int] ). identifier[view] ( identifier[np] . identifier[uint64] )
keyword[return] identifier[zounds] . identifier[ArrayWithUnits] (
identifier[packed] ,[ identifier[x] . identifier[dimensions] [ literal[int] ], identifier[zounds] . identifier[IdentityDimension] ()]) | def produce_fake_hash(x):
"""
Produce random, binary features, totally irrespective of the content of
x, but in the same shape as x.
"""
h = np.random.binomial(1, 0.5, (x.shape[0], 1024))
packed = np.packbits(h, axis=-1).view(np.uint64)
return zounds.ArrayWithUnits(packed, [x.dimensions[0], zounds.IdentityDimension()]) |
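The bit-packing trick on its own, minus the zounds wrappers: 1024 random bits per row pack into 128 bytes, which reinterpret cleanly as 16 uint64 hash codes:

import numpy as np

rows = 8
h = np.random.binomial(1, 0.5, (rows, 1024)).astype(np.uint8)
packed = np.packbits(h, axis=-1).view(np.uint64)
print(h.shape, '->', packed.shape)  # (8, 1024) -> (8, 16)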
def open_with_auth(url, opener=urllib.request.urlopen):
"""Open a urllib2 request, handling HTTP authentication"""
scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url)
# Double scheme does not raise on Mac OS X as revealed by a
# failing test. We would expect "nonnumeric port". Refs #20.
if netloc.endswith(':'):
raise http_client.InvalidURL("nonnumeric port: ''")
if scheme in ('http', 'https'):
auth, host = urllib.parse.splituser(netloc)
else:
auth = None
if not auth:
cred = PyPIConfig().find_credential(url)
if cred:
auth = str(cred)
info = cred.username, url
log.info('Authenticating as %s for %s (from .pypirc)', *info)
if auth:
auth = "Basic " + _encode_auth(auth)
parts = scheme, host, path, params, query, frag
new_url = urllib.parse.urlunparse(parts)
request = urllib.request.Request(new_url)
request.add_header("Authorization", auth)
else:
request = urllib.request.Request(url)
request.add_header('User-Agent', user_agent)
fp = opener(request)
if auth:
# Put authentication info back into request URL if same host,
# so that links found on the page will work
s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url)
if s2 == scheme and h2 == host:
parts = s2, netloc, path2, param2, query2, frag2
fp.url = urllib.parse.urlunparse(parts)
return fp | def function[open_with_auth, parameter[url, opener]]:
constant[Open a urllib2 request, handling HTTP authentication]
<ast.Tuple object at 0x7da20e9557b0> assign[=] call[name[urllib].parse.urlparse, parameter[name[url]]]
if call[name[netloc].endswith, parameter[constant[:]]] begin[:]
<ast.Raise object at 0x7da20e956d10>
if compare[name[scheme] in tuple[[<ast.Constant object at 0x7da18bc72200>, <ast.Constant object at 0x7da18bc71750>]]] begin[:]
<ast.Tuple object at 0x7da18bc720e0> assign[=] call[name[urllib].parse.splituser, parameter[name[netloc]]]
if <ast.UnaryOp object at 0x7da18bc701c0> begin[:]
variable[cred] assign[=] call[call[name[PyPIConfig], parameter[]].find_credential, parameter[name[url]]]
if name[cred] begin[:]
variable[auth] assign[=] call[name[str], parameter[name[cred]]]
variable[info] assign[=] tuple[[<ast.Attribute object at 0x7da207f02680>, <ast.Name object at 0x7da207f00220>]]
call[name[log].info, parameter[constant[Authenticating as %s for %s (from .pypirc)], <ast.Starred object at 0x7da207f03400>]]
if name[auth] begin[:]
variable[auth] assign[=] binary_operation[constant[Basic ] + call[name[_encode_auth], parameter[name[auth]]]]
variable[parts] assign[=] tuple[[<ast.Name object at 0x7da207f01540>, <ast.Name object at 0x7da207f01480>, <ast.Name object at 0x7da207f00cd0>, <ast.Name object at 0x7da207f02830>, <ast.Name object at 0x7da207f024a0>, <ast.Name object at 0x7da207f00d30>]]
variable[new_url] assign[=] call[name[urllib].parse.urlunparse, parameter[name[parts]]]
variable[request] assign[=] call[name[urllib].request.Request, parameter[name[new_url]]]
call[name[request].add_header, parameter[constant[Authorization], name[auth]]]
call[name[request].add_header, parameter[constant[User-Agent], name[user_agent]]]
variable[fp] assign[=] call[name[opener], parameter[name[request]]]
if name[auth] begin[:]
<ast.Tuple object at 0x7da204346920> assign[=] call[name[urllib].parse.urlparse, parameter[name[fp].url]]
if <ast.BoolOp object at 0x7da2043478e0> begin[:]
variable[parts] assign[=] tuple[[<ast.Name object at 0x7da20c7cb7c0>, <ast.Name object at 0x7da20c7c80a0>, <ast.Name object at 0x7da20c7c8e80>, <ast.Name object at 0x7da20c7cb0a0>, <ast.Name object at 0x7da20c7caf50>, <ast.Name object at 0x7da20c7ca290>]]
name[fp].url assign[=] call[name[urllib].parse.urlunparse, parameter[name[parts]]]
return[name[fp]] | keyword[def] identifier[open_with_auth] ( identifier[url] , identifier[opener] = identifier[urllib] . identifier[request] . identifier[urlopen] ):
literal[string]
identifier[scheme] , identifier[netloc] , identifier[path] , identifier[params] , identifier[query] , identifier[frag] = identifier[urllib] . identifier[parse] . identifier[urlparse] ( identifier[url] )
keyword[if] identifier[netloc] . identifier[endswith] ( literal[string] ):
keyword[raise] identifier[http_client] . identifier[InvalidURL] ( literal[string] )
keyword[if] identifier[scheme] keyword[in] ( literal[string] , literal[string] ):
identifier[auth] , identifier[host] = identifier[urllib] . identifier[parse] . identifier[splituser] ( identifier[netloc] )
keyword[else] :
identifier[auth] = keyword[None]
keyword[if] keyword[not] identifier[auth] :
identifier[cred] = identifier[PyPIConfig] (). identifier[find_credential] ( identifier[url] )
keyword[if] identifier[cred] :
identifier[auth] = identifier[str] ( identifier[cred] )
identifier[info] = identifier[cred] . identifier[username] , identifier[url]
identifier[log] . identifier[info] ( literal[string] ,* identifier[info] )
keyword[if] identifier[auth] :
identifier[auth] = literal[string] + identifier[_encode_auth] ( identifier[auth] )
identifier[parts] = identifier[scheme] , identifier[host] , identifier[path] , identifier[params] , identifier[query] , identifier[frag]
identifier[new_url] = identifier[urllib] . identifier[parse] . identifier[urlunparse] ( identifier[parts] )
identifier[request] = identifier[urllib] . identifier[request] . identifier[Request] ( identifier[new_url] )
identifier[request] . identifier[add_header] ( literal[string] , identifier[auth] )
keyword[else] :
identifier[request] = identifier[urllib] . identifier[request] . identifier[Request] ( identifier[url] )
identifier[request] . identifier[add_header] ( literal[string] , identifier[user_agent] )
identifier[fp] = identifier[opener] ( identifier[request] )
keyword[if] identifier[auth] :
identifier[s2] , identifier[h2] , identifier[path2] , identifier[param2] , identifier[query2] , identifier[frag2] = identifier[urllib] . identifier[parse] . identifier[urlparse] ( identifier[fp] . identifier[url] )
keyword[if] identifier[s2] == identifier[scheme] keyword[and] identifier[h2] == identifier[host] :
identifier[parts] = identifier[s2] , identifier[netloc] , identifier[path2] , identifier[param2] , identifier[query2] , identifier[frag2]
identifier[fp] . identifier[url] = identifier[urllib] . identifier[parse] . identifier[urlunparse] ( identifier[parts] )
keyword[return] identifier[fp] | def open_with_auth(url, opener=urllib.request.urlopen):
"""Open a urllib2 request, handling HTTP authentication"""
(scheme, netloc, path, params, query, frag) = urllib.parse.urlparse(url)
# Double scheme does not raise on Mac OS X as revealed by a
# failing test. We would expect "nonnumeric port". Refs #20.
if netloc.endswith(':'):
raise http_client.InvalidURL("nonnumeric port: ''") # depends on [control=['if'], data=[]]
if scheme in ('http', 'https'):
(auth, host) = urllib.parse.splituser(netloc) # depends on [control=['if'], data=[]]
else:
auth = None
if not auth:
cred = PyPIConfig().find_credential(url)
if cred:
auth = str(cred)
info = (cred.username, url)
log.info('Authenticating as %s for %s (from .pypirc)', *info) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if auth:
auth = 'Basic ' + _encode_auth(auth)
parts = (scheme, host, path, params, query, frag)
new_url = urllib.parse.urlunparse(parts)
request = urllib.request.Request(new_url)
request.add_header('Authorization', auth) # depends on [control=['if'], data=[]]
else:
request = urllib.request.Request(url)
request.add_header('User-Agent', user_agent)
fp = opener(request)
if auth:
# Put authentication info back into request URL if same host,
# so that links found on the page will work
(s2, h2, path2, param2, query2, frag2) = urllib.parse.urlparse(fp.url)
if s2 == scheme and h2 == host:
parts = (s2, netloc, path2, param2, query2, frag2)
fp.url = urllib.parse.urlunparse(parts) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return fp |
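# Hypothetical usage sketch; the host and credentials below are assumptions.
# Userinfo embedded in the URL is sent as a Basic Authorization header, and
# on return it is spliced back into fp.url so relative links keep working.
fp = open_with_auth('https://user:secret@pypi.example.org/simple/')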
def ner_chunk(args):
"""Chunk named entities."""
chunker = NEChunker(lang=args.lang)
tag(chunker, args) | def function[ner_chunk, parameter[args]]:
constant[Chunk named entities.]
variable[chunker] assign[=] call[name[NEChunker], parameter[]]
call[name[tag], parameter[name[chunker], name[args]]] | keyword[def] identifier[ner_chunk] ( identifier[args] ):
literal[string]
identifier[chunker] = identifier[NEChunker] ( identifier[lang] = identifier[args] . identifier[lang] )
identifier[tag] ( identifier[chunker] , identifier[args] ) | def ner_chunk(args):
"""Chunk named entities."""
chunker = NEChunker(lang=args.lang)
tag(chunker, args) |
def add_profile(self, namespace, key, value, force=False):
""" Add profile information to this node at the DAX level
"""
try:
entry = dax.Profile(namespace, key, value)
self._dax_node.addProfile(entry)
except dax.DuplicateError:
if force:
# Replace with the new key
self._dax_node.removeProfile(entry)
self._dax_node.addProfile(entry) | def function[add_profile, parameter[self, namespace, key, value, force]]:
constant[ Add profile information to this node at the DAX level
]
<ast.Try object at 0x7da18f810940> | keyword[def] identifier[add_profile] ( identifier[self] , identifier[namespace] , identifier[key] , identifier[value] , identifier[force] = keyword[False] ):
literal[string]
keyword[try] :
identifier[entry] = identifier[dax] . identifier[Profile] ( identifier[namespace] , identifier[key] , identifier[value] )
identifier[self] . identifier[_dax_node] . identifier[addProfile] ( identifier[entry] )
keyword[except] identifier[dax] . identifier[DuplicateError] :
keyword[if] identifier[force] :
identifier[self] . identifier[_dax_node] . identifier[removeProfile] ( identifier[entry] )
identifier[self] . identifier[_dax_node] . identifier[addProfile] ( identifier[entry] ) | def add_profile(self, namespace, key, value, force=False):
""" Add profile information to this node at the DAX level
"""
try:
entry = dax.Profile(namespace, key, value)
self._dax_node.addProfile(entry) # depends on [control=['try'], data=[]]
except dax.DuplicateError:
if force:
# Replace with the new key
self._dax_node.removeProfile(entry)
self._dax_node.addProfile(entry) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] |
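# A hypothetical call in the Pegasus DAX style (namespace, key and value are
# assumptions, not from the source): with force=True a clashing profile is
# removed and re-added instead of raising a DuplicateError.
node.add_profile('condor', 'request_memory', '2048', force=True)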
def setCurrentRegItem(self, regItem):
""" Sets the current item to the regItem
"""
check_class(regItem, ClassRegItem, allow_none=True)
self.tableView.setCurrentRegItem(regItem) | def function[setCurrentRegItem, parameter[self, regItem]]:
constant[ Sets the current item to the regItem
]
call[name[check_class], parameter[name[regItem], name[ClassRegItem]]]
call[name[self].tableView.setCurrentRegItem, parameter[name[regItem]]] | keyword[def] identifier[setCurrentRegItem] ( identifier[self] , identifier[regItem] ):
literal[string]
identifier[check_class] ( identifier[regItem] , identifier[ClassRegItem] , identifier[allow_none] = keyword[True] )
identifier[self] . identifier[tableView] . identifier[setCurrentRegItem] ( identifier[regItem] ) | def setCurrentRegItem(self, regItem):
""" Sets the current item to the regItem
"""
check_class(regItem, ClassRegItem, allow_none=True)
self.tableView.setCurrentRegItem(regItem) |
def parse_input(self):
'''Parse the listings.
Returns:
        iter: An iterable of :class:`.ftp.ls.listing.FileEntry`
'''
if self._text:
lines = iter(self._text.splitlines())
elif self._file:
lines = self._file
else:
lines = ()
sample_lines = []
    for line in lines:
        sample_lines.append(line)
        # append before the length check so the line that triggers the break
        # is not pulled from the iterator and silently lost
        if len(sample_lines) > 100:
            break
lines = itertools.chain(sample_lines, lines)
self.guess_type(sample_lines)
datetime_format = wpull.protocol.ftp.ls.date.guess_datetime_format(
sample_lines)
self.set_datetime_format(datetime_format)
return self.parse(lines) | def function[parse_input, parameter[self]]:
constant[Parse the listings.
Returns:
        iter: An iterable of :class:`.ftp.ls.listing.FileEntry`
]
if name[self]._text begin[:]
variable[lines] assign[=] call[name[iter], parameter[call[name[self]._text.splitlines, parameter[]]]]
variable[sample_lines] assign[=] list[[]]
    for taget[name[line]] in starred[name[lines]] begin[:]
        call[name[sample_lines].append, parameter[name[line]]]
        if compare[call[name[len], parameter[name[sample_lines]]] greater[>] constant[100]] begin[:]
            break
variable[lines] assign[=] call[name[itertools].chain, parameter[name[sample_lines], name[lines]]]
call[name[self].guess_type, parameter[name[sample_lines]]]
variable[datetime_format] assign[=] call[name[wpull].protocol.ftp.ls.date.guess_datetime_format, parameter[name[sample_lines]]]
call[name[self].set_datetime_format, parameter[name[datetime_format]]]
return[call[name[self].parse, parameter[name[lines]]]] | keyword[def] identifier[parse_input] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_text] :
identifier[lines] = identifier[iter] ( identifier[self] . identifier[_text] . identifier[splitlines] ())
keyword[elif] identifier[self] . identifier[_file] :
identifier[lines] = identifier[self] . identifier[_file]
keyword[else] :
identifier[lines] =()
identifier[sample_lines] =[]
keyword[for] identifier[line] keyword[in] identifier[lines] :
identifier[sample_lines] . identifier[append] ( identifier[line] )
keyword[if] identifier[len] ( identifier[sample_lines] )> literal[int] :
keyword[break]
identifier[lines] = identifier[itertools] . identifier[chain] ( identifier[sample_lines] , identifier[lines] )
identifier[self] . identifier[guess_type] ( identifier[sample_lines] )
identifier[datetime_format] = identifier[wpull] . identifier[protocol] . identifier[ftp] . identifier[ls] . identifier[date] . identifier[guess_datetime_format] (
identifier[sample_lines] )
identifier[self] . identifier[set_datetime_format] ( identifier[datetime_format] )
keyword[return] identifier[self] . identifier[parse] ( identifier[lines] ) | def parse_input(self):
"""Parse the listings.
Returns:
        iter: An iterable of :class:`.ftp.ls.listing.FileEntry`
"""
if self._text:
lines = iter(self._text.splitlines()) # depends on [control=['if'], data=[]]
elif self._file:
lines = self._file # depends on [control=['if'], data=[]]
else:
lines = ()
sample_lines = []
    for line in lines:
        sample_lines.append(line)
        if len(sample_lines) > 100:
            break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
lines = itertools.chain(sample_lines, lines)
self.guess_type(sample_lines)
datetime_format = wpull.protocol.ftp.ls.date.guess_datetime_format(sample_lines)
self.set_datetime_format(datetime_format)
return self.parse(lines) |
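# The peek-then-rechain idiom above, isolated as a sketch; assumes the input
# is any iterable of lines, and buffers exactly n lines via islice so nothing
# is lost before the sample is chained back onto the remaining stream.
import itertools
def peek(lines, n=100):
    lines = iter(lines)
    sample = list(itertools.islice(lines, n))
    return sample, itertools.chain(sample, lines)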
def loads(s, **kwargs):
"""Loads JSON object."""
try:
return _engine[0](s)
except _engine[2]:
# except_clause: 'except' [test ['as' NAME]] # grammar for py3x
# except_clause: 'except' [test [('as' | ',') test]] # grammar for py2x
why = sys.exc_info()[1]
raise JSONError(why) | def function[loads, parameter[s]]:
constant[Loads JSON object.]
<ast.Try object at 0x7da18f721d80> | keyword[def] identifier[loads] ( identifier[s] ,** identifier[kwargs] ):
literal[string]
keyword[try] :
keyword[return] identifier[_engine] [ literal[int] ]( identifier[s] )
keyword[except] identifier[_engine] [ literal[int] ]:
identifier[why] = identifier[sys] . identifier[exc_info] ()[ literal[int] ]
keyword[raise] identifier[JSONError] ( identifier[why] ) | def loads(s, **kwargs):
"""Loads JSON object."""
try:
return _engine[0](s) # depends on [control=['try'], data=[]]
except _engine[2]:
# except_clause: 'except' [test ['as' NAME]] # grammar for py3x
# except_clause: 'except' [test [('as' | ',') test]] # grammar for py2x
why = sys.exc_info()[1]
raise JSONError(why) # depends on [control=['except'], data=[]] |
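# A sketch of the backend tuple this relies on (an assumption: _engine holds
# the loader, the dumper, and the backend's decode-error type; stdlib json is
# shown as a fallback, whose decode errors subclass ValueError).
import json
_engine = (json.loads, json.dumps, ValueError)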
def top_i_answers(self, i):
"""获取排名在前几位的答案.
:param int i: 获取前几个
:return: 答案对象,返回生成器
:rtype: Answer.Iterable
"""
for j, a in enumerate(self.answers):
if j <= i - 1:
yield a
else:
return | def function[top_i_answers, parameter[self, i]]:
    constant[Get the answers ranked in the top few positions.
    :param int i: how many leading answers to yield
    :return: answer objects, returned as a generator
:rtype: Answer.Iterable
]
for taget[tuple[[<ast.Name object at 0x7da20e9b24d0>, <ast.Name object at 0x7da20e9b1c60>]]] in starred[call[name[enumerate], parameter[name[self].answers]]] begin[:]
if compare[name[j] less_or_equal[<=] binary_operation[name[i] - constant[1]]] begin[:]
<ast.Yield object at 0x7da20e9b2ef0> | keyword[def] identifier[top_i_answers] ( identifier[self] , identifier[i] ):
literal[string]
keyword[for] identifier[j] , identifier[a] keyword[in] identifier[enumerate] ( identifier[self] . identifier[answers] ):
keyword[if] identifier[j] <= identifier[i] - literal[int] :
keyword[yield] identifier[a]
keyword[else] :
keyword[return] | def top_i_answers(self, i):
"""获取排名在前几位的答案.
:param int i: 获取前几个
:return: 答案对象,返回生成器
:rtype: Answer.Iterable
"""
for (j, a) in enumerate(self.answers):
if j <= i - 1:
yield a # depends on [control=['if'], data=[]]
else:
return # depends on [control=['for'], data=[]] |
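# An equivalent sketch using itertools.islice, assuming self.answers is any
# iterable; islice stops after i items just like the manual counter above.
import itertools
def top_i_answers(self, i):
    return itertools.islice(self.answers, i)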
def _Close(self):
"""Closes the file-like object."""
self._fsntfs_data_stream = None
self._fsntfs_file_entry = None
self._file_system.Close()
self._file_system = None | def function[_Close, parameter[self]]:
constant[Closes the file-like object.]
name[self]._fsntfs_data_stream assign[=] constant[None]
name[self]._fsntfs_file_entry assign[=] constant[None]
call[name[self]._file_system.Close, parameter[]]
name[self]._file_system assign[=] constant[None] | keyword[def] identifier[_Close] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_fsntfs_data_stream] = keyword[None]
identifier[self] . identifier[_fsntfs_file_entry] = keyword[None]
identifier[self] . identifier[_file_system] . identifier[Close] ()
identifier[self] . identifier[_file_system] = keyword[None] | def _Close(self):
"""Closes the file-like object."""
self._fsntfs_data_stream = None
self._fsntfs_file_entry = None
self._file_system.Close()
self._file_system = None |
def get_action_list(self, page=1):
""" Получение списка событий
:param page: (опционально) номер страницы, начинается с 1
:param filter: todo
:return:
"""
kw = {}
if page:
kw['inum'] = page
actions, _ = self._call('getactionlist', **kw)
result_list = []
for action in actions:
result_list.append(ActionInList.object_from_api(action))
return result_list | def function[get_action_list, parameter[self, page]]:
    constant[ Get the list of events
    :param page: (optional) page number, starting at 1
:param filter: todo
:return:
]
variable[kw] assign[=] dictionary[[], []]
if name[page] begin[:]
call[name[kw]][constant[inum]] assign[=] name[page]
<ast.Tuple object at 0x7da2054a7c70> assign[=] call[name[self]._call, parameter[constant[getactionlist]]]
variable[result_list] assign[=] list[[]]
for taget[name[action]] in starred[name[actions]] begin[:]
call[name[result_list].append, parameter[call[name[ActionInList].object_from_api, parameter[name[action]]]]]
return[name[result_list]] | keyword[def] identifier[get_action_list] ( identifier[self] , identifier[page] = literal[int] ):
literal[string]
identifier[kw] ={}
keyword[if] identifier[page] :
identifier[kw] [ literal[string] ]= identifier[page]
identifier[actions] , identifier[_] = identifier[self] . identifier[_call] ( literal[string] ,** identifier[kw] )
identifier[result_list] =[]
keyword[for] identifier[action] keyword[in] identifier[actions] :
identifier[result_list] . identifier[append] ( identifier[ActionInList] . identifier[object_from_api] ( identifier[action] ))
keyword[return] identifier[result_list] | def get_action_list(self, page=1):
""" Получение списка событий
:param page: (опционально) номер страницы, начинается с 1
:param filter: todo
:return:
"""
kw = {}
if page:
kw['inum'] = page # depends on [control=['if'], data=[]]
(actions, _) = self._call('getactionlist', **kw)
result_list = []
for action in actions:
result_list.append(ActionInList.object_from_api(action)) # depends on [control=['for'], data=['action']]
return result_list |
def fwdl_status_output_fwdl_entries_blade_swbd(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fwdl_status = ET.Element("fwdl_status")
config = fwdl_status
output = ET.SubElement(fwdl_status, "output")
fwdl_entries = ET.SubElement(output, "fwdl-entries")
blade_swbd = ET.SubElement(fwdl_entries, "blade-swbd")
blade_swbd.text = kwargs.pop('blade_swbd')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[fwdl_status_output_fwdl_entries_blade_swbd, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[fwdl_status] assign[=] call[name[ET].Element, parameter[constant[fwdl_status]]]
variable[config] assign[=] name[fwdl_status]
variable[output] assign[=] call[name[ET].SubElement, parameter[name[fwdl_status], constant[output]]]
variable[fwdl_entries] assign[=] call[name[ET].SubElement, parameter[name[output], constant[fwdl-entries]]]
variable[blade_swbd] assign[=] call[name[ET].SubElement, parameter[name[fwdl_entries], constant[blade-swbd]]]
name[blade_swbd].text assign[=] call[name[kwargs].pop, parameter[constant[blade_swbd]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[fwdl_status_output_fwdl_entries_blade_swbd] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[fwdl_status] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[fwdl_status]
identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[fwdl_status] , literal[string] )
identifier[fwdl_entries] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] )
identifier[blade_swbd] = identifier[ET] . identifier[SubElement] ( identifier[fwdl_entries] , literal[string] )
identifier[blade_swbd] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def fwdl_status_output_fwdl_entries_blade_swbd(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
fwdl_status = ET.Element('fwdl_status')
config = fwdl_status
output = ET.SubElement(fwdl_status, 'output')
fwdl_entries = ET.SubElement(output, 'fwdl-entries')
blade_swbd = ET.SubElement(fwdl_entries, 'blade-swbd')
blade_swbd.text = kwargs.pop('blade_swbd')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
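# A stand-alone sketch of the tree the builder above assembles (the '42'
# payload is an assumption):
import xml.etree.ElementTree as ET
root = ET.Element('fwdl_status')
entries = ET.SubElement(ET.SubElement(root, 'output'), 'fwdl-entries')
ET.SubElement(entries, 'blade-swbd').text = '42'
print(ET.tostring(root).decode())
# <fwdl_status><output><fwdl-entries><blade-swbd>42</blade-swbd></fwdl-entries></fwdl_status>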
def find_block_end(row, line_list, sentinal, direction=1):
"""
    Searches up and down until it finds the endpoints of a block. Rectify
    with find_paragraph_end in pyvim_funcs.
"""
import re
row_ = row
line_ = line_list[row_]
flag1 = row_ == 0 or row_ == len(line_list) - 1
flag2 = re.match(sentinal, line_)
if not (flag1 or flag2):
while True:
if (row_ == 0 or row_ == len(line_list) - 1):
break
line_ = line_list[row_]
if re.match(sentinal, line_):
break
row_ += direction
return row_ | def function[find_block_end, parameter[row, line_list, sentinal, direction]]:
constant[
    Searches up and down until it finds the endpoints of a block. Rectify
    with find_paragraph_end in pyvim_funcs.
]
import module[re]
variable[row_] assign[=] name[row]
variable[line_] assign[=] call[name[line_list]][name[row_]]
variable[flag1] assign[=] <ast.BoolOp object at 0x7da1b2384e80>
variable[flag2] assign[=] call[name[re].match, parameter[name[sentinal], name[line_]]]
if <ast.UnaryOp object at 0x7da1b2387130> begin[:]
while constant[True] begin[:]
if <ast.BoolOp object at 0x7da1b2385420> begin[:]
break
variable[line_] assign[=] call[name[line_list]][name[row_]]
if call[name[re].match, parameter[name[sentinal], name[line_]]] begin[:]
break
<ast.AugAssign object at 0x7da1b24eb130>
return[name[row_]] | keyword[def] identifier[find_block_end] ( identifier[row] , identifier[line_list] , identifier[sentinal] , identifier[direction] = literal[int] ):
literal[string]
keyword[import] identifier[re]
identifier[row_] = identifier[row]
identifier[line_] = identifier[line_list] [ identifier[row_] ]
identifier[flag1] = identifier[row_] == literal[int] keyword[or] identifier[row_] == identifier[len] ( identifier[line_list] )- literal[int]
identifier[flag2] = identifier[re] . identifier[match] ( identifier[sentinal] , identifier[line_] )
keyword[if] keyword[not] ( identifier[flag1] keyword[or] identifier[flag2] ):
keyword[while] keyword[True] :
keyword[if] ( identifier[row_] == literal[int] keyword[or] identifier[row_] == identifier[len] ( identifier[line_list] )- literal[int] ):
keyword[break]
identifier[line_] = identifier[line_list] [ identifier[row_] ]
keyword[if] identifier[re] . identifier[match] ( identifier[sentinal] , identifier[line_] ):
keyword[break]
identifier[row_] += identifier[direction]
keyword[return] identifier[row_] | def find_block_end(row, line_list, sentinal, direction=1):
"""
    Searches up and down until it finds the endpoints of a block. Rectify
    with find_paragraph_end in pyvim_funcs.
"""
import re
row_ = row
line_ = line_list[row_]
flag1 = row_ == 0 or row_ == len(line_list) - 1
flag2 = re.match(sentinal, line_)
if not (flag1 or flag2):
while True:
if row_ == 0 or row_ == len(line_list) - 1:
break # depends on [control=['if'], data=[]]
line_ = line_list[row_]
if re.match(sentinal, line_):
break # depends on [control=['if'], data=[]]
row_ += direction # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
return row_ |
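# Hypothetical usage: scan downward from inside a docstring to the closing
# triple-quote line; the sentinel regex is an assumption.
lines = ['def f():', '    """', '    body', '    """', '    return 1']
end = find_block_end(2, lines, r'\s*"""', direction=1)
assert end == 3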
def generate(model_num):
"""Generates a new model name, given the model number."""
if model_num == 0:
new_name = 'bootstrap'
else:
new_name = random.choice(NAMES)
full_name = "%06d-%s" % (model_num, new_name)
return full_name | def function[generate, parameter[model_num]]:
constant[Generates a new model name, given the model number.]
if compare[name[model_num] equal[==] constant[0]] begin[:]
variable[new_name] assign[=] constant[bootstrap]
variable[full_name] assign[=] binary_operation[constant[%06d-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f812a10>, <ast.Name object at 0x7da18f8111e0>]]]
return[name[full_name]] | keyword[def] identifier[generate] ( identifier[model_num] ):
literal[string]
keyword[if] identifier[model_num] == literal[int] :
identifier[new_name] = literal[string]
keyword[else] :
identifier[new_name] = identifier[random] . identifier[choice] ( identifier[NAMES] )
identifier[full_name] = literal[string] %( identifier[model_num] , identifier[new_name] )
keyword[return] identifier[full_name] | def generate(model_num):
"""Generates a new model name, given the model number."""
if model_num == 0:
new_name = 'bootstrap' # depends on [control=['if'], data=[]]
else:
new_name = random.choice(NAMES)
full_name = '%06d-%s' % (model_num, new_name)
return full_name |
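# Example outputs, assuming NAMES is a module-level list of words:
import random
NAMES = ['kestrel', 'goshawk']
print(generate(0))    # -> 000000-bootstrap
print(generate(17))   # -> e.g. 000017-goshawk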
def cancel_operation(
self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT
):
"""Starts asynchronous cancellation on a long-running operation.
The server makes a best effort to cancel the operation, but success is
not guaranteed. Clients can use :meth:`get_operation` or service-
specific methods to check whether the cancellation succeeded or whether
the operation completed despite cancellation. On successful
cancellation, the operation is not deleted; instead, it becomes an
operation with an ``Operation.error`` value with a
``google.rpc.Status.code`` of ``1``, corresponding to
``Code.CANCELLED``.
Example:
>>> from google.api_core import operations_v1
>>> api = operations_v1.OperationsClient()
>>> name = ''
>>> api.cancel_operation(name)
Args:
name (str): The name of the operation resource to be cancelled.
retry (google.api_core.retry.Retry): The retry strategy to use
when invoking the RPC. If unspecified, the default retry from
the client configuration will be used. If ``None``, then this
method will not retry the RPC at all.
timeout (float): The amount of time in seconds to wait for the RPC
to complete. Note that if ``retry`` is used, this timeout
applies to each individual attempt and the overall time it
takes for this method to complete may be longer. If
            unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
Raises:
google.api_core.exceptions.MethodNotImplemented: If the server
does not support this method. Services are not required to
implement this method.
google.api_core.exceptions.GoogleAPICallError: If an error occurred
while invoking the RPC, the appropriate ``GoogleAPICallError``
subclass will be raised.
"""
# Create the request object.
request = operations_pb2.CancelOperationRequest(name=name)
self._cancel_operation(request, retry=retry, timeout=timeout) | def function[cancel_operation, parameter[self, name, retry, timeout]]:
constant[Starts asynchronous cancellation on a long-running operation.
The server makes a best effort to cancel the operation, but success is
not guaranteed. Clients can use :meth:`get_operation` or service-
specific methods to check whether the cancellation succeeded or whether
the operation completed despite cancellation. On successful
cancellation, the operation is not deleted; instead, it becomes an
operation with an ``Operation.error`` value with a
``google.rpc.Status.code`` of ``1``, corresponding to
``Code.CANCELLED``.
Example:
>>> from google.api_core import operations_v1
>>> api = operations_v1.OperationsClient()
>>> name = ''
>>> api.cancel_operation(name)
Args:
name (str): The name of the operation resource to be cancelled.
retry (google.api_core.retry.Retry): The retry strategy to use
when invoking the RPC. If unspecified, the default retry from
the client configuration will be used. If ``None``, then this
method will not retry the RPC at all.
timeout (float): The amount of time in seconds to wait for the RPC
to complete. Note that if ``retry`` is used, this timeout
applies to each individual attempt and the overall time it
takes for this method to complete may be longer. If
            unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
Raises:
google.api_core.exceptions.MethodNotImplemented: If the server
does not support this method. Services are not required to
implement this method.
google.api_core.exceptions.GoogleAPICallError: If an error occurred
while invoking the RPC, the appropriate ``GoogleAPICallError``
subclass will be raised.
]
variable[request] assign[=] call[name[operations_pb2].CancelOperationRequest, parameter[]]
call[name[self]._cancel_operation, parameter[name[request]]] | keyword[def] identifier[cancel_operation] (
identifier[self] , identifier[name] , identifier[retry] = identifier[gapic_v1] . identifier[method] . identifier[DEFAULT] , identifier[timeout] = identifier[gapic_v1] . identifier[method] . identifier[DEFAULT]
):
literal[string]
identifier[request] = identifier[operations_pb2] . identifier[CancelOperationRequest] ( identifier[name] = identifier[name] )
identifier[self] . identifier[_cancel_operation] ( identifier[request] , identifier[retry] = identifier[retry] , identifier[timeout] = identifier[timeout] ) | def cancel_operation(self, name, retry=gapic_v1.method.DEFAULT, timeout=gapic_v1.method.DEFAULT):
"""Starts asynchronous cancellation on a long-running operation.
The server makes a best effort to cancel the operation, but success is
not guaranteed. Clients can use :meth:`get_operation` or service-
specific methods to check whether the cancellation succeeded or whether
the operation completed despite cancellation. On successful
cancellation, the operation is not deleted; instead, it becomes an
operation with an ``Operation.error`` value with a
``google.rpc.Status.code`` of ``1``, corresponding to
``Code.CANCELLED``.
Example:
>>> from google.api_core import operations_v1
>>> api = operations_v1.OperationsClient()
>>> name = ''
>>> api.cancel_operation(name)
Args:
name (str): The name of the operation resource to be cancelled.
retry (google.api_core.retry.Retry): The retry strategy to use
when invoking the RPC. If unspecified, the default retry from
the client configuration will be used. If ``None``, then this
method will not retry the RPC at all.
timeout (float): The amount of time in seconds to wait for the RPC
to complete. Note that if ``retry`` is used, this timeout
applies to each individual attempt and the overall time it
takes for this method to complete may be longer. If
            unspecified, the default timeout in the client
configuration is used. If ``None``, then the RPC method will
not time out.
Raises:
google.api_core.exceptions.MethodNotImplemented: If the server
does not support this method. Services are not required to
implement this method.
google.api_core.exceptions.GoogleAPICallError: If an error occurred
while invoking the RPC, the appropriate ``GoogleAPICallError``
subclass will be raised.
"""
# Create the request object.
request = operations_pb2.CancelOperationRequest(name=name)
self._cancel_operation(request, retry=retry, timeout=timeout) |
def getFileNameMime(self, requestedUrl, *args, **kwargs):
'''
    Given a requested page (note: the arguments for this call are forwarded to getpage()),
return the content at the target URL, the filename for the target content, and
the mimetype for the content at the target URL, as a 3-tuple (pgctnt, hName, mime).
The filename specified in the content-disposition header is used, if present. Otherwise,
the last section of the url path segment is treated as the filename.
'''
if 'returnMultiple' in kwargs:
raise Exceptions.ArgumentError("getFileAndName cannot be called with 'returnMultiple'", requestedUrl)
if 'soup' in kwargs and kwargs['soup']:
raise Exceptions.ArgumentError("getFileAndName contradicts the 'soup' directive!", requestedUrl)
kwargs["returnMultiple"] = True
pgctnt, pghandle = self.getpage(requestedUrl, *args, **kwargs)
info = pghandle.info()
if not 'Content-Disposition' in info:
hName = ''
elif not 'filename=' in info['Content-Disposition']:
hName = ''
else:
hName = info['Content-Disposition'].split('filename=')[1]
# Unquote filename if it's quoted.
if ((hName.startswith("'") and hName.endswith("'")) or hName.startswith('"') and hName.endswith('"')) and len(hName) >= 2:
hName = hName[1:-1]
mime = info.get_content_type()
if not hName.strip():
requestedUrl = pghandle.geturl()
hName = urllib.parse.urlsplit(requestedUrl).path.split("/")[-1].strip()
if "/" in hName:
hName = hName.split("/")[-1]
return pgctnt, hName, mime | def function[getFileNameMime, parameter[self, requestedUrl]]:
constant[
    Given a requested page (note: the arguments for this call are forwarded to getpage()),
return the content at the target URL, the filename for the target content, and
the mimetype for the content at the target URL, as a 3-tuple (pgctnt, hName, mime).
The filename specified in the content-disposition header is used, if present. Otherwise,
the last section of the url path segment is treated as the filename.
]
if compare[constant[returnMultiple] in name[kwargs]] begin[:]
<ast.Raise object at 0x7da1b26ad600>
if <ast.BoolOp object at 0x7da1b26ad630> begin[:]
<ast.Raise object at 0x7da1b26af6d0>
call[name[kwargs]][constant[returnMultiple]] assign[=] constant[True]
<ast.Tuple object at 0x7da1b26ac6d0> assign[=] call[name[self].getpage, parameter[name[requestedUrl], <ast.Starred object at 0x7da1b26ad4e0>]]
variable[info] assign[=] call[name[pghandle].info, parameter[]]
if <ast.UnaryOp object at 0x7da1b0b39420> begin[:]
variable[hName] assign[=] constant[]
variable[mime] assign[=] call[name[info].get_content_type, parameter[]]
if <ast.UnaryOp object at 0x7da1b0b3a860> begin[:]
variable[requestedUrl] assign[=] call[name[pghandle].geturl, parameter[]]
variable[hName] assign[=] call[call[call[call[name[urllib].parse.urlsplit, parameter[name[requestedUrl]]].path.split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da1b26ac5e0>].strip, parameter[]]
if compare[constant[/] in name[hName]] begin[:]
variable[hName] assign[=] call[call[name[hName].split, parameter[constant[/]]]][<ast.UnaryOp object at 0x7da1b26ae290>]
return[tuple[[<ast.Name object at 0x7da1b26ac340>, <ast.Name object at 0x7da1b26adea0>, <ast.Name object at 0x7da1b26ac880>]]] | keyword[def] identifier[getFileNameMime] ( identifier[self] , identifier[requestedUrl] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
keyword[raise] identifier[Exceptions] . identifier[ArgumentError] ( literal[string] , identifier[requestedUrl] )
keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[kwargs] [ literal[string] ]:
keyword[raise] identifier[Exceptions] . identifier[ArgumentError] ( literal[string] , identifier[requestedUrl] )
identifier[kwargs] [ literal[string] ]= keyword[True]
identifier[pgctnt] , identifier[pghandle] = identifier[self] . identifier[getpage] ( identifier[requestedUrl] ,* identifier[args] ,** identifier[kwargs] )
identifier[info] = identifier[pghandle] . identifier[info] ()
keyword[if] keyword[not] literal[string] keyword[in] identifier[info] :
identifier[hName] = literal[string]
keyword[elif] keyword[not] literal[string] keyword[in] identifier[info] [ literal[string] ]:
identifier[hName] = literal[string]
keyword[else] :
identifier[hName] = identifier[info] [ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] ]
keyword[if] (( identifier[hName] . identifier[startswith] ( literal[string] ) keyword[and] identifier[hName] . identifier[endswith] ( literal[string] )) keyword[or] identifier[hName] . identifier[startswith] ( literal[string] ) keyword[and] identifier[hName] . identifier[endswith] ( literal[string] )) keyword[and] identifier[len] ( identifier[hName] )>= literal[int] :
identifier[hName] = identifier[hName] [ literal[int] :- literal[int] ]
identifier[mime] = identifier[info] . identifier[get_content_type] ()
keyword[if] keyword[not] identifier[hName] . identifier[strip] ():
identifier[requestedUrl] = identifier[pghandle] . identifier[geturl] ()
identifier[hName] = identifier[urllib] . identifier[parse] . identifier[urlsplit] ( identifier[requestedUrl] ). identifier[path] . identifier[split] ( literal[string] )[- literal[int] ]. identifier[strip] ()
keyword[if] literal[string] keyword[in] identifier[hName] :
identifier[hName] = identifier[hName] . identifier[split] ( literal[string] )[- literal[int] ]
keyword[return] identifier[pgctnt] , identifier[hName] , identifier[mime] | def getFileNameMime(self, requestedUrl, *args, **kwargs):
"""
    Given a requested page (note: the arguments for this call are forwarded to getpage()),
return the content at the target URL, the filename for the target content, and
the mimetype for the content at the target URL, as a 3-tuple (pgctnt, hName, mime).
The filename specified in the content-disposition header is used, if present. Otherwise,
the last section of the url path segment is treated as the filename.
"""
if 'returnMultiple' in kwargs:
raise Exceptions.ArgumentError("getFileAndName cannot be called with 'returnMultiple'", requestedUrl) # depends on [control=['if'], data=[]]
if 'soup' in kwargs and kwargs['soup']:
raise Exceptions.ArgumentError("getFileAndName contradicts the 'soup' directive!", requestedUrl) # depends on [control=['if'], data=[]]
kwargs['returnMultiple'] = True
(pgctnt, pghandle) = self.getpage(requestedUrl, *args, **kwargs)
info = pghandle.info()
if not 'Content-Disposition' in info:
hName = '' # depends on [control=['if'], data=[]]
elif not 'filename=' in info['Content-Disposition']:
hName = '' # depends on [control=['if'], data=[]]
else:
hName = info['Content-Disposition'].split('filename=')[1] # Unquote filename if it's quoted.
if (hName.startswith("'") and hName.endswith("'") or (hName.startswith('"') and hName.endswith('"'))) and len(hName) >= 2:
hName = hName[1:-1] # depends on [control=['if'], data=[]]
mime = info.get_content_type()
if not hName.strip():
requestedUrl = pghandle.geturl()
hName = urllib.parse.urlsplit(requestedUrl).path.split('/')[-1].strip() # depends on [control=['if'], data=[]]
if '/' in hName:
hName = hName.split('/')[-1] # depends on [control=['if'], data=['hName']]
return (pgctnt, hName, mime) |
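# Hedged usage sketch; the grabber instance and URL are hypothetical.
content, name, mime = grabber.getFileNameMime('https://example.org/files/report.pdf')
# name comes from Content-Disposition when present, otherwise 'report.pdf'
# from the URL path; mime is the response's declared content type.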
def uddc(udfunc, x, dx):
"""
SPICE private routine intended solely for the support of SPICE
routines. Users should not call this routine directly due to the
volatile nature of this routine.
This routine calculates the derivative of 'udfunc' with respect
to time for 'et', then determines if the derivative has a
negative value.
    Use the @spiceypy.utils.callbacks.SpiceUDFUNS decorator to wrap
a given python function that takes one parameter (float) and
returns a float. For example::
@spiceypy.utils.callbacks.SpiceUDFUNS
def udfunc(et_in):
pos, new_et = spice.spkpos("MERCURY", et_in, "J2000", "LT+S", "MOON")
return new_et
deriv = spice.uddf(udfunc, et, 1.0)
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/uddc_c.html
:param udfunc: Name of the routine that computes the scalar value of interest.
:type udfunc: ctypes.CFunctionType
:param x: Independent variable of 'udfunc'.
:type x: float
:param dx: Interval from 'x' for derivative calculation.
:type dx: float
:return: Boolean indicating if the derivative is negative.
:rtype: bool
"""
x = ctypes.c_double(x)
dx = ctypes.c_double(dx)
isdescr = ctypes.c_int()
libspice.uddc_c(udfunc, x, dx, ctypes.byref(isdescr))
return bool(isdescr.value) | def function[uddc, parameter[udfunc, x, dx]]:
constant[
SPICE private routine intended solely for the support of SPICE
routines. Users should not call this routine directly due to the
volatile nature of this routine.
This routine calculates the derivative of 'udfunc' with respect
to time for 'et', then determines if the derivative has a
negative value.
    Use the @spiceypy.utils.callbacks.SpiceUDFUNS decorator to wrap
a given python function that takes one parameter (float) and
returns a float. For example::
@spiceypy.utils.callbacks.SpiceUDFUNS
def udfunc(et_in):
pos, new_et = spice.spkpos("MERCURY", et_in, "J2000", "LT+S", "MOON")
return new_et
deriv = spice.uddf(udfunc, et, 1.0)
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/uddc_c.html
:param udfunc: Name of the routine that computes the scalar value of interest.
:type udfunc: ctypes.CFunctionType
:param x: Independent variable of 'udfunc'.
:type x: float
:param dx: Interval from 'x' for derivative calculation.
:type dx: float
:return: Boolean indicating if the derivative is negative.
:rtype: bool
]
variable[x] assign[=] call[name[ctypes].c_double, parameter[name[x]]]
variable[dx] assign[=] call[name[ctypes].c_double, parameter[name[dx]]]
variable[isdescr] assign[=] call[name[ctypes].c_int, parameter[]]
call[name[libspice].uddc_c, parameter[name[udfunc], name[x], name[dx], call[name[ctypes].byref, parameter[name[isdescr]]]]]
return[call[name[bool], parameter[name[isdescr].value]]] | keyword[def] identifier[uddc] ( identifier[udfunc] , identifier[x] , identifier[dx] ):
literal[string]
identifier[x] = identifier[ctypes] . identifier[c_double] ( identifier[x] )
identifier[dx] = identifier[ctypes] . identifier[c_double] ( identifier[dx] )
identifier[isdescr] = identifier[ctypes] . identifier[c_int] ()
identifier[libspice] . identifier[uddc_c] ( identifier[udfunc] , identifier[x] , identifier[dx] , identifier[ctypes] . identifier[byref] ( identifier[isdescr] ))
keyword[return] identifier[bool] ( identifier[isdescr] . identifier[value] ) | def uddc(udfunc, x, dx):
"""
SPICE private routine intended solely for the support of SPICE
routines. Users should not call this routine directly due to the
volatile nature of this routine.
This routine calculates the derivative of 'udfunc' with respect
to time for 'et', then determines if the derivative has a
negative value.
    Use the @spiceypy.utils.callbacks.SpiceUDFUNS decorator to wrap
a given python function that takes one parameter (float) and
returns a float. For example::
@spiceypy.utils.callbacks.SpiceUDFUNS
def udfunc(et_in):
pos, new_et = spice.spkpos("MERCURY", et_in, "J2000", "LT+S", "MOON")
return new_et
deriv = spice.uddf(udfunc, et, 1.0)
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/uddc_c.html
:param udfunc: Name of the routine that computes the scalar value of interest.
:type udfunc: ctypes.CFunctionType
:param x: Independent variable of 'udfunc'.
:type x: float
:param dx: Interval from 'x' for derivative calculation.
:type dx: float
:return: Boolean indicating if the derivative is negative.
:rtype: bool
"""
x = ctypes.c_double(x)
dx = ctypes.c_double(dx)
isdescr = ctypes.c_int()
libspice.uddc_c(udfunc, x, dx, ctypes.byref(isdescr))
return bool(isdescr.value) |
def dok15_s(k15):
"""
calculates least-squares matrix for 15 measurements from Jelinek [1976]
"""
#
A, B = design(15) # get design matrix for 15 measurements
sbar = np.dot(B, k15) # get mean s
t = (sbar[0] + sbar[1] + sbar[2]) # trace
bulk = old_div(t, 3.) # bulk susceptibility
Kbar = np.dot(A, sbar) # get best fit values for K
dels = k15 - Kbar # get deltas
dels, sbar = old_div(dels, t), old_div(sbar, t) # normalize by trace
So = sum(dels**2)
sigma = np.sqrt(old_div(So, 9.)) # standard deviation
return sbar, sigma, bulk | def function[dok15_s, parameter[k15]]:
constant[
calculates least-squares matrix for 15 measurements from Jelinek [1976]
]
<ast.Tuple object at 0x7da204963730> assign[=] call[name[design], parameter[constant[15]]]
variable[sbar] assign[=] call[name[np].dot, parameter[name[B], name[k15]]]
variable[t] assign[=] binary_operation[binary_operation[call[name[sbar]][constant[0]] + call[name[sbar]][constant[1]]] + call[name[sbar]][constant[2]]]
variable[bulk] assign[=] call[name[old_div], parameter[name[t], constant[3.0]]]
variable[Kbar] assign[=] call[name[np].dot, parameter[name[A], name[sbar]]]
variable[dels] assign[=] binary_operation[name[k15] - name[Kbar]]
<ast.Tuple object at 0x7da2054a5630> assign[=] tuple[[<ast.Call object at 0x7da2054a43d0>, <ast.Call object at 0x7da2054a6f20>]]
variable[So] assign[=] call[name[sum], parameter[binary_operation[name[dels] ** constant[2]]]]
variable[sigma] assign[=] call[name[np].sqrt, parameter[call[name[old_div], parameter[name[So], constant[9.0]]]]]
return[tuple[[<ast.Name object at 0x7da2054a7910>, <ast.Name object at 0x7da2054a6260>, <ast.Name object at 0x7da2054a7a30>]]] | keyword[def] identifier[dok15_s] ( identifier[k15] ):
literal[string]
identifier[A] , identifier[B] = identifier[design] ( literal[int] )
identifier[sbar] = identifier[np] . identifier[dot] ( identifier[B] , identifier[k15] )
identifier[t] =( identifier[sbar] [ literal[int] ]+ identifier[sbar] [ literal[int] ]+ identifier[sbar] [ literal[int] ])
identifier[bulk] = identifier[old_div] ( identifier[t] , literal[int] )
identifier[Kbar] = identifier[np] . identifier[dot] ( identifier[A] , identifier[sbar] )
identifier[dels] = identifier[k15] - identifier[Kbar]
identifier[dels] , identifier[sbar] = identifier[old_div] ( identifier[dels] , identifier[t] ), identifier[old_div] ( identifier[sbar] , identifier[t] )
identifier[So] = identifier[sum] ( identifier[dels] ** literal[int] )
identifier[sigma] = identifier[np] . identifier[sqrt] ( identifier[old_div] ( identifier[So] , literal[int] ))
keyword[return] identifier[sbar] , identifier[sigma] , identifier[bulk] | def dok15_s(k15):
"""
calculates least-squares matrix for 15 measurements from Jelinek [1976]
"""
#
(A, B) = design(15) # get design matrix for 15 measurements
sbar = np.dot(B, k15) # get mean s
t = sbar[0] + sbar[1] + sbar[2] # trace
bulk = old_div(t, 3.0) # bulk susceptibility
Kbar = np.dot(A, sbar) # get best fit values for K
dels = k15 - Kbar # get deltas
(dels, sbar) = (old_div(dels, t), old_div(sbar, t)) # normalize by trace
So = sum(dels ** 2)
sigma = np.sqrt(old_div(So, 9.0)) # standard deviation
return (sbar, sigma, bulk) |
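# A small numeric sketch of the error statistic above (values illustrative):
# residuals are trace-normalized, then sigma = sqrt(sum(dels**2) / 9), the 9
# being 15 measurements minus 6 fitted tensor elements.
import numpy as np
dels = np.array([0.01, -0.02, 0.005])
sigma = np.sqrt(np.sum(dels ** 2) / 9.0)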
def adduser(username, password=None, shell='/bin/bash',
system_user=False, primary_group=None,
secondary_groups=None, uid=None, home_dir=None):
"""Add a user to the system.
Will log but otherwise succeed if the user already exists.
:param str username: Username to create
:param str password: Password for user; if ``None``, create a system user
:param str shell: The default shell for the user
:param bool system_user: Whether to create a login or system user
:param str primary_group: Primary group for user; defaults to username
:param list secondary_groups: Optional list of additional groups
:param int uid: UID for user being created
:param str home_dir: Home directory for user
:returns: The password database entry struct, as returned by `pwd.getpwnam`
"""
try:
user_info = pwd.getpwnam(username)
log('user {0} already exists!'.format(username))
if uid:
user_info = pwd.getpwuid(int(uid))
log('user with uid {0} already exists!'.format(uid))
except KeyError:
log('creating user {0}'.format(username))
cmd = ['useradd']
if uid:
cmd.extend(['--uid', str(uid)])
if home_dir:
cmd.extend(['--home', str(home_dir)])
if system_user or password is None:
cmd.append('--system')
else:
cmd.extend([
'--create-home',
'--shell', shell,
'--password', password,
])
if not primary_group:
try:
grp.getgrnam(username)
primary_group = username # avoid "group exists" error
except KeyError:
pass
if primary_group:
cmd.extend(['-g', primary_group])
if secondary_groups:
cmd.extend(['-G', ','.join(secondary_groups)])
cmd.append(username)
subprocess.check_call(cmd)
user_info = pwd.getpwnam(username)
return user_info | def function[adduser, parameter[username, password, shell, system_user, primary_group, secondary_groups, uid, home_dir]]:
constant[Add a user to the system.
Will log but otherwise succeed if the user already exists.
:param str username: Username to create
:param str password: Password for user; if ``None``, create a system user
:param str shell: The default shell for the user
:param bool system_user: Whether to create a login or system user
:param str primary_group: Primary group for user; defaults to username
:param list secondary_groups: Optional list of additional groups
:param int uid: UID for user being created
:param str home_dir: Home directory for user
:returns: The password database entry struct, as returned by `pwd.getpwnam`
]
<ast.Try object at 0x7da18f09f610>
return[name[user_info]] | keyword[def] identifier[adduser] ( identifier[username] , identifier[password] = keyword[None] , identifier[shell] = literal[string] ,
identifier[system_user] = keyword[False] , identifier[primary_group] = keyword[None] ,
identifier[secondary_groups] = keyword[None] , identifier[uid] = keyword[None] , identifier[home_dir] = keyword[None] ):
literal[string]
keyword[try] :
identifier[user_info] = identifier[pwd] . identifier[getpwnam] ( identifier[username] )
identifier[log] ( literal[string] . identifier[format] ( identifier[username] ))
keyword[if] identifier[uid] :
identifier[user_info] = identifier[pwd] . identifier[getpwuid] ( identifier[int] ( identifier[uid] ))
identifier[log] ( literal[string] . identifier[format] ( identifier[uid] ))
keyword[except] identifier[KeyError] :
identifier[log] ( literal[string] . identifier[format] ( identifier[username] ))
identifier[cmd] =[ literal[string] ]
keyword[if] identifier[uid] :
identifier[cmd] . identifier[extend] ([ literal[string] , identifier[str] ( identifier[uid] )])
keyword[if] identifier[home_dir] :
identifier[cmd] . identifier[extend] ([ literal[string] , identifier[str] ( identifier[home_dir] )])
keyword[if] identifier[system_user] keyword[or] identifier[password] keyword[is] keyword[None] :
identifier[cmd] . identifier[append] ( literal[string] )
keyword[else] :
identifier[cmd] . identifier[extend] ([
literal[string] ,
literal[string] , identifier[shell] ,
literal[string] , identifier[password] ,
])
keyword[if] keyword[not] identifier[primary_group] :
keyword[try] :
identifier[grp] . identifier[getgrnam] ( identifier[username] )
identifier[primary_group] = identifier[username]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[if] identifier[primary_group] :
identifier[cmd] . identifier[extend] ([ literal[string] , identifier[primary_group] ])
keyword[if] identifier[secondary_groups] :
identifier[cmd] . identifier[extend] ([ literal[string] , literal[string] . identifier[join] ( identifier[secondary_groups] )])
identifier[cmd] . identifier[append] ( identifier[username] )
identifier[subprocess] . identifier[check_call] ( identifier[cmd] )
identifier[user_info] = identifier[pwd] . identifier[getpwnam] ( identifier[username] )
keyword[return] identifier[user_info] | def adduser(username, password=None, shell='/bin/bash', system_user=False, primary_group=None, secondary_groups=None, uid=None, home_dir=None):
"""Add a user to the system.
Will log but otherwise succeed if the user already exists.
:param str username: Username to create
:param str password: Password for user; if ``None``, create a system user
:param str shell: The default shell for the user
:param bool system_user: Whether to create a login or system user
:param str primary_group: Primary group for user; defaults to username
:param list secondary_groups: Optional list of additional groups
:param int uid: UID for user being created
:param str home_dir: Home directory for user
:returns: The password database entry struct, as returned by `pwd.getpwnam`
"""
try:
user_info = pwd.getpwnam(username)
log('user {0} already exists!'.format(username))
if uid:
user_info = pwd.getpwuid(int(uid))
log('user with uid {0} already exists!'.format(uid)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except KeyError:
log('creating user {0}'.format(username))
cmd = ['useradd']
if uid:
cmd.extend(['--uid', str(uid)]) # depends on [control=['if'], data=[]]
if home_dir:
cmd.extend(['--home', str(home_dir)]) # depends on [control=['if'], data=[]]
if system_user or password is None:
cmd.append('--system') # depends on [control=['if'], data=[]]
else:
cmd.extend(['--create-home', '--shell', shell, '--password', password])
if not primary_group:
try:
grp.getgrnam(username)
primary_group = username # avoid "group exists" error # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
if primary_group:
cmd.extend(['-g', primary_group]) # depends on [control=['if'], data=[]]
if secondary_groups:
cmd.extend(['-G', ','.join(secondary_groups)]) # depends on [control=['if'], data=[]]
cmd.append(username)
subprocess.check_call(cmd)
user_info = pwd.getpwnam(username) # depends on [control=['except'], data=[]]
return user_info |
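# Hypothetical calls (usernames, uid and groups are assumptions; both need
# root, and the password must already be crypt(3)-hashed for useradd):
adduser('prometheus', system_user=True, home_dir='/var/lib/prometheus')
adduser('deploy', password='<crypted>', uid=1500,
        primary_group='deploy', secondary_groups=['docker', 'sudo'])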
def post_message(session, thread_id, message):
"""
Add a message to a thread
"""
headers = {
'Content-Type': 'application/x-www-form-urlencoded'
}
message_data = {
'message': message,
}
# POST /api/messages/0.1/threads/{thread_id}/messages/
endpoint = 'threads/{}/messages'.format(thread_id)
response = make_post_request(session, endpoint, headers,
form_data=message_data)
json_data = response.json()
if response.status_code == 200:
return Message(json_data['result'])
else:
raise MessageNotCreatedException(message=json_data['message'],
error_code=json_data['error_code'],
request_id=json_data['request_id']) | def function[post_message, parameter[session, thread_id, message]]:
constant[
Add a message to a thread
]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b0004cd0>], [<ast.Constant object at 0x7da1b0005ba0>]]
variable[message_data] assign[=] dictionary[[<ast.Constant object at 0x7da1b0005e70>], [<ast.Name object at 0x7da1b0004ee0>]]
variable[endpoint] assign[=] call[constant[threads/{}/messages].format, parameter[name[thread_id]]]
variable[response] assign[=] call[name[make_post_request], parameter[name[session], name[endpoint], name[headers]]]
variable[json_data] assign[=] call[name[response].json, parameter[]]
if compare[name[response].status_code equal[==] constant[200]] begin[:]
return[call[name[Message], parameter[call[name[json_data]][constant[result]]]]] | keyword[def] identifier[post_message] ( identifier[session] , identifier[thread_id] , identifier[message] ):
literal[string]
identifier[headers] ={
literal[string] : literal[string]
}
identifier[message_data] ={
literal[string] : identifier[message] ,
}
identifier[endpoint] = literal[string] . identifier[format] ( identifier[thread_id] )
identifier[response] = identifier[make_post_request] ( identifier[session] , identifier[endpoint] , identifier[headers] ,
identifier[form_data] = identifier[message_data] )
identifier[json_data] = identifier[response] . identifier[json] ()
keyword[if] identifier[response] . identifier[status_code] == literal[int] :
keyword[return] identifier[Message] ( identifier[json_data] [ literal[string] ])
keyword[else] :
keyword[raise] identifier[MessageNotCreatedException] ( identifier[message] = identifier[json_data] [ literal[string] ],
identifier[error_code] = identifier[json_data] [ literal[string] ],
identifier[request_id] = identifier[json_data] [ literal[string] ]) | def post_message(session, thread_id, message):
"""
Add a message to a thread
"""
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
message_data = {'message': message}
# POST /api/messages/0.1/threads/{thread_id}/messages/
endpoint = 'threads/{}/messages'.format(thread_id)
response = make_post_request(session, endpoint, headers, form_data=message_data)
json_data = response.json()
if response.status_code == 200:
return Message(json_data['result']) # depends on [control=['if'], data=[]]
else:
raise MessageNotCreatedException(message=json_data['message'], error_code=json_data['error_code'], request_id=json_data['request_id']) |
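# Hypothetical usage, assuming an authenticated API session object:
msg = post_message(session, thread_id=1234, message='Thanks, merging now.')
# On HTTP 200 this wraps the API's 'result' payload in a Message object;
# otherwise MessageNotCreatedException carries the error details.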
def getCmd(snmpEngine, authData, transportTarget, contextData,
*varBinds, **options):
"""Creates a generator to perform one or more SNMP GET queries.
    On each iteration, a new SNMP GET request is sent (:RFC:`1905#section-4.2.1`).
    The iterator blocks waiting for a response to arrive or an error to occur.
Parameters
----------
snmpEngine : :py:class:`~pysnmp.hlapi.SnmpEngine`
Class instance representing SNMP engine.
authData : :py:class:`~pysnmp.hlapi.CommunityData` or :py:class:`~pysnmp.hlapi.UsmUserData`
Class instance representing SNMP credentials.
transportTarget : :py:class:`~pysnmp.hlapi.asyncore.UdpTransportTarget` or :py:class:`~pysnmp.hlapi.asyncore.Udp6TransportTarget`
Class instance representing transport type along with SNMP peer address.
contextData : :py:class:`~pysnmp.hlapi.ContextData`
Class instance representing SNMP ContextEngineId and ContextName values.
\*varBinds : :py:class:`~pysnmp.smi.rfc1902.ObjectType`
One or more class instances representing MIB variables to place
into SNMP request.
Other Parameters
----------------
\*\*options :
Request options:
* `lookupMib` - load MIB and resolve response MIB variables at
the cost of slightly reduced performance. Default is `True`.
Yields
------
errorIndication : str
True value indicates SNMP engine error.
errorStatus : str
True value indicates SNMP PDU error.
errorIndex : int
Non-zero value refers to `varBinds[errorIndex-1]`
varBinds : tuple
A sequence of :py:class:`~pysnmp.smi.rfc1902.ObjectType` class
instances representing MIB variables returned in SNMP response.
Raises
------
PySnmpError
Or its derivative indicating that an error occurred while
performing SNMP operation.
Notes
-----
The `getCmd` generator will be exhausted immediately unless
    a new sequence of `varBinds` is sent back into the running generator
(supported since Python 2.6).
Examples
--------
>>> from pysnmp.hlapi import *
>>> g = getCmd(SnmpEngine(),
... CommunityData('public'),
... UdpTransportTarget(('demo.snmplabs.com', 161)),
... ContextData(),
... ObjectType(ObjectIdentity('SNMPv2-MIB', 'sysDescr', 0)))
>>> next(g)
(None, 0, 0, [ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.1.0')), DisplayString('SunOS zeus.snmplabs.com 4.1.3_U1 1 sun4m'))])
>>>
"""
# noinspection PyShadowingNames
def cbFun(snmpEngine, sendRequestHandle,
errorIndication, errorStatus, errorIndex,
varBinds, cbCtx):
cbCtx['errorIndication'] = errorIndication
cbCtx['errorStatus'] = errorStatus
cbCtx['errorIndex'] = errorIndex
cbCtx['varBinds'] = varBinds
cbCtx = {}
while True:
if varBinds:
cmdgen.getCmd(snmpEngine, authData, transportTarget,
contextData, *varBinds,
cbFun=cbFun, cbCtx=cbCtx,
lookupMib=options.get('lookupMib', True))
snmpEngine.transportDispatcher.runDispatcher()
errorIndication = cbCtx['errorIndication']
errorStatus = cbCtx['errorStatus']
errorIndex = cbCtx['errorIndex']
varBinds = cbCtx['varBinds']
else:
errorIndication = errorStatus = errorIndex = None
varBinds = []
varBinds = (yield errorIndication, errorStatus, errorIndex, varBinds)
if not varBinds:
break | def function[getCmd, parameter[snmpEngine, authData, transportTarget, contextData]]:
constant[Creates a generator to perform one or more SNMP GET queries.
On each iteration, a new SNMP GET request is sent (:RFC:`1905#section-4.2.1`).
The iterator blocks waiting for a response to arrive or an error to occur.
Parameters
----------
snmpEngine : :py:class:`~pysnmp.hlapi.SnmpEngine`
Class instance representing SNMP engine.
authData : :py:class:`~pysnmp.hlapi.CommunityData` or :py:class:`~pysnmp.hlapi.UsmUserData`
Class instance representing SNMP credentials.
transportTarget : :py:class:`~pysnmp.hlapi.asyncore.UdpTransportTarget` or :py:class:`~pysnmp.hlapi.asyncore.Udp6TransportTarget`
Class instance representing transport type along with SNMP peer address.
contextData : :py:class:`~pysnmp.hlapi.ContextData`
Class instance representing SNMP ContextEngineId and ContextName values.
\*varBinds : :py:class:`~pysnmp.smi.rfc1902.ObjectType`
One or more class instances representing MIB variables to place
into SNMP request.
Other Parameters
----------------
\*\*options :
Request options:
* `lookupMib` - load MIB and resolve response MIB variables at
the cost of slightly reduced performance. Default is `True`.
Yields
------
errorIndication : str
True value indicates SNMP engine error.
errorStatus : str
True value indicates SNMP PDU error.
errorIndex : int
Non-zero value refers to `varBinds[errorIndex-1]`
varBinds : tuple
A sequence of :py:class:`~pysnmp.smi.rfc1902.ObjectType` class
instances representing MIB variables returned in SNMP response.
Raises
------
PySnmpError
Or its derivative indicating that an error occurred while
performing SNMP operation.
Notes
-----
The `getCmd` generator will be exhausted immediately unless
a new sequence of `varBinds` is sent back into the running generator
(supported since Python 2.6).
Examples
--------
>>> from pysnmp.hlapi import *
>>> g = getCmd(SnmpEngine(),
... CommunityData('public'),
... UdpTransportTarget(('demo.snmplabs.com', 161)),
... ContextData(),
... ObjectType(ObjectIdentity('SNMPv2-MIB', 'sysDescr', 0)))
>>> next(g)
(None, 0, 0, [ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.1.0')), DisplayString('SunOS zeus.snmplabs.com 4.1.3_U1 1 sun4m'))])
>>>
]
def function[cbFun, parameter[snmpEngine, sendRequestHandle, errorIndication, errorStatus, errorIndex, varBinds, cbCtx]]:
call[name[cbCtx]][constant[errorIndication]] assign[=] name[errorIndication]
call[name[cbCtx]][constant[errorStatus]] assign[=] name[errorStatus]
call[name[cbCtx]][constant[errorIndex]] assign[=] name[errorIndex]
call[name[cbCtx]][constant[varBinds]] assign[=] name[varBinds]
variable[cbCtx] assign[=] dictionary[[], []]
while constant[True] begin[:]
if name[varBinds] begin[:]
call[name[cmdgen].getCmd, parameter[name[snmpEngine], name[authData], name[transportTarget], name[contextData], <ast.Starred object at 0x7da20c76ec50>]]
call[name[snmpEngine].transportDispatcher.runDispatcher, parameter[]]
variable[errorIndication] assign[=] call[name[cbCtx]][constant[errorIndication]]
variable[errorStatus] assign[=] call[name[cbCtx]][constant[errorStatus]]
variable[errorIndex] assign[=] call[name[cbCtx]][constant[errorIndex]]
variable[varBinds] assign[=] call[name[cbCtx]][constant[varBinds]]
variable[varBinds] assign[=] <ast.Yield object at 0x7da20c76d4e0>
if <ast.UnaryOp object at 0x7da20c76f4f0> begin[:]
break | keyword[def] identifier[getCmd] ( identifier[snmpEngine] , identifier[authData] , identifier[transportTarget] , identifier[contextData] ,
* identifier[varBinds] ,** identifier[options] ):
literal[string]
keyword[def] identifier[cbFun] ( identifier[snmpEngine] , identifier[sendRequestHandle] ,
identifier[errorIndication] , identifier[errorStatus] , identifier[errorIndex] ,
identifier[varBinds] , identifier[cbCtx] ):
identifier[cbCtx] [ literal[string] ]= identifier[errorIndication]
identifier[cbCtx] [ literal[string] ]= identifier[errorStatus]
identifier[cbCtx] [ literal[string] ]= identifier[errorIndex]
identifier[cbCtx] [ literal[string] ]= identifier[varBinds]
identifier[cbCtx] ={}
keyword[while] keyword[True] :
keyword[if] identifier[varBinds] :
identifier[cmdgen] . identifier[getCmd] ( identifier[snmpEngine] , identifier[authData] , identifier[transportTarget] ,
identifier[contextData] ,* identifier[varBinds] ,
identifier[cbFun] = identifier[cbFun] , identifier[cbCtx] = identifier[cbCtx] ,
identifier[lookupMib] = identifier[options] . identifier[get] ( literal[string] , keyword[True] ))
identifier[snmpEngine] . identifier[transportDispatcher] . identifier[runDispatcher] ()
identifier[errorIndication] = identifier[cbCtx] [ literal[string] ]
identifier[errorStatus] = identifier[cbCtx] [ literal[string] ]
identifier[errorIndex] = identifier[cbCtx] [ literal[string] ]
identifier[varBinds] = identifier[cbCtx] [ literal[string] ]
keyword[else] :
identifier[errorIndication] = identifier[errorStatus] = identifier[errorIndex] = keyword[None]
identifier[varBinds] =[]
identifier[varBinds] =( keyword[yield] identifier[errorIndication] , identifier[errorStatus] , identifier[errorIndex] , identifier[varBinds] )
keyword[if] keyword[not] identifier[varBinds] :
keyword[break] | def getCmd(snmpEngine, authData, transportTarget, contextData, *varBinds, **options):
"""Creates a generator to perform one or more SNMP GET queries.
On each iteration, a new SNMP GET request is sent (:RFC:`1905#section-4.2.1`).
The iterator blocks waiting for a response to arrive or an error to occur.
Parameters
----------
snmpEngine : :py:class:`~pysnmp.hlapi.SnmpEngine`
Class instance representing SNMP engine.
authData : :py:class:`~pysnmp.hlapi.CommunityData` or :py:class:`~pysnmp.hlapi.UsmUserData`
Class instance representing SNMP credentials.
transportTarget : :py:class:`~pysnmp.hlapi.asyncore.UdpTransportTarget` or :py:class:`~pysnmp.hlapi.asyncore.Udp6TransportTarget`
Class instance representing transport type along with SNMP peer address.
contextData : :py:class:`~pysnmp.hlapi.ContextData`
Class instance representing SNMP ContextEngineId and ContextName values.
\\*varBinds : :py:class:`~pysnmp.smi.rfc1902.ObjectType`
One or more class instances representing MIB variables to place
into SNMP request.
Other Parameters
----------------
\\*\\*options :
Request options:
* `lookupMib` - load MIB and resolve response MIB variables at
the cost of slightly reduced performance. Default is `True`.
Yields
------
errorIndication : str
True value indicates SNMP engine error.
errorStatus : str
True value indicates SNMP PDU error.
errorIndex : int
Non-zero value refers to `varBinds[errorIndex-1]`
varBinds : tuple
A sequence of :py:class:`~pysnmp.smi.rfc1902.ObjectType` class
instances representing MIB variables returned in SNMP response.
Raises
------
PySnmpError
Or its derivative indicating that an error occurred while
performing SNMP operation.
Notes
-----
The `getCmd` generator will be exhausted immediately unless
a new sequence of `varBinds` is sent back into the running generator
(supported since Python 2.6).
Examples
--------
>>> from pysnmp.hlapi import *
>>> g = getCmd(SnmpEngine(),
... CommunityData('public'),
... UdpTransportTarget(('demo.snmplabs.com', 161)),
... ContextData(),
... ObjectType(ObjectIdentity('SNMPv2-MIB', 'sysDescr', 0)))
>>> next(g)
(None, 0, 0, [ObjectType(ObjectIdentity(ObjectName('1.3.6.1.2.1.1.1.0')), DisplayString('SunOS zeus.snmplabs.com 4.1.3_U1 1 sun4m'))])
>>>
"""
# noinspection PyShadowingNames
def cbFun(snmpEngine, sendRequestHandle, errorIndication, errorStatus, errorIndex, varBinds, cbCtx):
cbCtx['errorIndication'] = errorIndication
cbCtx['errorStatus'] = errorStatus
cbCtx['errorIndex'] = errorIndex
cbCtx['varBinds'] = varBinds
cbCtx = {}
while True:
if varBinds:
cmdgen.getCmd(snmpEngine, authData, transportTarget, contextData, *varBinds, cbFun=cbFun, cbCtx=cbCtx, lookupMib=options.get('lookupMib', True))
snmpEngine.transportDispatcher.runDispatcher()
errorIndication = cbCtx['errorIndication']
errorStatus = cbCtx['errorStatus']
errorIndex = cbCtx['errorIndex']
varBinds = cbCtx['varBinds'] # depends on [control=['if'], data=[]]
else:
errorIndication = errorStatus = errorIndex = None
varBinds = []
varBinds = (yield (errorIndication, errorStatus, errorIndex, varBinds))
if not varBinds:
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
def getElementConf(self, elementKw, raw=False):
""" return configuration for given element keyword,
e.g. getElementConf('Q01') should return dict: {u'k1': 0.0, u'l': 0.05}
:param elementKw: element keyword
"""
if raw is True:
try:
econf = self.all_elements.get(elementKw.upper())
except:
return {}
else:
try:
econf = list(self.all_elements.get(elementKw.upper()).values())[0]
except:
return {}
return econf | def function[getElementConf, parameter[self, elementKw, raw]]:
constant[ return configuration for given element keyword,
e.g. getElementConf('Q01') should return dict: {u'k1': 0.0, u'l': 0.05}
:param elementKw: element keyword
]
if compare[name[raw] is constant[True]] begin[:]
<ast.Try object at 0x7da1b0804dc0>
return[name[econf]] | keyword[def] identifier[getElementConf] ( identifier[self] , identifier[elementKw] , identifier[raw] = keyword[False] ):
literal[string]
keyword[if] identifier[raw] keyword[is] keyword[True] :
keyword[try] :
identifier[econf] = identifier[self] . identifier[all_elements] . identifier[get] ( identifier[elementKw] . identifier[upper] ())
keyword[except] :
keyword[return] {}
keyword[else] :
keyword[try] :
identifier[econf] = identifier[list] ( identifier[self] . identifier[all_elements] . identifier[get] ( identifier[elementKw] . identifier[upper] ()). identifier[values] ())[ literal[int] ]
keyword[except] :
keyword[return] {}
keyword[return] identifier[econf] | def getElementConf(self, elementKw, raw=False):
""" return configuration for given element keyword,
e.g. getElementConf('Q01') should return dict: {u'k1': 0.0, u'l': 0.05}
:param elementKw: element keyword
"""
if raw is True:
try:
econf = self.all_elements.get(elementKw.upper()) # depends on [control=['try'], data=[]]
except:
return {} # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
try:
econf = list(self.all_elements.get(elementKw.upper()).values())[0] # depends on [control=['try'], data=[]]
except:
return {} # depends on [control=['except'], data=[]]
return econf |
def _construct_email(self, email, **extra):
"""
Converts incoming data to a properly structured dictionary.
"""
if isinstance(email, dict):
email = Email(manager=self._manager, **email)
elif isinstance(email, (MIMEText, MIMEMultipart)):
email = Email.from_mime(email, self._manager)
elif not isinstance(email, Email):
raise ValueError
email._update(extra)
return email.as_dict() | def function[_construct_email, parameter[self, email]]:
constant[
Converts incoming data to a properly structured dictionary.
]
if call[name[isinstance], parameter[name[email], name[dict]]] begin[:]
variable[email] assign[=] call[name[Email], parameter[]]
call[name[email]._update, parameter[name[extra]]]
return[call[name[email].as_dict, parameter[]]] | keyword[def] identifier[_construct_email] ( identifier[self] , identifier[email] ,** identifier[extra] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[email] , identifier[dict] ):
identifier[email] = identifier[Email] ( identifier[manager] = identifier[self] . identifier[_manager] ,** identifier[email] )
keyword[elif] identifier[isinstance] ( identifier[email] ,( identifier[MIMEText] , identifier[MIMEMultipart] )):
identifier[email] = identifier[Email] . identifier[from_mime] ( identifier[email] , identifier[self] . identifier[_manager] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[email] , identifier[Email] ):
keyword[raise] identifier[ValueError]
identifier[email] . identifier[_update] ( identifier[extra] )
keyword[return] identifier[email] . identifier[as_dict] () | def _construct_email(self, email, **extra):
"""
Converts incoming data to a properly structured dictionary.
"""
if isinstance(email, dict):
email = Email(manager=self._manager, **email) # depends on [control=['if'], data=[]]
elif isinstance(email, (MIMEText, MIMEMultipart)):
email = Email.from_mime(email, self._manager) # depends on [control=['if'], data=[]]
elif not isinstance(email, Email):
raise ValueError # depends on [control=['if'], data=[]]
email._update(extra)
return email.as_dict() |
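The three accepted input shapes side by side; `mgr`, `existing_email`, and the Email keyword arguments are assumptions for illustration, only the dispatch itself mirrors the record:

from email.mime.text import MIMEText

mgr._construct_email({'to': 'a@example.com', 'subject': 'hi'})  # dict -> Email(**email)
mgr._construct_email(MIMEText('plain body'))                    # MIME object -> Email.from_mime(...)
mgr._construct_email(existing_email, track_opens=True)          # Email instance, extras merged via _update
mgr._construct_email(42)                                        # anything else raises ValueError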
def filter(self, filter_function):
"""Return a new Streamlet containing only the elements that satisfy filter_function
"""
from heronpy.streamlet.impl.filterbolt import FilterStreamlet
filter_streamlet = FilterStreamlet(filter_function, self)
self._add_child(filter_streamlet)
return filter_streamlet | def function[filter, parameter[self, filter_function]]:
constant[Return a new Streamlet containing only the elements that satisfy filter_function
]
from relative_module[heronpy.streamlet.impl.filterbolt] import module[FilterStreamlet]
variable[filter_streamlet] assign[=] call[name[FilterStreamlet], parameter[name[filter_function], name[self]]]
call[name[self]._add_child, parameter[name[filter_streamlet]]]
return[name[filter_streamlet]] | keyword[def] identifier[filter] ( identifier[self] , identifier[filter_function] ):
literal[string]
keyword[from] identifier[heronpy] . identifier[streamlet] . identifier[impl] . identifier[filterbolt] keyword[import] identifier[FilterStreamlet]
identifier[filter_streamlet] = identifier[FilterStreamlet] ( identifier[filter_function] , identifier[self] )
identifier[self] . identifier[_add_child] ( identifier[filter_streamlet] )
keyword[return] identifier[filter_streamlet] | def filter(self, filter_function):
"""Return a new Streamlet containing only the elements that satisfy filter_function
"""
from heronpy.streamlet.impl.filterbolt import FilterStreamlet
filter_streamlet = FilterStreamlet(filter_function, self)
self._add_child(filter_streamlet)
return filter_streamlet |
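A hypothetical pipeline fragment; `source` stands in for any upstream Streamlet emitting tuples:

# Keep only tuples whose first field is even; returns a new FilterStreamlet
# that is registered as a child of the source.
evens = source.filter(lambda tup: tup[0] % 2 == 0)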
def do_restart(self, line):
"""
Attempt to restart the bot.
"""
self.bot._frame = 0
self.bot._namespace.clear()
self.bot._namespace.update(self.bot._initial_namespace) | def function[do_restart, parameter[self, line]]:
constant[
Attempt to restart the bot.
]
name[self].bot._frame assign[=] constant[0]
call[name[self].bot._namespace.clear, parameter[]]
call[name[self].bot._namespace.update, parameter[name[self].bot._initial_namespace]] | keyword[def] identifier[do_restart] ( identifier[self] , identifier[line] ):
literal[string]
identifier[self] . identifier[bot] . identifier[_frame] = literal[int]
identifier[self] . identifier[bot] . identifier[_namespace] . identifier[clear] ()
identifier[self] . identifier[bot] . identifier[_namespace] . identifier[update] ( identifier[self] . identifier[bot] . identifier[_initial_namespace] ) | def do_restart(self, line):
"""
Attempt to restart the bot.
"""
self.bot._frame = 0
self.bot._namespace.clear()
self.bot._namespace.update(self.bot._initial_namespace) |
def connection(self):
"""The :class:`pika.BlockingConnection` for the current
thread. This property may change without notice.
"""
connection = getattr(self.state, "connection", None)
if connection is None:
connection = self.state.connection = pika.BlockingConnection(
parameters=self.parameters)
self.connections.add(connection)
return connection | def function[connection, parameter[self]]:
constant[The :class:`pika.BlockingConnection` for the current
thread. This property may change without notice.
]
variable[connection] assign[=] call[name[getattr], parameter[name[self].state, constant[connection], constant[None]]]
if compare[name[connection] is constant[None]] begin[:]
variable[connection] assign[=] call[name[pika].BlockingConnection, parameter[]]
call[name[self].connections.add, parameter[name[connection]]]
return[name[connection]] | keyword[def] identifier[connection] ( identifier[self] ):
literal[string]
identifier[connection] = identifier[getattr] ( identifier[self] . identifier[state] , literal[string] , keyword[None] )
keyword[if] identifier[connection] keyword[is] keyword[None] :
identifier[connection] = identifier[self] . identifier[state] . identifier[connection] = identifier[pika] . identifier[BlockingConnection] (
identifier[parameters] = identifier[self] . identifier[parameters] )
identifier[self] . identifier[connections] . identifier[add] ( identifier[connection] )
keyword[return] identifier[connection] | def connection(self):
"""The :class:`pika.BlockingConnection` for the current
thread. This property may change without notice.
"""
connection = getattr(self.state, 'connection', None)
if connection is None:
connection = self.state.connection = pika.BlockingConnection(parameters=self.parameters)
self.connections.add(connection) # depends on [control=['if'], data=['connection']]
return connection |
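The per-thread lazy-initialization pattern from this record in isolation, as a generic sketch rather than the pika-specific class above; threading.local gives each thread its own attribute namespace, so each thread builds its resource exactly once:

import threading

class LazyPerThread:
    """Create one resource per thread, on first access."""
    def __init__(self, factory):
        self._state = threading.local()
        self._factory = factory
        self._all = set()  # mirrors self.connections in the record

    @property
    def resource(self):
        res = getattr(self._state, 'resource', None)
        if res is None:
            # First access on this thread: build and cache the resource.
            res = self._state.resource = self._factory()
            self._all.add(res)
        return res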
def GetRootKey(self):
"""Retrieves the root key.
Returns:
WinRegistryKey: Windows Registry root key or None if not available.
"""
regf_key = self._regf_file.get_root_key()
if not regf_key:
return None
return REGFWinRegistryKey(regf_key, key_path=self._key_path_prefix) | def function[GetRootKey, parameter[self]]:
constant[Retrieves the root key.
Returns:
WinRegistryKey: Windows Registry root key or None if not available.
]
variable[regf_key] assign[=] call[name[self]._regf_file.get_root_key, parameter[]]
if <ast.UnaryOp object at 0x7da2044c1ff0> begin[:]
return[constant[None]]
return[call[name[REGFWinRegistryKey], parameter[name[regf_key]]]] | keyword[def] identifier[GetRootKey] ( identifier[self] ):
literal[string]
identifier[regf_key] = identifier[self] . identifier[_regf_file] . identifier[get_root_key] ()
keyword[if] keyword[not] identifier[regf_key] :
keyword[return] keyword[None]
keyword[return] identifier[REGFWinRegistryKey] ( identifier[regf_key] , identifier[key_path] = identifier[self] . identifier[_key_path_prefix] ) | def GetRootKey(self):
"""Retrieves the root key.
Returns:
WinRegistryKey: Windows Registry root key or None if not available.
"""
regf_key = self._regf_file.get_root_key()
if not regf_key:
return None # depends on [control=['if'], data=[]]
return REGFWinRegistryKey(regf_key, key_path=self._key_path_prefix) |
def security(self):
"""
Creates a reference to the Security operations for Portal
"""
url = self._url + "/security"
return _Security(url=url,
securityHandler=self._securityHandler,
proxy_url=self._proxy_url,
proxy_port=self._proxy_port) | def function[security, parameter[self]]:
constant[
Creates a reference to the Security operations for Portal
]
variable[url] assign[=] binary_operation[name[self]._url + constant[/security]]
return[call[name[_Security], parameter[]]] | keyword[def] identifier[security] ( identifier[self] ):
literal[string]
identifier[url] = identifier[self] . identifier[_url] + literal[string]
keyword[return] identifier[_Security] ( identifier[url] = identifier[url] ,
identifier[securityHandler] = identifier[self] . identifier[_securityHandler] ,
identifier[proxy_url] = identifier[self] . identifier[_proxy_url] ,
identifier[proxy_port] = identifier[self] . identifier[_proxy_port] ) | def security(self):
"""
Creates a reference to the Security operations for Portal
"""
url = self._url + '/security'
return _Security(url=url, securityHandler=self._securityHandler, proxy_url=self._proxy_url, proxy_port=self._proxy_port) |
def of_project(project: 'projects.Project') -> dict:
"""
Returns the file status information for every file within the project
source directory and its shared library folders.
:param project:
The project for which the status information should be generated
:return:
A dictionary containing:
- project: the status information for all files within
the project's source directory
- libraries: a list of status information dictionaries for all
files within each of the project's library directories. If a
library resides within the project source directory, the entry
will be an empty dictionary to prevent duplication.
"""
source_directory = project.source_directory
libraries_status = [
{} if d.startswith(source_directory) else of_directory(d)
for d in project.library_directories
]
return dict(
project=of_directory(source_directory),
libraries=libraries_status
) | def function[of_project, parameter[project]]:
constant[
Returns the file status information for every file within the project
source directory and its shared library folders.
:param project:
The project for which the status information should be generated
:return:
A dictionary containing:
- project: the status information for all files within
the projects source directory
- libraries: a list of status information dictionaries for all
files within each of the project's library directories. If a
library resides within the project source directory, the entry
will be an empty dictionary to prevent duplication.
]
variable[source_directory] assign[=] name[project].source_directory
variable[libraries_status] assign[=] <ast.ListComp object at 0x7da18c4cee60>
return[call[name[dict], parameter[]]] | keyword[def] identifier[of_project] ( identifier[project] : literal[string] )-> identifier[dict] :
literal[string]
identifier[source_directory] = identifier[project] . identifier[source_directory]
identifier[libraries_status] =[
{} keyword[if] identifier[d] . identifier[startswith] ( identifier[source_directory] ) keyword[else] identifier[of_directory] ( identifier[d] )
keyword[for] identifier[d] keyword[in] identifier[project] . identifier[library_directories]
]
keyword[return] identifier[dict] (
identifier[project] = identifier[of_directory] ( identifier[source_directory] ),
identifier[libraries] = identifier[libraries_status]
) | def of_project(project: 'projects.Project') -> dict:
"""
Returns the file status information for every file within the project
source directory and its shared library folders.
:param project:
The project for which the status information should be generated
:return:
A dictionary containing:
- project: the status information for all files within
the project's source directory
- libraries: a list of status information dictionaries for all
files within each of the project's library directories. If a
library resides within the project source directory, the entry
will be an empty dictionary to prevent duplication.
"""
source_directory = project.source_directory
libraries_status = [{} if d.startswith(source_directory) else of_directory(d) for d in project.library_directories]
return dict(project=of_directory(source_directory), libraries=libraries_status) |
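The duplicate-suppression rule reduced to a sketch: libraries living under the source tree become empty dicts so their files are not reported twice. The dict literal stands in for of_directory(d). Note the prefix test is a plain startswith, so '/proj/srclib' would also match '/proj/src', an edge case the record inherits:

source = '/proj/src'
libraries = ['/proj/src/lib', '/proj/shared']
statuses = [{} if d.startswith(source) else {'directory': d} for d in libraries]
# -> [{}, {'directory': '/proj/shared'}]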
def alias_feed(name, alias):
"""write aliases to db"""
with Database("aliases") as db:
if alias in db:
print("Something has gone horribly wrong with your aliases! Try deleting the %s entry." % name)
return
else:
db[alias] = name | def function[alias_feed, parameter[name, alias]]:
constant[write aliases to db]
with call[name[Database], parameter[constant[aliases]]] begin[:]
if compare[name[alias] in name[db]] begin[:]
call[name[print], parameter[binary_operation[constant[Something has gone horribly wrong with your aliases! Try deleting the %s entry.] <ast.Mod object at 0x7da2590d6920> name[name]]]]
return[None] | keyword[def] identifier[alias_feed] ( identifier[name] , identifier[alias] ):
literal[string]
keyword[with] identifier[Database] ( literal[string] ) keyword[as] identifier[db] :
keyword[if] identifier[alias] keyword[in] identifier[db] :
identifier[print] ( literal[string] % identifier[name] )
keyword[return]
keyword[else] :
identifier[db] [ identifier[alias] ]= identifier[name] | def alias_feed(name, alias):
"""write aliases to db"""
with Database('aliases') as db:
if alias in db:
print('Something has gone horribly wrong with your aliases! Try deleting the %s entry.' % name)
return # depends on [control=['if'], data=[]]
else:
db[alias] = name # depends on [control=['with'], data=['db']] |
def get_os_dist_info():
"""
Returns the distribution info
"""
distribution = platform.dist()
dist_name = distribution[0].lower()
dist_version_str = distribution[1]
if dist_name and dist_version_str:
return dist_name, dist_version_str
else:
return None, None | def function[get_os_dist_info, parameter[]]:
constant[
Returns the distribution info
]
variable[distribution] assign[=] call[name[platform].dist, parameter[]]
variable[dist_name] assign[=] call[call[name[distribution]][constant[0]].lower, parameter[]]
variable[dist_version_str] assign[=] call[name[distribution]][constant[1]]
if <ast.BoolOp object at 0x7da1b28455d0> begin[:]
return[tuple[[<ast.Name object at 0x7da1b2845c30>, <ast.Name object at 0x7da1b2847a00>]]] | keyword[def] identifier[get_os_dist_info] ():
literal[string]
identifier[distribution] = identifier[platform] . identifier[dist] ()
identifier[dist_name] = identifier[distribution] [ literal[int] ]. identifier[lower] ()
identifier[dist_version_str] = identifier[distribution] [ literal[int] ]
keyword[if] identifier[dist_name] keyword[and] identifier[dist_version_str] :
keyword[return] identifier[dist_name] , identifier[dist_version_str]
keyword[else] :
keyword[return] keyword[None] , keyword[None] | def get_os_dist_info():
"""
Returns the distribution info
"""
distribution = platform.dist()
dist_name = distribution[0].lower()
dist_version_str = distribution[1]
if dist_name and dist_version_str:
return (dist_name, dist_version_str) # depends on [control=['if'], data=[]]
else:
return (None, None) |
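platform.dist() was deprecated in Python 3.5 and removed in 3.8, so this record raises AttributeError on modern interpreters. A hedged port to the third-party distro package that keeps the same (name, version) contract:

import distro  # pip install distro

def get_os_dist_info():
    """Returns the distribution info."""
    dist_name = distro.id()            # already lowercase, e.g. 'ubuntu'
    dist_version_str = distro.version()
    if dist_name and dist_version_str:
        return dist_name, dist_version_str
    return None, None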
def partition_agent(host):
""" Partition a node from all network traffic except for SSH and loopback
:param host: host or IP of the machine to partition from the cluster
"""
network.save_iptables(host)
network.flush_all_rules(host)
network.allow_all_traffic(host)
network.run_iptables(host, ALLOW_SSH)
network.run_iptables(host, ALLOW_PING)
network.run_iptables(host, DISALLOW_MESOS)
network.run_iptables(host, DISALLOW_INPUT) | def function[partition_agent, parameter[host]]:
constant[ Partition a node from all network traffic except for SSH and loopback
:param host: host or IP of the machine to partition from the cluster
]
call[name[network].save_iptables, parameter[name[host]]]
call[name[network].flush_all_rules, parameter[name[host]]]
call[name[network].allow_all_traffic, parameter[name[host]]]
call[name[network].run_iptables, parameter[name[host], name[ALLOW_SSH]]]
call[name[network].run_iptables, parameter[name[host], name[ALLOW_PING]]]
call[name[network].run_iptables, parameter[name[host], name[DISALLOW_MESOS]]]
call[name[network].run_iptables, parameter[name[host], name[DISALLOW_INPUT]]] | keyword[def] identifier[partition_agent] ( identifier[host] ):
literal[string]
identifier[network] . identifier[save_iptables] ( identifier[host] )
identifier[network] . identifier[flush_all_rules] ( identifier[host] )
identifier[network] . identifier[allow_all_traffic] ( identifier[host] )
identifier[network] . identifier[run_iptables] ( identifier[host] , identifier[ALLOW_SSH] )
identifier[network] . identifier[run_iptables] ( identifier[host] , identifier[ALLOW_PING] )
identifier[network] . identifier[run_iptables] ( identifier[host] , identifier[DISALLOW_MESOS] )
identifier[network] . identifier[run_iptables] ( identifier[host] , identifier[DISALLOW_INPUT] ) | def partition_agent(host):
""" Partition a node from all network traffic except for SSH and loopback
:param host: host or IP of the machine to partition from the cluster
"""
network.save_iptables(host)
network.flush_all_rules(host)
network.allow_all_traffic(host)
network.run_iptables(host, ALLOW_SSH)
network.run_iptables(host, ALLOW_PING)
network.run_iptables(host, DISALLOW_MESOS)
network.run_iptables(host, DISALLOW_INPUT) |
def _already_in_album(self,fullfile,pid,album_id):
"""Check to see if photo with given pid is already
in the album_id, returns true if this is the case
"""
logger.debug("fb: Checking if pid %s in album %s",pid,album_id)
pid_in_album=[]
# Get all photos in album
photos = self.fb.get_connections(str(album_id),"photos")['data']
# Get all pids in fb album
for photo in photos:
pid_in_album.append(photo['id'])
logger.debug("fb: album %d contains these photos: %s",album_id,pid_in_album)
# Check if our pid matches
if pid in pid_in_album:
return True
return False | def function[_already_in_album, parameter[self, fullfile, pid, album_id]]:
constant[Check to see if photo with given pid is already
in the album_id, returns true if this is the case
]
call[name[logger].debug, parameter[constant[fb: Checking if pid %s in album %s], name[pid], name[album_id]]]
variable[pid_in_album] assign[=] list[[]]
variable[photos] assign[=] call[call[name[self].fb.get_connections, parameter[call[name[str], parameter[name[album_id]]], constant[photos]]]][constant[data]]
for taget[name[photo]] in starred[name[photos]] begin[:]
call[name[pid_in_album].append, parameter[call[name[photo]][constant[id]]]]
call[name[logger].debug, parameter[constant[fb: album %d contains these photos: %s], name[album_id], name[pid_in_album]]]
if compare[name[pid] in name[pid_in_album]] begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[_already_in_album] ( identifier[self] , identifier[fullfile] , identifier[pid] , identifier[album_id] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[pid] , identifier[album_id] )
identifier[pid_in_album] =[]
identifier[photos] = identifier[self] . identifier[fb] . identifier[get_connections] ( identifier[str] ( identifier[album_id] ), literal[string] )[ literal[string] ]
keyword[for] identifier[photo] keyword[in] identifier[photos] :
identifier[pid_in_album] . identifier[append] ( identifier[photo] [ literal[string] ])
identifier[logger] . identifier[debug] ( literal[string] , identifier[album_id] , identifier[pid_in_album] )
keyword[if] identifier[pid] keyword[in] identifier[pid_in_album] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def _already_in_album(self, fullfile, pid, album_id):
"""Check to see if photo with given pid is already
in the album_id, returns true if this is the case
"""
logger.debug('fb: Checking if pid %s in album %s', pid, album_id)
pid_in_album = []
# Get all photos in album
photos = self.fb.get_connections(str(album_id), 'photos')['data']
# Get all pids in fb album
for photo in photos:
pid_in_album.append(photo['id']) # depends on [control=['for'], data=['photo']]
logger.debug('fb: album %d contains these photos: %s', album_id, pid_in_album)
# Check if our pid matches
if pid in pid_in_album:
return True # depends on [control=['if'], data=[]]
return False |
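The collect-then-test loop can be collapsed with any(), which also short-circuits on the first match; a sketch assuming the same Graph API response shape as the record:

def _already_in_album(self, fullfile, pid, album_id):
    """Return True if a photo with this pid is already in album_id."""
    photos = self.fb.get_connections(str(album_id), 'photos')['data']
    # any() stops iterating as soon as a matching id is found.
    return any(photo['id'] == pid for photo in photos)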
def _body_builder(self, kwargs):
"""
Helper method to construct the appropriate SOAP-body to call a
FritzBox-Service.
"""
p = {
'action_name': self.name,
'service_type': self.service_type,
'arguments': '',
}
if kwargs:
arguments = [
self.argument_template % {'name': k, 'value': v}
for k, v in kwargs.items()
]
p['arguments'] = ''.join(arguments)
body = self.body_template.strip() % p
return body | def function[_body_builder, parameter[self, kwargs]]:
constant[
Helper method to construct the appropriate SOAP-body to call a
FritzBox-Service.
]
variable[p] assign[=] dictionary[[<ast.Constant object at 0x7da1b1040d90>, <ast.Constant object at 0x7da1b10402b0>, <ast.Constant object at 0x7da1b10401c0>], [<ast.Attribute object at 0x7da1b1042170>, <ast.Attribute object at 0x7da1b1042110>, <ast.Constant object at 0x7da1b1041ae0>]]
if name[kwargs] begin[:]
variable[arguments] assign[=] <ast.ListComp object at 0x7da1b1041630>
call[name[p]][constant[arguments]] assign[=] call[constant[].join, parameter[name[arguments]]]
variable[body] assign[=] binary_operation[call[name[self].body_template.strip, parameter[]] <ast.Mod object at 0x7da2590d6920> name[p]]
return[name[body]] | keyword[def] identifier[_body_builder] ( identifier[self] , identifier[kwargs] ):
literal[string]
identifier[p] ={
literal[string] : identifier[self] . identifier[name] ,
literal[string] : identifier[self] . identifier[service_type] ,
literal[string] : literal[string] ,
}
keyword[if] identifier[kwargs] :
identifier[arguments] =[
identifier[self] . identifier[argument_template] %{ literal[string] : identifier[k] , literal[string] : identifier[v] }
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] ()
]
identifier[p] [ literal[string] ]= literal[string] . identifier[join] ( identifier[arguments] )
identifier[body] = identifier[self] . identifier[body_template] . identifier[strip] ()% identifier[p]
keyword[return] identifier[body] | def _body_builder(self, kwargs):
"""
Helper method to construct the appropriate SOAP-body to call a
FritzBox-Service.
"""
p = {'action_name': self.name, 'service_type': self.service_type, 'arguments': ''}
if kwargs:
arguments = [self.argument_template % {'name': k, 'value': v} for (k, v) in kwargs.items()]
p['arguments'] = ''.join(arguments) # depends on [control=['if'], data=[]]
body = self.body_template.strip() % p
return body |
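How the two %-templates compose, with a stand-in template string (the real argument_template and body_template live elsewhere on the class, so their exact markup is an assumption):

argument_template = '<s:%(name)s>%(value)s</s:%(name)s>'
arguments = ''.join(argument_template % {'name': k, 'value': v}
                    for k, v in {'NewIndex': 0}.items())
# arguments == '<s:NewIndex>0</s:NewIndex>'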
def update(self, title, key):
"""Update this key.
:param str title: (required), title of the key
:param str key: (required), text of the key file
:returns: bool
"""
json = None
if title and key:
data = {'title': title, 'key': key}
json = self._json(self._patch(self._api, data=dumps(data)), 200)
if json:
self._update_(json)
return True
return False | def function[update, parameter[self, title, key]]:
constant[Update this key.
:param str title: (required), title of the key
:param str key: (required), text of the key file
:returns: bool
]
variable[json] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b0fef790> begin[:]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b0fee620>, <ast.Constant object at 0x7da1b0fee800>], [<ast.Name object at 0x7da1b0fef640>, <ast.Name object at 0x7da1b0fef6a0>]]
variable[json] assign[=] call[name[self]._json, parameter[call[name[self]._patch, parameter[name[self]._api]], constant[200]]]
if name[json] begin[:]
call[name[self]._update_, parameter[name[json]]]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[update] ( identifier[self] , identifier[title] , identifier[key] ):
literal[string]
identifier[json] = keyword[None]
keyword[if] identifier[title] keyword[and] identifier[key] :
identifier[data] ={ literal[string] : identifier[title] , literal[string] : identifier[key] }
identifier[json] = identifier[self] . identifier[_json] ( identifier[self] . identifier[_patch] ( identifier[self] . identifier[_api] , identifier[data] = identifier[dumps] ( identifier[data] )), literal[int] )
keyword[if] identifier[json] :
identifier[self] . identifier[_update_] ( identifier[json] )
keyword[return] keyword[True]
keyword[return] keyword[False] | def update(self, title, key):
"""Update this key.
:param str title: (required), title of the key
:param str key: (required), text of the key file
:returns: bool
"""
json = None
if title and key:
data = {'title': title, 'key': key}
json = self._json(self._patch(self._api, data=dumps(data)), 200) # depends on [control=['if'], data=[]]
if json:
self._update_(json)
return True # depends on [control=['if'], data=[]]
return False |
def envelope(component, **kwargs):
"""
Create parameters for an envelope (usually will be attached to two stars so
that they can share a common envelope)
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_component`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s
"""
params = []
params += [FloatParameter(qualifier='abun', value=kwargs.get('abun', 0.), default_unit=u.dimensionless_unscaled, description='Metallicity')] # TODO: correct units??? check if log or not? (logabun = 0)
# params += [FloatParameter(qualifier='gravb_bol', value=kwargs.get('gravb_bol', 0.32), default_unit=u.dimensionless_unscaled, limits=(0.0,1.0), description='Bolometric gravity brightening')]
# params += [FloatParameter(qualifier='frac_refl_bol', value=kwargs.get('frac_refl_bol', 0.0), default_unit=u.dimensionless_unscaled, limits=(0.0,1.0), description='ratio of incident bolometric light that is used for reflection (heating without redistribution)')]
# params += [FloatParameter(qualifier='frac_heat_bol', value=kwargs.get('frac_heat_bol', 0.0), default_unit=u.dimensionless_unscaled, limits=(0.0,1.0), description='ratio of incident bolometric light that is used for heating')]
# params += [FloatParameter(qualifier='frac_scatt_bol', value=kwargs.get('frac_scatt_bol', 0.0), default_unit=u.dimensionless_unscaled, limits=(0.0,1.0), description='ratio of bolometric incident light that is scattered')]
# params += [FloatParameter(qualifier='frac_lost_bol', value=kwargs.get('frac_lost_bol', 1.0), default_unit=u.dimensionless_unscaled, limits=(0.0, 1.0), description='ratio of incident bolometric light that is lost/ignored')]
params += [FloatParameter(qualifier='fillout_factor', value=kwargs.get('fillout_factor', 0.5), default_unit=u.dimensionless_unscaled, limits=(0.0,1.0), description='Fillout-factor of the envelope')]
params += [FloatParameter(qualifier='pot', value=kwargs.get('pot', 3.5), default_unit=u.dimensionless_unscaled, limits=(0.0,None), description='Potential of the envelope (from the primary component\'s reference)')]
params += [FloatParameter(qualifier='pot_min', value=kwargs.get('pot_min', 3.5), default_unit=u.dimensionless_unscaled, limits=(0.0,None), description='Critical (minimum) value of the potential to remain a contact')]
params += [FloatParameter(qualifier='pot_max', value=kwargs.get('pot_max', 3.5), default_unit=u.dimensionless_unscaled, limits=(0.0,None), description='Critical (maximum) value of the potential to remain a contact')]
# params += [FloatParameter(qualifier='intens_coeff1', value=kwargs.get('intens_coeff1', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [FloatParameter(qualifier='intens_coeff2', value=kwargs.get('intens_coeff2', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [FloatParameter(qualifier='intens_coeff3', value=kwargs.get('intens_coeff3', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [FloatParameter(qualifier='intens_coeff4', value=kwargs.get('intens_coeff4', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [FloatParameter(qualifier='intens_coeff5', value=kwargs.get('intens_coeff5', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [ChoiceParameter(qualifier='ld_func_bol', value=kwargs.get('ld_func_bol', 'logarithmic'), choices=_ld_func_choices_no_interp, description='Bolometric limb darkening model')]
# params += [FloatArrayParameter(qualifier='ld_coeffs_bol', value=kwargs.get('ld_coeffs_bol', [0.5, 0.5]), default_unit=u.dimensionless_unscaled, description='Bolometric limb darkening coefficients')]
constraints = []
# constraints handled by set hierarchy:
# potential_contact_min/max
# requiv_contact_min/max
return ParameterSet(params), constraints | def function[envelope, parameter[component]]:
constant[
Create parameters for an envelope (usually will be attached to two stars so
that they can share a common envelope)
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_component`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s
]
variable[params] assign[=] list[[]]
<ast.AugAssign object at 0x7da18f813610>
<ast.AugAssign object at 0x7da18f811120>
<ast.AugAssign object at 0x7da18f812b60>
<ast.AugAssign object at 0x7da2054a5c30>
<ast.AugAssign object at 0x7da2054a70d0>
variable[constraints] assign[=] list[[]]
return[tuple[[<ast.Call object at 0x7da2054a6710>, <ast.Name object at 0x7da2054a7c10>]]] | keyword[def] identifier[envelope] ( identifier[component] ,** identifier[kwargs] ):
literal[string]
identifier[params] =[]
identifier[params] +=[ identifier[FloatParameter] ( identifier[qualifier] = literal[string] , identifier[value] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ), identifier[default_unit] = identifier[u] . identifier[dimensionless_unscaled] , identifier[description] = literal[string] )]
identifier[params] +=[ identifier[FloatParameter] ( identifier[qualifier] = literal[string] , identifier[value] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ), identifier[default_unit] = identifier[u] . identifier[dimensionless_unscaled] , identifier[limits] =( literal[int] , literal[int] ), identifier[description] = literal[string] )]
identifier[params] +=[ identifier[FloatParameter] ( identifier[qualifier] = literal[string] , identifier[value] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ), identifier[default_unit] = identifier[u] . identifier[dimensionless_unscaled] , identifier[limits] =( literal[int] , keyword[None] ), identifier[description] = literal[string] )]
identifier[params] +=[ identifier[FloatParameter] ( identifier[qualifier] = literal[string] , identifier[value] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ), identifier[default_unit] = identifier[u] . identifier[dimensionless_unscaled] , identifier[limits] =( literal[int] , keyword[None] ), identifier[description] = literal[string] )]
identifier[params] +=[ identifier[FloatParameter] ( identifier[qualifier] = literal[string] , identifier[value] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ), identifier[default_unit] = identifier[u] . identifier[dimensionless_unscaled] , identifier[limits] =( literal[int] , keyword[None] ), identifier[description] = literal[string] )]
identifier[constraints] =[]
keyword[return] identifier[ParameterSet] ( identifier[params] ), identifier[constraints] | def envelope(component, **kwargs):
"""
Create parameters for an envelope (usually will be attached to two stars so
that they can share a common envelope)
Generally, this will be used as an input to the kind argument in
:meth:`phoebe.frontend.bundle.Bundle.add_component`
:parameter **kwargs: defaults for the values of any of the parameters
:return: a :class:`phoebe.parameters.parameters.ParameterSet` of all newly
created :class:`phoebe.parameters.parameters.Parameter`s
"""
params = []
params += [FloatParameter(qualifier='abun', value=kwargs.get('abun', 0.0), default_unit=u.dimensionless_unscaled, description='Metallicity')] # TODO: correct units??? check if log or not? (logabun = 0)
# params += [FloatParameter(qualifier='gravb_bol', value=kwargs.get('gravb_bol', 0.32), default_unit=u.dimensionless_unscaled, limits=(0.0,1.0), description='Bolometric gravity brightening')]
# params += [FloatParameter(qualifier='frac_refl_bol', value=kwargs.get('frac_refl_bol', 0.0), default_unit=u.dimensionless_unscaled, limits=(0.0,1.0), description='ratio of incident bolometric light that is used for reflection (heating without redistribution)')]
# params += [FloatParameter(qualifier='frac_heat_bol', value=kwargs.get('frac_heat_bol', 0.0), default_unit=u.dimensionless_unscaled, limits=(0.0,1.0), description='ratio of incident bolometric light that is used for heating')]
# params += [FloatParameter(qualifier='frac_scatt_bol', value=kwargs.get('frac_scatt_bol', 0.0), default_unit=u.dimensionless_unscaled, limits=(0.0,1.0), description='ratio of bolometric incident light that is scattered')]
# params += [FloatParameter(qualifier='frac_lost_bol', value=kwargs.get('frac_lost_bol', 1.0), default_unit=u.dimensionless_unscaled, limits=(0.0, 1.0), description='ratio of incident bolometric light that is lost/ignored')]
params += [FloatParameter(qualifier='fillout_factor', value=kwargs.get('fillout_factor', 0.5), default_unit=u.dimensionless_unscaled, limits=(0.0, 1.0), description='Fillout-factor of the envelope')]
params += [FloatParameter(qualifier='pot', value=kwargs.get('pot', 3.5), default_unit=u.dimensionless_unscaled, limits=(0.0, None), description="Potential of the envelope (from the primary component's reference)")]
params += [FloatParameter(qualifier='pot_min', value=kwargs.get('pot_min', 3.5), default_unit=u.dimensionless_unscaled, limits=(0.0, None), description='Critical (minimum) value of the potential to remain a contact')]
params += [FloatParameter(qualifier='pot_max', value=kwargs.get('pot_max', 3.5), default_unit=u.dimensionless_unscaled, limits=(0.0, None), description='Critical (maximum) value of the potential to remain a contact')]
# params += [FloatParameter(qualifier='intens_coeff1', value=kwargs.get('intens_coeff1', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [FloatParameter(qualifier='intens_coeff2', value=kwargs.get('intens_coeff2', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [FloatParameter(qualifier='intens_coeff3', value=kwargs.get('intens_coeff3', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [FloatParameter(qualifier='intens_coeff4', value=kwargs.get('intens_coeff4', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [FloatParameter(qualifier='intens_coeff5', value=kwargs.get('intens_coeff5', 1.0), default_unit=u.dimensionless_unscaled, description='')]
# params += [ChoiceParameter(qualifier='ld_func_bol', value=kwargs.get('ld_func_bol', 'logarithmic'), choices=_ld_func_choices_no_interp, description='Bolometric limb darkening model')]
# params += [FloatArrayParameter(qualifier='ld_coeffs_bol', value=kwargs.get('ld_coeffs_bol', [0.5, 0.5]), default_unit=u.dimensionless_unscaled, description='Bolometric limb darkening coefficients')]
constraints = []
# constraints handled by set hierarchy:
# potential_contact_min/max
# requiv_contact_min/max
return (ParameterSet(params), constraints) |
def unescape_LDAP(ldap_string):
# type: (str) -> str
# pylint: disable=C0103
"""
Unescapes an LDAP string
:param ldap_string: The string to unescape
:return: The unprotected string
"""
if ldap_string is None:
return None
if ESCAPE_CHARACTER not in ldap_string:
# No need to loop
return ldap_string
escaped = False
result = ""
for character in ldap_string:
if not escaped and character == ESCAPE_CHARACTER:
# Escape character found
escaped = True
else:
# Copy the character
escaped = False
result += character
return result | def function[unescape_LDAP, parameter[ldap_string]]:
constant[
Unescapes an LDAP string
:param ldap_string: The string to unescape
:return: The unprotected string
]
if compare[name[ldap_string] is constant[None]] begin[:]
return[constant[None]]
if compare[name[ESCAPE_CHARACTER] <ast.NotIn object at 0x7da2590d7190> name[ldap_string]] begin[:]
return[name[ldap_string]]
variable[escaped] assign[=] constant[False]
variable[result] assign[=] constant[]
for taget[name[character]] in starred[name[ldap_string]] begin[:]
if <ast.BoolOp object at 0x7da20c6e6aa0> begin[:]
variable[escaped] assign[=] constant[True]
return[name[result]] | keyword[def] identifier[unescape_LDAP] ( identifier[ldap_string] ):
literal[string]
keyword[if] identifier[ldap_string] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[if] identifier[ESCAPE_CHARACTER] keyword[not] keyword[in] identifier[ldap_string] :
keyword[return] identifier[ldap_string]
identifier[escaped] = keyword[False]
identifier[result] = literal[string]
keyword[for] identifier[character] keyword[in] identifier[ldap_string] :
keyword[if] keyword[not] identifier[escaped] keyword[and] identifier[character] == identifier[ESCAPE_CHARACTER] :
identifier[escaped] = keyword[True]
keyword[else] :
identifier[escaped] = keyword[False]
identifier[result] += identifier[character]
keyword[return] identifier[result] | def unescape_LDAP(ldap_string):
# type: (str) -> str
# pylint: disable=C0103
'\n Unescapes an LDAP string\n\n :param ldap_string: The string to unescape\n :return: The unprotected string\n '
if ldap_string is None:
return None # depends on [control=['if'], data=[]]
if ESCAPE_CHARACTER not in ldap_string:
# No need to loop
return ldap_string # depends on [control=['if'], data=['ldap_string']]
escaped = False
result = ''
for character in ldap_string:
if not escaped and character == ESCAPE_CHARACTER:
# Escape character found
escaped = True # depends on [control=['if'], data=[]]
else:
# Copy the character
escaped = False
result += character # depends on [control=['for'], data=['character']]
return result |
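Assuming ESCAPE_CHARACTER is the backslash used in RFC 4515-style filters (the constant is defined elsewhere in the module), the behaviour in example form:

ESCAPE_CHARACTER = '\\'  # assumed module-level constant

unescape_LDAP('cn=Doe\\, John')  # -> 'cn=Doe, John'
unescape_LDAP('a\\\\b')          # -> 'a\\b' (an escaped escape keeps one backslash)
unescape_LDAP(None)              # -> None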
def get_manual(self, start, end):
"""Get forecasts for a manually selected time period."""
url = build_url(self.api_key, self.spot_id, self.fields,
self.unit, start, end)
return get_msw(url) | def function[get_manual, parameter[self, start, end]]:
constant[Get forecasts for a manually selected time period.]
variable[url] assign[=] call[name[build_url], parameter[name[self].api_key, name[self].spot_id, name[self].fields, name[self].unit, name[start], name[end]]]
return[call[name[get_msw], parameter[name[url]]]] | keyword[def] identifier[get_manual] ( identifier[self] , identifier[start] , identifier[end] ):
literal[string]
identifier[url] = identifier[build_url] ( identifier[self] . identifier[api_key] , identifier[self] . identifier[spot_id] , identifier[self] . identifier[fields] ,
identifier[self] . identifier[unit] , identifier[start] , identifier[end] )
keyword[return] identifier[get_msw] ( identifier[url] ) | def get_manual(self, start, end):
"""Get forecasts for a manually selected time period."""
url = build_url(self.api_key, self.spot_id, self.fields, self.unit, start, end)
return get_msw(url) |
def intersect(self, *sets):
"""
Add a list of sets to the existing list of sets to check. Returns self
for chaining.
Each "set" represent a list of pk, the final goal is to return only pks
matching the intersection of all sets.
A "set" can be:
- a string: considered as the name of a redis set, sorted set or list
(if a list, values will be stored in a temporary set)
- a list, set or tuple: values will be stored in a temporary set
- a SetField: we will directly use its content on redis
- a ListField or SortedSetField: values will be stored in a temporary
set (except if we want a sort or values and it's the only "set" to
use)
"""
sets_ = set()
for set_ in sets:
if isinstance(set_, (list, set)):
set_ = tuple(set_)
elif isinstance(set_, MultiValuesField) and not getattr(set_, '_instance', None):
raise ValueError('%s passed to "intersect" must be bound'
% set_.__class__.__name__)
elif not isinstance(set_, (tuple, str, MultiValuesField, _StoredCollection)):
raise ValueError('%s is not a valid type of argument that can '
'be used as a set. Allowed are: string (key '
'of a redis set, sorted set or list), '
'limpyd multi-values field ('
'SetField, ListField or SortedSetField), or '
'real python set, list or tuple' % set_)
if isinstance(set_, SortedSetField):
self._has_sortedsets = True
sets_.add(set_)
self._lazy_collection['intersects'].update(sets_)
return self | def function[intersect, parameter[self]]:
constant[
Add a list of sets to the existing list of sets to check. Returns self
for chaining.
Each "set" represent a list of pk, the final goal is to return only pks
matching the intersection of all sets.
A "set" can be:
- a string: considered as the name of a redis set, sorted set or list
(if a list, values will be stored in a temporary set)
- a list, set or tuple: values will be stored in a temporary set
- a SetField: we will directly use its content on redis
- a ListField or SortedSetField: values will be stored in a temporary
set (except if we want a sort or values and it's the only "set" to
use)
]
variable[sets_] assign[=] call[name[set], parameter[]]
for taget[name[set_]] in starred[name[sets]] begin[:]
if call[name[isinstance], parameter[name[set_], tuple[[<ast.Name object at 0x7da1b2623d00>, <ast.Name object at 0x7da1b2620fd0>]]]] begin[:]
variable[set_] assign[=] call[name[tuple], parameter[name[set_]]]
if call[name[isinstance], parameter[name[set_], name[SortedSetField]]] begin[:]
name[self]._has_sortedsets assign[=] constant[True]
call[name[sets_].add, parameter[name[set_]]]
call[call[name[self]._lazy_collection][constant[intersects]].update, parameter[name[sets_]]]
return[name[self]] | keyword[def] identifier[intersect] ( identifier[self] ,* identifier[sets] ):
literal[string]
identifier[sets_] = identifier[set] ()
keyword[for] identifier[set_] keyword[in] identifier[sets] :
keyword[if] identifier[isinstance] ( identifier[set_] ,( identifier[list] , identifier[set] )):
identifier[set_] = identifier[tuple] ( identifier[set_] )
keyword[elif] identifier[isinstance] ( identifier[set_] , identifier[MultiValuesField] ) keyword[and] keyword[not] identifier[getattr] ( identifier[set_] , literal[string] , keyword[None] ):
keyword[raise] identifier[ValueError] ( literal[string]
% identifier[set_] . identifier[__class__] . identifier[__name__] )
keyword[elif] keyword[not] identifier[isinstance] ( identifier[set_] ,( identifier[tuple] , identifier[str] , identifier[MultiValuesField] , identifier[_StoredCollection] )):
keyword[raise] identifier[ValueError] ( literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] % identifier[set_] )
keyword[if] identifier[isinstance] ( identifier[set_] , identifier[SortedSetField] ):
identifier[self] . identifier[_has_sortedsets] = keyword[True]
identifier[sets_] . identifier[add] ( identifier[set_] )
identifier[self] . identifier[_lazy_collection] [ literal[string] ]. identifier[update] ( identifier[sets_] )
keyword[return] identifier[self] | def intersect(self, *sets):
"""
Add a list of sets to the existing list of sets to check. Returns self
for chaining.
Each "set" represent a list of pk, the final goal is to return only pks
matching the intersection of all sets.
A "set" can be:
- a string: considered as the name of a redis set, sorted set or list
(if a list, values will be stored in a temporary set)
- a list, set or tuple: values will be stored in a temporary set
- a SetField: we will directly use it's content on redis
- a ListField or SortedSetField: values will be stored in a temporary
set (except if we want a sort or values and it's the only "set" to
use)
"""
sets_ = set()
for set_ in sets:
if isinstance(set_, (list, set)):
set_ = tuple(set_) # depends on [control=['if'], data=[]]
elif isinstance(set_, MultiValuesField) and (not getattr(set_, '_instance', None)):
raise ValueError('%s passed to "intersect" must be bound' % set_.__class__.__name__) # depends on [control=['if'], data=[]]
elif not isinstance(set_, (tuple, str, MultiValuesField, _StoredCollection)):
raise ValueError('%s is not a valid type of argument that can be used as a set. Allowed are: string (key of a redis set, sorted set or list), limpyd multi-values field (SetField, ListField or SortedSetField), or real python set, list or tuple' % set_) # depends on [control=['if'], data=[]]
if isinstance(set_, SortedSetField):
self._has_sortedsets = True # depends on [control=['if'], data=[]]
sets_.add(set_) # depends on [control=['for'], data=['set_']]
self._lazy_collection['intersects'].update(sets_)
return self |
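The accepted argument kinds at a glance; `collection`, `instance`, and `Model` are hypothetical names, and each call returns the collection for chaining:

collection.intersect({1, 2, 3})          # python set/list/tuple -> stored in a temporary set
collection.intersect('users:active')     # key of an existing redis set, sorted set or list
collection.intersect(instance.tags)      # bound SetField, used directly on redis
collection.intersect(Model.tags)         # unbound multi-values field -> ValueError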
def execute(self, context):
"""Execute the python dataflow job."""
bucket_helper = GoogleCloudBucketHelper(
self.gcp_conn_id, self.delegate_to)
self.py_file = bucket_helper.google_cloud_to_local(self.py_file)
hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id,
delegate_to=self.delegate_to,
poll_sleep=self.poll_sleep)
dataflow_options = self.dataflow_default_options.copy()
dataflow_options.update(self.options)
# Convert argument names from lowerCamelCase to snake case.
camel_to_snake = lambda name: re.sub(
r'[A-Z]', lambda x: '_' + x.group(0).lower(), name)
formatted_options = {camel_to_snake(key): dataflow_options[key]
for key in dataflow_options}
hook.start_python_dataflow(
self.job_name, formatted_options,
self.py_file, self.py_options) | def function[execute, parameter[self, context]]:
constant[Execute the python dataflow job.]
variable[bucket_helper] assign[=] call[name[GoogleCloudBucketHelper], parameter[name[self].gcp_conn_id, name[self].delegate_to]]
name[self].py_file assign[=] call[name[bucket_helper].google_cloud_to_local, parameter[name[self].py_file]]
variable[hook] assign[=] call[name[DataFlowHook], parameter[]]
variable[dataflow_options] assign[=] call[name[self].dataflow_default_options.copy, parameter[]]
call[name[dataflow_options].update, parameter[name[self].options]]
variable[camel_to_snake] assign[=] <ast.Lambda object at 0x7da2054a53c0>
variable[formatted_options] assign[=] <ast.DictComp object at 0x7da1b052be80>
call[name[hook].start_python_dataflow, parameter[name[self].job_name, name[formatted_options], name[self].py_file, name[self].py_options]] | keyword[def] identifier[execute] ( identifier[self] , identifier[context] ):
literal[string]
identifier[bucket_helper] = identifier[GoogleCloudBucketHelper] (
identifier[self] . identifier[gcp_conn_id] , identifier[self] . identifier[delegate_to] )
identifier[self] . identifier[py_file] = identifier[bucket_helper] . identifier[google_cloud_to_local] ( identifier[self] . identifier[py_file] )
identifier[hook] = identifier[DataFlowHook] ( identifier[gcp_conn_id] = identifier[self] . identifier[gcp_conn_id] ,
identifier[delegate_to] = identifier[self] . identifier[delegate_to] ,
identifier[poll_sleep] = identifier[self] . identifier[poll_sleep] )
identifier[dataflow_options] = identifier[self] . identifier[dataflow_default_options] . identifier[copy] ()
identifier[dataflow_options] . identifier[update] ( identifier[self] . identifier[options] )
identifier[camel_to_snake] = keyword[lambda] identifier[name] : identifier[re] . identifier[sub] (
literal[string] , keyword[lambda] identifier[x] : literal[string] + identifier[x] . identifier[group] ( literal[int] ). identifier[lower] (), identifier[name] )
identifier[formatted_options] ={ identifier[camel_to_snake] ( identifier[key] ): identifier[dataflow_options] [ identifier[key] ]
keyword[for] identifier[key] keyword[in] identifier[dataflow_options] }
identifier[hook] . identifier[start_python_dataflow] (
identifier[self] . identifier[job_name] , identifier[formatted_options] ,
identifier[self] . identifier[py_file] , identifier[self] . identifier[py_options] ) | def execute(self, context):
"""Execute the python dataflow job."""
bucket_helper = GoogleCloudBucketHelper(self.gcp_conn_id, self.delegate_to)
self.py_file = bucket_helper.google_cloud_to_local(self.py_file)
hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id, delegate_to=self.delegate_to, poll_sleep=self.poll_sleep)
dataflow_options = self.dataflow_default_options.copy()
dataflow_options.update(self.options)
# Convert argument names from lowerCamelCase to snake case.
camel_to_snake = lambda name: re.sub('[A-Z]', lambda x: '_' + x.group(0).lower(), name)
formatted_options = {camel_to_snake(key): dataflow_options[key] for key in dataflow_options}
hook.start_python_dataflow(self.job_name, formatted_options, self.py_file, self.py_options) |
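The lowerCamelCase to snake_case conversion can be verified standalone; note that a leading capital would produce a leading underscore, which is why it is only applied to lowerCamelCase option names.

import re

camel_to_snake = lambda name: re.sub(
    r'[A-Z]', lambda x: '_' + x.group(0).lower(), name)

print(camel_to_snake('maxNumWorkers'))  # max_num_workers
print(camel_to_snake('zone'))           # zone (no uppercase, unchanged)
print(camel_to_snake('Zone'))           # _zone (leading capital -> leading underscore)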
def get_manifest(self, asset_xml):
"""
Construct and return the xml manifest to deliver along with video file.
"""
# pylint: disable=E1101
manifest = '<?xml version="1.0" encoding="utf-8"?>'
manifest += '<publisher-upload-manifest publisher-id="%s" ' % \
self.publisher_id
manifest += 'preparer="%s" ' % self.preparer
if self.report_success:
manifest += 'report-success="TRUE">\n'
for notify in self.notifications:
manifest += '<notify email="%s"/>' % notify
if self.callback:
manifest += '<callback entity-url="%s"/>' % self.callback
manifest += asset_xml
manifest += '</publisher-upload-manifest>'
return manifest | def function[get_manifest, parameter[self, asset_xml]]:
constant[
Construct and return the xml manifest to deliver along with video file.
]
variable[manifest] assign[=] constant[<?xml version="1.0" encoding="utf-8"?>]
<ast.AugAssign object at 0x7da18ede4e50>
<ast.AugAssign object at 0x7da18ede4fd0>
if name[self].report_success begin[:]
<ast.AugAssign object at 0x7da18ede6e30>
for taget[name[notify]] in starred[name[self].notifications] begin[:]
<ast.AugAssign object at 0x7da18ede5300>
if name[self].callback begin[:]
<ast.AugAssign object at 0x7da18ede5840>
<ast.AugAssign object at 0x7da18ede5030>
<ast.AugAssign object at 0x7da18ede7880>
return[name[manifest]] | keyword[def] identifier[get_manifest] ( identifier[self] , identifier[asset_xml] ):
literal[string]
identifier[manifest] = literal[string]
identifier[manifest] += literal[string] % identifier[self] . identifier[publisher_id]
identifier[manifest] += literal[string] % identifier[self] . identifier[preparer]
keyword[if] identifier[self] . identifier[report_success] :
identifier[manifest] += literal[string]
keyword[for] identifier[notify] keyword[in] identifier[self] . identifier[notifications] :
identifier[manifest] += literal[string] % identifier[notify]
keyword[if] identifier[self] . identifier[callback] :
identifier[manifest] += literal[string] % identifier[self] . identifier[callback]
identifier[manifest] += identifier[asset_xml]
identifier[manifest] += literal[string]
keyword[return] identifier[manifest] | def get_manifest(self, asset_xml):
"""
Construct and return the xml manifest to deliver along with video file.
"""
# pylint: disable=E1101
manifest = '<?xml version="1.0" encoding="utf-8"?>'
manifest += '<publisher-upload-manifest publisher-id="%s" ' % self.publisher_id
manifest += 'preparer="%s" ' % self.preparer
if self.report_success:
manifest += 'report-success="TRUE">\n' # depends on [control=['if'], data=[]]
for notify in self.notifications:
manifest += '<notify email="%s"/>' % notify # depends on [control=['for'], data=['notify']]
if self.callback:
manifest += '<callback entity-url="%s"/>' % self.callback # depends on [control=['if'], data=[]]
manifest += asset_xml
manifest += '</publisher-upload-manifest>'
return manifest |
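One thing to watch in the branch above: the opening <publisher-upload-manifest ...> tag is only closed with '>' inside the report_success branch, so a falsy report_success leaves the XML malformed. A self-contained reduction of the issue with one possible fix (the else branch is an assumption about the intended behavior):

def open_manifest_tag(publisher_id, preparer, report_success):
    tag = '<publisher-upload-manifest publisher-id="%s" ' % publisher_id
    tag += 'preparer="%s"' % preparer
    # Close the tag in both branches, not only when report_success is set.
    tag += ' report-success="TRUE">\n' if report_success else '>\n'
    return tag

print(open_manifest_tag('pub1', 'prep1', False))  # tag is well formed either way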
def get_console_info(kernel32, handle):
"""Get information about this current console window.
http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
https://code.google.com/p/colorama/issues/detail?id=47
https://bitbucket.org/pytest-dev/py/src/4617fe46/py/_io/terminalwriter.py
Windows 10 Insider since around February 2016 finally introduced support for ANSI colors. No need to replace stdout
and stderr streams to intercept colors and issue multiple SetConsoleTextAttribute() calls for these consoles.
:raise OSError: When GetConsoleScreenBufferInfo or GetConsoleMode API calls fail.
:param ctypes.windll.kernel32 kernel32: Loaded kernel32 instance.
:param int handle: stderr or stdout handle.
:return: Foreground and background colors (integers) as well as native ANSI support (bool).
:rtype: tuple
"""
# Query Win32 API.
csbi = ConsoleScreenBufferInfo() # Populated by GetConsoleScreenBufferInfo.
lpcsbi = ctypes.byref(csbi)
dword = ctypes.c_ulong() # Populated by GetConsoleMode.
lpdword = ctypes.byref(dword)
if not kernel32.GetConsoleScreenBufferInfo(handle, lpcsbi) or not kernel32.GetConsoleMode(handle, lpdword):
raise ctypes.WinError()
# Parse data.
# buffer_width = int(csbi.dwSize.X - 1)
# buffer_height = int(csbi.dwSize.Y)
# terminal_width = int(csbi.srWindow.Right - csbi.srWindow.Left)
# terminal_height = int(csbi.srWindow.Bottom - csbi.srWindow.Top)
fg_color = csbi.wAttributes % 16
bg_color = csbi.wAttributes & 240
native_ansi = bool(dword.value & ENABLE_VIRTUAL_TERMINAL_PROCESSING)
return fg_color, bg_color, native_ansi | def function[get_console_info, parameter[kernel32, handle]]:
constant[Get information about this current console window.
http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
https://code.google.com/p/colorama/issues/detail?id=47
https://bitbucket.org/pytest-dev/py/src/4617fe46/py/_io/terminalwriter.py
Windows 10 Insider since around February 2016 finally introduced support for ANSI colors. No need to replace stdout
and stderr streams to intercept colors and issue multiple SetConsoleTextAttribute() calls for these consoles.
:raise OSError: When GetConsoleScreenBufferInfo or GetConsoleMode API calls fail.
:param ctypes.windll.kernel32 kernel32: Loaded kernel32 instance.
:param int handle: stderr or stdout handle.
:return: Foreground and background colors (integers) as well as native ANSI support (bool).
:rtype: tuple
]
variable[csbi] assign[=] call[name[ConsoleScreenBufferInfo], parameter[]]
variable[lpcsbi] assign[=] call[name[ctypes].byref, parameter[name[csbi]]]
variable[dword] assign[=] call[name[ctypes].c_ulong, parameter[]]
variable[lpdword] assign[=] call[name[ctypes].byref, parameter[name[dword]]]
if <ast.BoolOp object at 0x7da1b0f386d0> begin[:]
<ast.Raise object at 0x7da1b0f394e0>
variable[fg_color] assign[=] binary_operation[name[csbi].wAttributes <ast.Mod object at 0x7da2590d6920> constant[16]]
variable[bg_color] assign[=] binary_operation[name[csbi].wAttributes <ast.BitAnd object at 0x7da2590d6b60> constant[240]]
variable[native_ansi] assign[=] call[name[bool], parameter[binary_operation[name[dword].value <ast.BitAnd object at 0x7da2590d6b60> name[ENABLE_VIRTUAL_TERMINAL_PROCESSING]]]]
return[tuple[[<ast.Name object at 0x7da1b0e8e500>, <ast.Name object at 0x7da1b0e8e950>, <ast.Name object at 0x7da1b0e8e710>]]] | keyword[def] identifier[get_console_info] ( identifier[kernel32] , identifier[handle] ):
literal[string]
identifier[csbi] = identifier[ConsoleScreenBufferInfo] ()
identifier[lpcsbi] = identifier[ctypes] . identifier[byref] ( identifier[csbi] )
identifier[dword] = identifier[ctypes] . identifier[c_ulong] ()
identifier[lpdword] = identifier[ctypes] . identifier[byref] ( identifier[dword] )
keyword[if] keyword[not] identifier[kernel32] . identifier[GetConsoleScreenBufferInfo] ( identifier[handle] , identifier[lpcsbi] ) keyword[or] keyword[not] identifier[kernel32] . identifier[GetConsoleMode] ( identifier[handle] , identifier[lpdword] ):
keyword[raise] identifier[ctypes] . identifier[WinError] ()
identifier[fg_color] = identifier[csbi] . identifier[wAttributes] % literal[int]
identifier[bg_color] = identifier[csbi] . identifier[wAttributes] & literal[int]
identifier[native_ansi] = identifier[bool] ( identifier[dword] . identifier[value] & identifier[ENABLE_VIRTUAL_TERMINAL_PROCESSING] )
keyword[return] identifier[fg_color] , identifier[bg_color] , identifier[native_ansi] | def get_console_info(kernel32, handle):
"""Get information about this current console window.
http://msdn.microsoft.com/en-us/library/windows/desktop/ms683231
https://code.google.com/p/colorama/issues/detail?id=47
https://bitbucket.org/pytest-dev/py/src/4617fe46/py/_io/terminalwriter.py
Windows 10 Insider since around February 2016 finally introduced support for ANSI colors. No need to replace stdout
and stderr streams to intercept colors and issue multiple SetConsoleTextAttribute() calls for these consoles.
:raise OSError: When GetConsoleScreenBufferInfo or GetConsoleMode API calls fail.
:param ctypes.windll.kernel32 kernel32: Loaded kernel32 instance.
:param int handle: stderr or stdout handle.
:return: Foreground and background colors (integers) as well as native ANSI support (bool).
:rtype: tuple
"""
# Query Win32 API.
csbi = ConsoleScreenBufferInfo() # Populated by GetConsoleScreenBufferInfo.
lpcsbi = ctypes.byref(csbi)
dword = ctypes.c_ulong() # Populated by GetConsoleMode.
lpdword = ctypes.byref(dword)
if not kernel32.GetConsoleScreenBufferInfo(handle, lpcsbi) or not kernel32.GetConsoleMode(handle, lpdword):
raise ctypes.WinError() # depends on [control=['if'], data=[]]
# Parse data.
# buffer_width = int(csbi.dwSize.X - 1)
# buffer_height = int(csbi.dwSize.Y)
# terminal_width = int(csbi.srWindow.Right - csbi.srWindow.Left)
# terminal_height = int(csbi.srWindow.Bottom - csbi.srWindow.Top)
fg_color = csbi.wAttributes % 16
bg_color = csbi.wAttributes & 240
native_ansi = bool(dword.value & ENABLE_VIRTUAL_TERMINAL_PROCESSING)
return (fg_color, bg_color, native_ansi) |
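A usage sketch, assuming the function above is importable and the code runs on Windows (ctypes.windll does not exist elsewhere); -11 is the documented STD_OUTPUT_HANDLE constant.

import ctypes

STD_OUTPUT_HANDLE = -11  # Win32 constant for the stdout handle
kernel32 = ctypes.windll.kernel32  # Windows only
handle = kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
fg, bg, native_ansi = get_console_info(kernel32, handle)
print(fg, bg, native_ansi)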
def GetEstimatedYear(self):
"""Retrieves an estimate of the year.
This function determines the year in the following manner:
* see if the user provided a preferred year;
* see if the knowledge base defines a year, e.g. derived from preprocessing;
* determine the year based on the file entry metadata;
* default to the current year.
Returns:
int: estimated year.
"""
# TODO: improve this method to get a more reliable estimate.
# Preserve the year-less date and sort this out in the psort phase.
if self._preferred_year:
return self._preferred_year
if self._knowledge_base.year:
return self._knowledge_base.year
# TODO: Find a decent way to actually calculate the correct year
# instead of relying on stats object.
year = self._GetEarliestYearFromFileEntry()
if not year:
year = self._GetLatestYearFromFileEntry()
if not year:
year = timelib.GetCurrentYear()
return year | def function[GetEstimatedYear, parameter[self]]:
constant[Retrieves an estimate of the year.
This function determines the year in the following manner:
* see if the user provided a preferred year;
* see if the knowledge base defines a year, e.g. derived from preprocessing;
* determine the year based on the file entry metadata;
* default to the current year.
Returns:
int: estimated year.
]
if name[self]._preferred_year begin[:]
return[name[self]._preferred_year]
if name[self]._knowledge_base.year begin[:]
return[name[self]._knowledge_base.year]
variable[year] assign[=] call[name[self]._GetEarliestYearFromFileEntry, parameter[]]
if <ast.UnaryOp object at 0x7da1b1d74d00> begin[:]
variable[year] assign[=] call[name[self]._GetLatestYearFromFileEntry, parameter[]]
if <ast.UnaryOp object at 0x7da1b1d75780> begin[:]
variable[year] assign[=] call[name[timelib].GetCurrentYear, parameter[]]
return[name[year]] | keyword[def] identifier[GetEstimatedYear] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_preferred_year] :
keyword[return] identifier[self] . identifier[_preferred_year]
keyword[if] identifier[self] . identifier[_knowledge_base] . identifier[year] :
keyword[return] identifier[self] . identifier[_knowledge_base] . identifier[year]
identifier[year] = identifier[self] . identifier[_GetEarliestYearFromFileEntry] ()
keyword[if] keyword[not] identifier[year] :
identifier[year] = identifier[self] . identifier[_GetLatestYearFromFileEntry] ()
keyword[if] keyword[not] identifier[year] :
identifier[year] = identifier[timelib] . identifier[GetCurrentYear] ()
keyword[return] identifier[year] | def GetEstimatedYear(self):
"""Retrieves an estimate of the year.
This function determines the year in the following manner:
* see if the user provided a preferred year;
* see if the knowledge base defines a year, e.g. derived from preprocessing;
* determine the year based on the file entry metadata;
* default to the current year.
Returns:
int: estimated year.
"""
# TODO: improve this method to get a more reliable estimate.
# Preserve the year-less date and sort this out in the psort phase.
if self._preferred_year:
return self._preferred_year # depends on [control=['if'], data=[]]
if self._knowledge_base.year:
return self._knowledge_base.year # depends on [control=['if'], data=[]]
# TODO: Find a decent way to actually calculate the correct year
# instead of relying on stats object.
year = self._GetEarliestYearFromFileEntry()
if not year:
year = self._GetLatestYearFromFileEntry() # depends on [control=['if'], data=[]]
if not year:
year = timelib.GetCurrentYear() # depends on [control=['if'], data=[]]
return year |
def inst_repr(instance, fmt='str', public_only=True):
"""
Generate class instance signature from its __dict__
From Python 3.6, dicts are ordered, so the order of attributes is preserved automatically
Args:
instance: class instance
fmt: ['json', 'str']
public_only: whether to display public members only
Returns:
str: string or json representation of instance
Examples:
>>> inst_repr(1)
''
>>> class SampleClass(object):
... def __init__(self):
... self.b = 3
... self.a = 4
... self._private_ = 'hidden'
>>>
>>> s = SampleClass()
>>> inst_repr(s)
'{b=3, a=4}'
>>> inst_repr(s, public_only=False)
'{b=3, a=4, _private_=hidden}'
>>> json.loads(inst_repr(s, fmt='json'))
{'b': 3, 'a': 4}
>>> inst_repr(s, fmt='unknown')
''
"""
if not hasattr(instance, '__dict__'): return ''
if public_only: inst_dict = {k: v for k, v in instance.__dict__.items() if k[0] != '_'}
else: inst_dict = instance.__dict__
if fmt == 'json': return json.dumps(inst_dict, indent=2)
elif fmt == 'str': return to_str(inst_dict, public_only=public_only)
return '' | def function[inst_repr, parameter[instance, fmt, public_only]]:
constant[
Generate class instance signature from its __dict__
From Python 3.6, dicts are ordered, so the order of attributes is preserved automatically
Args:
instance: class instance
fmt: ['json', 'str']
public_only: whether to display public members only
Returns:
str: string or json representation of instance
Examples:
>>> inst_repr(1)
''
>>> class SampleClass(object):
... def __init__(self):
... self.b = 3
... self.a = 4
... self._private_ = 'hidden'
>>>
>>> s = SampleClass()
>>> inst_repr(s)
'{b=3, a=4}'
>>> inst_repr(s, public_only=False)
'{b=3, a=4, _private_=hidden}'
>>> json.loads(inst_repr(s, fmt='json'))
{'b': 3, 'a': 4}
>>> inst_repr(s, fmt='unknown')
''
]
if <ast.UnaryOp object at 0x7da1b0b54dc0> begin[:]
return[constant[]]
if name[public_only] begin[:]
variable[inst_dict] assign[=] <ast.DictComp object at 0x7da1b0ce4250>
if compare[name[fmt] equal[==] constant[json]] begin[:]
return[call[name[json].dumps, parameter[name[inst_dict]]]]
return[constant[]] | keyword[def] identifier[inst_repr] ( identifier[instance] , identifier[fmt] = literal[string] , identifier[public_only] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[instance] , literal[string] ): keyword[return] literal[string]
keyword[if] identifier[public_only] : identifier[inst_dict] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[instance] . identifier[__dict__] . identifier[items] () keyword[if] identifier[k] [ literal[int] ]!= literal[string] }
keyword[else] : identifier[inst_dict] = identifier[instance] . identifier[__dict__]
keyword[if] identifier[fmt] == literal[string] : keyword[return] identifier[json] . identifier[dumps] ( identifier[inst_dict] , identifier[indent] = literal[int] )
keyword[elif] identifier[fmt] == literal[string] : keyword[return] identifier[to_str] ( identifier[inst_dict] , identifier[public_only] = identifier[public_only] )
keyword[return] literal[string] | def inst_repr(instance, fmt='str', public_only=True):
"""
Generate class instance signature from its __dict__
From Python 3.6, dicts are ordered, so the order of attributes is preserved automatically
Args:
instance: class instance
fmt: ['json', 'str']
public_only: whether to display public members only
Returns:
str: string or json representation of instance
Examples:
>>> inst_repr(1)
''
>>> class SampleClass(object):
... def __init__(self):
... self.b = 3
... self.a = 4
... self._private_ = 'hidden'
>>>
>>> s = SampleClass()
>>> inst_repr(s)
'{b=3, a=4}'
>>> inst_repr(s, public_only=False)
'{b=3, a=4, _private_=hidden}'
>>> json.loads(inst_repr(s, fmt='json'))
{'b': 3, 'a': 4}
>>> inst_repr(s, fmt='unknown')
''
"""
if not hasattr(instance, '__dict__'):
return '' # depends on [control=['if'], data=[]]
if public_only:
inst_dict = {k: v for (k, v) in instance.__dict__.items() if k[0] != '_'} # depends on [control=['if'], data=[]]
else:
inst_dict = instance.__dict__
if fmt == 'json':
return json.dumps(inst_dict, indent=2) # depends on [control=['if'], data=[]]
elif fmt == 'str':
return to_str(inst_dict, public_only=public_only) # depends on [control=['if'], data=[]]
return '' |
def _parse_custom_mpi_options(custom_mpi_options):
# type: (str) -> Tuple[argparse.Namespace, List[str]]
"""Parse custom MPI options provided by user. Known options default value will be overridden
and unknown options would be identified separately."""
parser = argparse.ArgumentParser()
parser.add_argument('--NCCL_DEBUG', default="INFO", type=str)
return parser.parse_known_args(custom_mpi_options.split()) | def function[_parse_custom_mpi_options, parameter[custom_mpi_options]]:
constant[Parse custom MPI options provided by user. Known options default value will be overridden
and unknown options would be identified separately.]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[--NCCL_DEBUG]]]
return[call[name[parser].parse_known_args, parameter[call[name[custom_mpi_options].split, parameter[]]]]] | keyword[def] identifier[_parse_custom_mpi_options] ( identifier[custom_mpi_options] ):
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ()
identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = literal[string] , identifier[type] = identifier[str] )
keyword[return] identifier[parser] . identifier[parse_known_args] ( identifier[custom_mpi_options] . identifier[split] ()) | def _parse_custom_mpi_options(custom_mpi_options):
# type: (str) -> Tuple[argparse.Namespace, List[str]]
'Parse custom MPI options provided by user. Known options default value will be overridden\n and unknown options would be identified separately.'
parser = argparse.ArgumentParser()
parser.add_argument('--NCCL_DEBUG', default='INFO', type=str)
return parser.parse_known_args(custom_mpi_options.split()) |
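parse_known_args is what splits the recognized option (with its overridden default) from everything else; a quick standalone check:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--NCCL_DEBUG', default='INFO', type=str)
known, unknown = parser.parse_known_args('--NCCL_DEBUG WARN --foo bar'.split())
print(known.NCCL_DEBUG)  # WARN (default overridden)
print(unknown)           # ['--foo', 'bar'] (left for the caller to handle)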
def add_actions(target, actions, insert_before=None):
"""Add actions to a QMenu or a QToolBar."""
previous_action = None
target_actions = list(target.actions())
if target_actions:
previous_action = target_actions[-1]
if previous_action.isSeparator():
previous_action = None
for action in actions:
if (action is None) and (previous_action is not None):
if insert_before is None:
target.addSeparator()
else:
target.insertSeparator(insert_before)
elif isinstance(action, QMenu):
if insert_before is None:
target.addMenu(action)
else:
target.insertMenu(insert_before, action)
elif isinstance(action, QAction):
if isinstance(action, SpyderAction):
if isinstance(target, QMenu) or not isinstance(target, QToolBar):
try:
action = action.no_icon_action
except RuntimeError:
continue
if insert_before is None:
# This is needed in order to ignore adding an action whose
# wrapped C/C++ object has been deleted. See issue 5074
try:
target.addAction(action)
except RuntimeError:
continue
else:
target.insertAction(insert_before, action)
previous_action = action | def function[add_actions, parameter[target, actions, insert_before]]:
constant[Add actions to a QMenu or a QToolBar.]
variable[previous_action] assign[=] constant[None]
variable[target_actions] assign[=] call[name[list], parameter[call[name[target].actions, parameter[]]]]
if name[target_actions] begin[:]
variable[previous_action] assign[=] call[name[target_actions]][<ast.UnaryOp object at 0x7da2041dadd0>]
if call[name[previous_action].isSeparator, parameter[]] begin[:]
variable[previous_action] assign[=] constant[None]
for taget[name[action]] in starred[name[actions]] begin[:]
if <ast.BoolOp object at 0x7da2041d8670> begin[:]
if compare[name[insert_before] is constant[None]] begin[:]
call[name[target].addSeparator, parameter[]]
variable[previous_action] assign[=] name[action] | keyword[def] identifier[add_actions] ( identifier[target] , identifier[actions] , identifier[insert_before] = keyword[None] ):
literal[string]
identifier[previous_action] = keyword[None]
identifier[target_actions] = identifier[list] ( identifier[target] . identifier[actions] ())
keyword[if] identifier[target_actions] :
identifier[previous_action] = identifier[target_actions] [- literal[int] ]
keyword[if] identifier[previous_action] . identifier[isSeparator] ():
identifier[previous_action] = keyword[None]
keyword[for] identifier[action] keyword[in] identifier[actions] :
keyword[if] ( identifier[action] keyword[is] keyword[None] ) keyword[and] ( identifier[previous_action] keyword[is] keyword[not] keyword[None] ):
keyword[if] identifier[insert_before] keyword[is] keyword[None] :
identifier[target] . identifier[addSeparator] ()
keyword[else] :
identifier[target] . identifier[insertSeparator] ( identifier[insert_before] )
keyword[elif] identifier[isinstance] ( identifier[action] , identifier[QMenu] ):
keyword[if] identifier[insert_before] keyword[is] keyword[None] :
identifier[target] . identifier[addMenu] ( identifier[action] )
keyword[else] :
identifier[target] . identifier[insertMenu] ( identifier[insert_before] , identifier[action] )
keyword[elif] identifier[isinstance] ( identifier[action] , identifier[QAction] ):
keyword[if] identifier[isinstance] ( identifier[action] , identifier[SpyderAction] ):
keyword[if] identifier[isinstance] ( identifier[target] , identifier[QMenu] ) keyword[or] keyword[not] identifier[isinstance] ( identifier[target] , identifier[QToolBar] ):
keyword[try] :
identifier[action] = identifier[action] . identifier[no_icon_action]
keyword[except] identifier[RuntimeError] :
keyword[continue]
keyword[if] identifier[insert_before] keyword[is] keyword[None] :
keyword[try] :
identifier[target] . identifier[addAction] ( identifier[action] )
keyword[except] identifier[RuntimeError] :
keyword[continue]
keyword[else] :
identifier[target] . identifier[insertAction] ( identifier[insert_before] , identifier[action] )
identifier[previous_action] = identifier[action] | def add_actions(target, actions, insert_before=None):
"""Add actions to a QMenu or a QToolBar."""
previous_action = None
target_actions = list(target.actions())
if target_actions:
previous_action = target_actions[-1]
if previous_action.isSeparator():
previous_action = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
for action in actions:
if action is None and previous_action is not None:
if insert_before is None:
target.addSeparator() # depends on [control=['if'], data=[]]
else:
target.insertSeparator(insert_before) # depends on [control=['if'], data=[]]
elif isinstance(action, QMenu):
if insert_before is None:
target.addMenu(action) # depends on [control=['if'], data=[]]
else:
target.insertMenu(insert_before, action) # depends on [control=['if'], data=[]]
elif isinstance(action, QAction):
if isinstance(action, SpyderAction):
if isinstance(target, QMenu) or not isinstance(target, QToolBar):
try:
action = action.no_icon_action # depends on [control=['try'], data=[]]
except RuntimeError:
continue # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if insert_before is None: # This is needed in order to ignore adding an action whose
# wrapped C/C++ object has been deleted. See issue 5074
try:
target.addAction(action) # depends on [control=['try'], data=[]]
except RuntimeError:
continue # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
target.insertAction(insert_before, action) # depends on [control=['if'], data=[]]
previous_action = action # depends on [control=['for'], data=['action']] |
def _back_compatible_gemini(conf_files, data):
"""Provide old install directory for configuration with GEMINI supplied tidy VCFs.
Handles new style (bcbio installed) and old style (GEMINI installed)
configuration and data locations.
"""
if vcfanno.is_human(data, builds=["37"]):
for f in conf_files:
if f and os.path.basename(f) == "gemini.conf" and os.path.exists(f):
with open(f) as in_handle:
for line in in_handle:
if line.startswith("file"):
fname = line.strip().split("=")[-1].replace('"', '').strip()
if fname.find(".tidy.") > 0:
return install.get_gemini_dir(data)
return None | def function[_back_compatible_gemini, parameter[conf_files, data]]:
constant[Provide old install directory for configuration with GEMINI supplied tidy VCFs.
Handles new style (bcbio installed) and old style (GEMINI installed)
configuration and data locations.
]
if call[name[vcfanno].is_human, parameter[name[data]]] begin[:]
for taget[name[f]] in starred[name[conf_files]] begin[:]
if <ast.BoolOp object at 0x7da1b1713d90> begin[:]
with call[name[open], parameter[name[f]]] begin[:]
for taget[name[line]] in starred[name[in_handle]] begin[:]
if call[name[line].startswith, parameter[constant[file]]] begin[:]
variable[fname] assign[=] call[call[call[call[call[name[line].strip, parameter[]].split, parameter[constant[=]]]][<ast.UnaryOp object at 0x7da1b18da530>].replace, parameter[constant["], constant[]]].strip, parameter[]]
if compare[call[name[fname].find, parameter[constant[.tidy.]]] greater[>] constant[0]] begin[:]
return[call[name[install].get_gemini_dir, parameter[name[data]]]]
return[constant[None]] | keyword[def] identifier[_back_compatible_gemini] ( identifier[conf_files] , identifier[data] ):
literal[string]
keyword[if] identifier[vcfanno] . identifier[is_human] ( identifier[data] , identifier[builds] =[ literal[string] ]):
keyword[for] identifier[f] keyword[in] identifier[conf_files] :
keyword[if] identifier[f] keyword[and] identifier[os] . identifier[path] . identifier[basename] ( identifier[f] )== literal[string] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[f] ):
keyword[with] identifier[open] ( identifier[f] ) keyword[as] identifier[in_handle] :
keyword[for] identifier[line] keyword[in] identifier[in_handle] :
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[fname] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] )[- literal[int] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[strip] ()
keyword[if] identifier[fname] . identifier[find] ( literal[string] )> literal[int] :
keyword[return] identifier[install] . identifier[get_gemini_dir] ( identifier[data] )
keyword[return] keyword[None] | def _back_compatible_gemini(conf_files, data):
"""Provide old install directory for configuration with GEMINI supplied tidy VCFs.
Handles new style (bcbio installed) and old style (GEMINI installed)
configuration and data locations.
"""
if vcfanno.is_human(data, builds=['37']):
for f in conf_files:
if f and os.path.basename(f) == 'gemini.conf' and os.path.exists(f):
with open(f) as in_handle:
for line in in_handle:
if line.startswith('file'):
fname = line.strip().split('=')[-1].replace('"', '').strip()
if fname.find('.tidy.') > 0:
return install.get_gemini_dir(data) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['in_handle']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] # depends on [control=['if'], data=[]]
return None |
def number(self):
# type: () -> int
""" Return this commits number.
This is the same as the total number of commits in history up until
this commit.
This value can be useful in some CI scenarios as it allows to track
progress on any given branch (although there can be two commits with the
same number existing on different branches).
Returns:
int: The commit number/index.
"""
cmd = 'git log --oneline {}'.format(self.sha1)
out = shell.run(cmd, capture=True, never_pretend=True).stdout.strip()
return len(out.splitlines()) | def function[number, parameter[self]]:
constant[ Return this commit's number.
This is the same as the total number of commits in history up until
this commit.
This value can be useful in some CI scenarios as it allows tracking
progress on any given branch (although there can be two commits with the
same number existing on different branches).
Returns:
int: The commit number/index.
]
variable[cmd] assign[=] call[constant[git log --oneline {}].format, parameter[name[self].sha1]]
variable[out] assign[=] call[call[name[shell].run, parameter[name[cmd]]].stdout.strip, parameter[]]
return[call[name[len], parameter[call[name[out].splitlines, parameter[]]]]] | keyword[def] identifier[number] ( identifier[self] ):
literal[string]
identifier[cmd] = literal[string] . identifier[format] ( identifier[self] . identifier[sha1] )
identifier[out] = identifier[shell] . identifier[run] ( identifier[cmd] , identifier[capture] = keyword[True] , identifier[never_pretend] = keyword[True] ). identifier[stdout] . identifier[strip] ()
keyword[return] identifier[len] ( identifier[out] . identifier[splitlines] ()) | def number(self):
# type: () -> int
' Return this commit\'s number.\n\n    This is the same as the total number of commits in history up until\n    this commit.\n\n    This value can be useful in some CI scenarios as it allows tracking\n    progress on any given branch (although there can be two commits with the\n    same number existing on different branches).\n\n    Returns:\n        int: The commit number/index.\n    '
cmd = 'git log --oneline {}'.format(self.sha1)
out = shell.run(cmd, capture=True, never_pretend=True).stdout.strip()
return len(out.splitlines()) |
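Counting the lines of git log --oneline works but buffers the whole history in memory; a hedged alternative (assuming a git checkout and a valid sha1) asks git for the count directly:

import subprocess

def commit_number(sha1):
    # Equivalent count without materializing every log line in Python.
    out = subprocess.check_output(['git', 'rev-list', '--count', sha1])
    return int(out.strip())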
def kick(self, group_name, user):
""" https://api.slack.com/methods/groups.kick
"""
group_id = self.get_group_id(group_name)
self.params.update({
'channel': group_id,
'user': user,
})
return FromUrl('https://slack.com/api/groups.kick', self._requests)(data=self.params).post() | def function[kick, parameter[self, group_name, user]]:
constant[ https://api.slack.com/methods/groups.kick
]
variable[group_id] assign[=] call[name[self].get_group_id, parameter[name[group_name]]]
call[name[self].params.update, parameter[dictionary[[<ast.Constant object at 0x7da1b1604be0>, <ast.Constant object at 0x7da1b1606680>], [<ast.Name object at 0x7da1b1606290>, <ast.Name object at 0x7da1b16040a0>]]]]
return[call[call[call[name[FromUrl], parameter[constant[https://slack.com/api/groups.kick], name[self]._requests]], parameter[]].post, parameter[]]] | keyword[def] identifier[kick] ( identifier[self] , identifier[group_name] , identifier[user] ):
literal[string]
identifier[group_id] = identifier[self] . identifier[get_group_id] ( identifier[group_name] )
identifier[self] . identifier[params] . identifier[update] ({
literal[string] : identifier[group_id] ,
literal[string] : identifier[user] ,
})
keyword[return] identifier[FromUrl] ( literal[string] , identifier[self] . identifier[_requests] )( identifier[data] = identifier[self] . identifier[params] ). identifier[post] () | def kick(self, group_name, user):
""" https://api.slack.com/methods/groups.kick
"""
group_id = self.get_group_id(group_name)
self.params.update({'channel': group_id, 'user': user})
return FromUrl('https://slack.com/api/groups.kick', self._requests)(data=self.params).post() |
def match_entry_line(str_to_match, regex_obj=MAIN_REGEX_OBJ):
"""Does a regex match of the mount entry string"""
match_obj = regex_obj.match(str_to_match)
if not match_obj:
error_message = ('Line "%s" is unrecognized by overlay4u. '
'This is only meant for use with Ubuntu Linux.')
raise UnrecognizedMountEntry(error_message % str_to_match)
return match_obj.groupdict() | def function[match_entry_line, parameter[str_to_match, regex_obj]]:
constant[Does a regex match of the mount entry string]
variable[match_obj] assign[=] call[name[regex_obj].match, parameter[name[str_to_match]]]
if <ast.UnaryOp object at 0x7da20e955b40> begin[:]
variable[error_message] assign[=] constant[Line "%s" is unrecognized by overlay4u. This is only meant for use with Ubuntu Linux.]
<ast.Raise object at 0x7da20e957c10>
return[call[name[match_obj].groupdict, parameter[]]] | keyword[def] identifier[match_entry_line] ( identifier[str_to_match] , identifier[regex_obj] = identifier[MAIN_REGEX_OBJ] ):
literal[string]
identifier[match_obj] = identifier[regex_obj] . identifier[match] ( identifier[str_to_match] )
keyword[if] keyword[not] identifier[match_obj] :
identifier[error_message] =( literal[string]
literal[string] )
keyword[raise] identifier[UnrecognizedMountEntry] ( identifier[error_message] % identifier[str_to_match] )
keyword[return] identifier[match_obj] . identifier[groupdict] () | def match_entry_line(str_to_match, regex_obj=MAIN_REGEX_OBJ):
"""Does a regex match of the mount entry string"""
match_obj = regex_obj.match(str_to_match)
if not match_obj:
error_message = 'Line "%s" is unrecognized by overlay4u. This is only meant for use with Ubuntu Linux.'
raise UnrecognizedMountEntry(error_message % str_to_match) # depends on [control=['if'], data=[]]
return match_obj.groupdict() |
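A demonstration with a hypothetical pattern standing in for MAIN_REGEX_OBJ (the real overlay4u regex is defined elsewhere); the named groups are what groupdict() returns:

import re

mount_regex = re.compile(
    r'(?P<device>\S+) on (?P<mount_point>\S+) type (?P<fs_type>\S+)')
line = 'overlayfs on /mnt/overlay type overlayfs'
print(match_entry_line(line, mount_regex))
# {'device': 'overlayfs', 'mount_point': '/mnt/overlay', 'fs_type': 'overlayfs'}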
def find_executable(executable, path=None):
"""Tries to find 'executable' in the directories listed in 'path'.
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
"""
if path is None:
path = os.environ['PATH']
paths = path.split(os.pathsep)
base, ext = os.path.splitext(executable)
if (sys.platform == 'win32' or os.name == 'os2') and (ext != '.exe'):
executable = executable + '.exe'
if not os.path.isfile(executable):
for p in paths:
f = os.path.join(p, executable)
if os.path.isfile(f):
# the file exists, we have a shot at spawn working
return f
return None
else:
return executable | def function[find_executable, parameter[executable, path]]:
constant[Tries to find 'executable' in the directories listed in 'path'.
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
]
if compare[name[path] is constant[None]] begin[:]
variable[path] assign[=] call[name[os].environ][constant[PATH]]
variable[paths] assign[=] call[name[path].split, parameter[name[os].pathsep]]
<ast.Tuple object at 0x7da18f8126e0> assign[=] call[name[os].path.splitext, parameter[name[executable]]]
if <ast.BoolOp object at 0x7da18f810070> begin[:]
variable[executable] assign[=] binary_operation[name[executable] + constant[.exe]]
if <ast.UnaryOp object at 0x7da18f811540> begin[:]
for taget[name[p]] in starred[name[paths]] begin[:]
variable[f] assign[=] call[name[os].path.join, parameter[name[p], name[executable]]]
if call[name[os].path.isfile, parameter[name[f]]] begin[:]
return[name[f]]
return[constant[None]] | keyword[def] identifier[find_executable] ( identifier[executable] , identifier[path] = keyword[None] ):
literal[string]
keyword[if] identifier[path] keyword[is] keyword[None] :
identifier[path] = identifier[os] . identifier[environ] [ literal[string] ]
identifier[paths] = identifier[path] . identifier[split] ( identifier[os] . identifier[pathsep] )
identifier[base] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[executable] )
keyword[if] ( identifier[sys] . identifier[platform] == literal[string] keyword[or] identifier[os] . identifier[name] == literal[string] ) keyword[and] ( identifier[ext] != literal[string] ):
identifier[executable] = identifier[executable] + literal[string]
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[executable] ):
keyword[for] identifier[p] keyword[in] identifier[paths] :
identifier[f] = identifier[os] . identifier[path] . identifier[join] ( identifier[p] , identifier[executable] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[f] ):
keyword[return] identifier[f]
keyword[return] keyword[None]
keyword[else] :
keyword[return] identifier[executable] | def find_executable(executable, path=None):
"""Tries to find 'executable' in the directories listed in 'path'.
A string listing directories separated by 'os.pathsep'; defaults to
os.environ['PATH']. Returns the complete filename or None if not found.
"""
if path is None:
path = os.environ['PATH'] # depends on [control=['if'], data=['path']]
paths = path.split(os.pathsep)
(base, ext) = os.path.splitext(executable)
if (sys.platform == 'win32' or os.name == 'os2') and ext != '.exe':
executable = executable + '.exe' # depends on [control=['if'], data=[]]
if not os.path.isfile(executable):
for p in paths:
f = os.path.join(p, executable)
if os.path.isfile(f):
# the file exists, we have a shot at spawn working
return f # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']]
return None # depends on [control=['if'], data=[]]
else:
return executable |
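Usage is straightforward; the result depends on the machine's PATH, and on Windows (or OS/2) an '.exe' suffix is appended automatically:

import os

print(find_executable('python'))  # e.g. /usr/bin/python, or None if absent
print(find_executable('python', path='/usr/local/bin' + os.pathsep + '/usr/bin'))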
def remove_exclude_regions(orig_bed, base_file, items, remove_entire_feature=False):
"""Remove centromere and short end regions from an existing BED file of regions to target.
"""
from bcbio.structural import shared as sshared
out_bed = os.path.join("%s-noexclude.bed" % (utils.splitext_plus(base_file)[0]))
if not utils.file_uptodate(out_bed, orig_bed):
exclude_bed = sshared.prepare_exclude_file(items, base_file)
with file_transaction(items[0], out_bed) as tx_out_bed:
pybedtools.BedTool(orig_bed).subtract(pybedtools.BedTool(exclude_bed),
A=remove_entire_feature, nonamecheck=True).saveas(tx_out_bed)
if utils.file_exists(out_bed):
return out_bed
else:
return orig_bed | def function[remove_exclude_regions, parameter[orig_bed, base_file, items, remove_entire_feature]]:
constant[Remove centromere and short end regions from an existing BED file of regions to target.
]
from relative_module[bcbio.structural] import module[shared]
variable[out_bed] assign[=] call[name[os].path.join, parameter[binary_operation[constant[%s-noexclude.bed] <ast.Mod object at 0x7da2590d6920> call[call[name[utils].splitext_plus, parameter[name[base_file]]]][constant[0]]]]]
if <ast.UnaryOp object at 0x7da1b18ab760> begin[:]
variable[exclude_bed] assign[=] call[name[sshared].prepare_exclude_file, parameter[name[items], name[base_file]]]
with call[name[file_transaction], parameter[call[name[items]][constant[0]], name[out_bed]]] begin[:]
call[call[call[name[pybedtools].BedTool, parameter[name[orig_bed]]].subtract, parameter[call[name[pybedtools].BedTool, parameter[name[exclude_bed]]]]].saveas, parameter[name[tx_out_bed]]]
if call[name[utils].file_exists, parameter[name[out_bed]]] begin[:]
return[name[out_bed]] | keyword[def] identifier[remove_exclude_regions] ( identifier[orig_bed] , identifier[base_file] , identifier[items] , identifier[remove_entire_feature] = keyword[False] ):
literal[string]
keyword[from] identifier[bcbio] . identifier[structural] keyword[import] identifier[shared] keyword[as] identifier[sshared]
identifier[out_bed] = identifier[os] . identifier[path] . identifier[join] ( literal[string] %( identifier[utils] . identifier[splitext_plus] ( identifier[base_file] )[ literal[int] ]))
keyword[if] keyword[not] identifier[utils] . identifier[file_uptodate] ( identifier[out_bed] , identifier[orig_bed] ):
identifier[exclude_bed] = identifier[sshared] . identifier[prepare_exclude_file] ( identifier[items] , identifier[base_file] )
keyword[with] identifier[file_transaction] ( identifier[items] [ literal[int] ], identifier[out_bed] ) keyword[as] identifier[tx_out_bed] :
identifier[pybedtools] . identifier[BedTool] ( identifier[orig_bed] ). identifier[subtract] ( identifier[pybedtools] . identifier[BedTool] ( identifier[exclude_bed] ),
identifier[A] = identifier[remove_entire_feature] , identifier[nonamecheck] = keyword[True] ). identifier[saveas] ( identifier[tx_out_bed] )
keyword[if] identifier[utils] . identifier[file_exists] ( identifier[out_bed] ):
keyword[return] identifier[out_bed]
keyword[else] :
keyword[return] identifier[orig_bed] | def remove_exclude_regions(orig_bed, base_file, items, remove_entire_feature=False):
"""Remove centromere and short end regions from an existing BED file of regions to target.
"""
from bcbio.structural import shared as sshared
out_bed = os.path.join('%s-noexclude.bed' % utils.splitext_plus(base_file)[0])
if not utils.file_uptodate(out_bed, orig_bed):
exclude_bed = sshared.prepare_exclude_file(items, base_file)
with file_transaction(items[0], out_bed) as tx_out_bed:
pybedtools.BedTool(orig_bed).subtract(pybedtools.BedTool(exclude_bed), A=remove_entire_feature, nonamecheck=True).saveas(tx_out_bed) # depends on [control=['with'], data=['tx_out_bed']] # depends on [control=['if'], data=[]]
if utils.file_exists(out_bed):
return out_bed # depends on [control=['if'], data=[]]
else:
return orig_bed |
def main(args=None):
"""
Entry point for the tag CLI.
Isolated as a method so that the CLI can be called by other Python code
(e.g. for testing), in which case the arguments are passed to the function.
If no arguments are passed to the function, parse them from the command
line.
"""
if args is None:
args = tag.cli.parser().parse_args()
assert args.cmd in mains
mainmethod = mains[args.cmd]
mainmethod(args) | def function[main, parameter[args]]:
constant[
Entry point for the tag CLI.
Isolated as a method so that the CLI can be called by other Python code
(e.g. for testing), in which case the arguments are passed to the function.
If no arguments are passed to the function, parse them from the command
line.
]
if compare[name[args] is constant[None]] begin[:]
variable[args] assign[=] call[call[name[tag].cli.parser, parameter[]].parse_args, parameter[]]
assert[compare[name[args].cmd in name[mains]]]
variable[mainmethod] assign[=] call[name[mains]][name[args].cmd]
call[name[mainmethod], parameter[name[args]]] | keyword[def] identifier[main] ( identifier[args] = keyword[None] ):
literal[string]
keyword[if] identifier[args] keyword[is] keyword[None] :
identifier[args] = identifier[tag] . identifier[cli] . identifier[parser] (). identifier[parse_args] ()
keyword[assert] identifier[args] . identifier[cmd] keyword[in] identifier[mains]
identifier[mainmethod] = identifier[mains] [ identifier[args] . identifier[cmd] ]
identifier[mainmethod] ( identifier[args] ) | def main(args=None):
"""
Entry point for the tag CLI.
Isolated as a method so that the CLI can be called by other Python code
(e.g. for testing), in which case the arguments are passed to the function.
If no arguments are passed to the function, parse them from the command
line.
"""
if args is None:
args = tag.cli.parser().parse_args() # depends on [control=['if'], data=['args']]
assert args.cmd in mains
mainmethod = mains[args.cmd]
mainmethod(args) |
def Copy(self, name=None):
"""Returns a copy of this Cdf.
Args:
name: string name for the new Cdf
"""
if name is None:
name = self.name
return Cdf(list(self.xs), list(self.ps), name) | def function[Copy, parameter[self, name]]:
constant[Returns a copy of this Cdf.
Args:
name: string name for the new Cdf
]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] name[self].name
return[call[name[Cdf], parameter[call[name[list], parameter[name[self].xs]], call[name[list], parameter[name[self].ps]], name[name]]]] | keyword[def] identifier[Copy] ( identifier[self] , identifier[name] = keyword[None] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = identifier[self] . identifier[name]
keyword[return] identifier[Cdf] ( identifier[list] ( identifier[self] . identifier[xs] ), identifier[list] ( identifier[self] . identifier[ps] ), identifier[name] ) | def Copy(self, name=None):
"""Returns a copy of this Cdf.
Args:
name: string name for the new Cdf
"""
if name is None:
name = self.name # depends on [control=['if'], data=['name']]
return Cdf(list(self.xs), list(self.ps), name) |
def File(self, path):
"""Returns a reference to a file with a given path on client's VFS."""
return vfs.FileRef(
client_id=self.client_id, path=path, context=self._context) | def function[File, parameter[self, path]]:
constant[Returns a reference to a file with a given path on client's VFS.]
return[call[name[vfs].FileRef, parameter[]]] | keyword[def] identifier[File] ( identifier[self] , identifier[path] ):
literal[string]
keyword[return] identifier[vfs] . identifier[FileRef] (
identifier[client_id] = identifier[self] . identifier[client_id] , identifier[path] = identifier[path] , identifier[context] = identifier[self] . identifier[_context] ) | def File(self, path):
"""Returns a reference to a file with a given path on client's VFS."""
return vfs.FileRef(client_id=self.client_id, path=path, context=self._context) |
def _handle_output(results_queue):
"""Scan output for exceptions
If there is an output from an add task collection call, add it to the results.
:param results_queue: Queue containing results of attempted add_collection's
:type results_queue: collections.deque
:return: list of TaskAddResults
:rtype: list[~TaskAddResult]
"""
results = []
while results_queue:
queue_item = results_queue.pop()
results.append(queue_item)
return results | def function[_handle_output, parameter[results_queue]]:
constant[Scan output for exceptions
If there is an output from an add task collection call, add it to the results.
:param results_queue: Queue containing results of attempted add_collection's
:type results_queue: collections.deque
:return: list of TaskAddResults
:rtype: list[~TaskAddResult]
]
variable[results] assign[=] list[[]]
while name[results_queue] begin[:]
variable[queue_item] assign[=] call[name[results_queue].pop, parameter[]]
call[name[results].append, parameter[name[queue_item]]]
return[name[results]] | keyword[def] identifier[_handle_output] ( identifier[results_queue] ):
literal[string]
identifier[results] =[]
keyword[while] identifier[results_queue] :
identifier[queue_item] = identifier[results_queue] . identifier[pop] ()
identifier[results] . identifier[append] ( identifier[queue_item] )
keyword[return] identifier[results] | def _handle_output(results_queue):
"""Scan output for exceptions
If there is an output from an add task collection call, add it to the results.
:param results_queue: Queue containing results of attempted add_collection's
:type results_queue: collections.deque
:return: list of TaskAddResults
:rtype: list[~TaskAddResult]
"""
results = []
while results_queue:
queue_item = results_queue.pop()
results.append(queue_item) # depends on [control=['while'], data=[]]
return results |
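Worth noting: deque.pop() removes from the right end, so the returned list is in reverse insertion order (popleft() would preserve it). A quick check, assuming the function above:

from collections import deque

q = deque(['first', 'second', 'third'])
print(_handle_output(q))  # ['third', 'second', 'first'] -- queue is now empty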
def read(datapath, qt_app=None, dataplus_format=True, gui=False, start=0, stop=None, step=1, convert_to_gray=True,
series_number=None, dicom_expected=None, **kwargs):
"""Simple read function. Internally calls DataReader.Get3DData()"""
dr = DataReader()
return dr.Get3DData(datapath=datapath, qt_app=qt_app, dataplus_format=dataplus_format, gui=gui, start=start,
stop=stop, step=step, convert_to_gray=convert_to_gray, series_number=series_number,
use_economic_dtype=True, dicom_expected=dicom_expected, **kwargs)
constant[Simple read function. Internally calls DataReader.Get3DData()]
variable[dr] assign[=] call[name[DataReader], parameter[]]
return[call[name[dr].Get3DData, parameter[]]] | keyword[def] identifier[read] ( identifier[datapath] , identifier[qt_app] = keyword[None] , identifier[dataplus_format] = keyword[True] , identifier[gui] = keyword[False] , identifier[start] = literal[int] , identifier[stop] = keyword[None] , identifier[step] = literal[int] , identifier[convert_to_gray] = keyword[True] ,
identifier[series_number] = keyword[None] , identifier[dicom_expected] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[dr] = identifier[DataReader] ()
keyword[return] identifier[dr] . identifier[Get3DData] ( identifier[datapath] = identifier[datapath] , identifier[qt_app] = identifier[qt_app] , identifier[dataplus_format] = identifier[dataplus_format] , identifier[gui] = identifier[gui] , identifier[start] = identifier[start] ,
identifier[stop] = identifier[stop] , identifier[step] = identifier[step] , identifier[convert_to_gray] = identifier[convert_to_gray] , identifier[series_number] = identifier[series_number] ,
identifier[use_economic_dtype] = keyword[True] , identifier[dicom_expected] = identifier[dicom_expected] ,** identifier[kwargs] ) | def read(datapath, qt_app=None, dataplus_format=True, gui=False, start=0, stop=None, step=1, convert_to_gray=True, series_number=None, dicom_expected=None, **kwargs):
"""Simple read function. Internally calls DataReader.Get3DData()"""
dr = DataReader()
return dr.Get3DData(datapath=datapath, qt_app=qt_app, dataplus_format=dataplus_format, gui=gui, start=start, stop=stop, step=step, convert_to_gray=convert_to_gray, series_number=series_number, use_economic_dtype=True, dicom_expected=dicom_expected, **kwargs) |
def run(self, row, **kwargs):
"""Methods takes a row and depending if a dict or list,
runs RML rules.
Args:
-----
row(Dict, List): Row from CSV Reader
"""
self.source = row
kwargs['output'] = self.__graph__()
super(CSVRowProcessor, self).run(**kwargs)
return kwargs['output'] | def function[run, parameter[self, row]]:
constant[Methods takes a row and depending if a dict or list,
runs RML rules.
Args:
-----
row(Dict, List): Row from CSV Reader
]
name[self].source assign[=] name[row]
call[name[kwargs]][constant[output]] assign[=] call[name[self].__graph__, parameter[]]
call[call[name[super], parameter[name[CSVRowProcessor], name[self]]].run, parameter[]]
return[call[name[kwargs]][constant[output]]] | keyword[def] identifier[run] ( identifier[self] , identifier[row] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[source] = identifier[row]
identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[__graph__] ()
identifier[super] ( identifier[CSVRowProcessor] , identifier[self] ). identifier[run] (** identifier[kwargs] )
keyword[return] identifier[kwargs] [ literal[string] ] | def run(self, row, **kwargs):
"""Methods takes a row and depending if a dict or list,
runs RML rules.
Args:
-----
row(Dict, List): Row from CSV Reader
"""
self.source = row
kwargs['output'] = self.__graph__()
super(CSVRowProcessor, self).run(**kwargs)
return kwargs['output'] |
def min_pos(self):
'''Returns minimal positive value or None.'''
if self.__len__() == 0:
return ArgumentError('empty set has no minimum positive value.')
if self.contains(0):
return None
positive = [interval for interval in self.intervals
if interval.left > 0]
if len(positive) == 0:
return None
return numpy.min(list(map(lambda i: i.left, positive))) | def function[min_pos, parameter[self]]:
constant[Returns minimal positive value or None.]
if compare[call[name[self].__len__, parameter[]] equal[==] constant[0]] begin[:]
return[call[name[ArgumentError], parameter[constant[empty set has no minimum positive value.]]]]
if call[name[self].contains, parameter[constant[0]]] begin[:]
return[constant[None]]
variable[positive] assign[=] <ast.ListComp object at 0x7da1b26cbc70>
if compare[call[name[len], parameter[name[positive]]] equal[==] constant[0]] begin[:]
return[constant[None]]
return[call[name[numpy].min, parameter[call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b26ca050>, name[positive]]]]]]]] | keyword[def] identifier[min_pos] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[__len__] ()== literal[int] :
keyword[return] identifier[ArgumentError] ( literal[string] )
keyword[if] identifier[self] . identifier[contains] ( literal[int] ):
keyword[return] keyword[None]
identifier[positive] =[ identifier[interval] keyword[for] identifier[interval] keyword[in] identifier[self] . identifier[intervals]
keyword[if] identifier[interval] . identifier[left] > literal[int] ]
keyword[if] identifier[len] ( identifier[positive] )== literal[int] :
keyword[return] keyword[None]
keyword[return] identifier[numpy] . identifier[min] ( identifier[list] ( identifier[map] ( keyword[lambda] identifier[i] : identifier[i] . identifier[left] , identifier[positive] ))) | def min_pos(self):
"""Returns minimal positive value or None."""
if self.__len__() == 0:
return ArgumentError('empty set has no minimum positive value.') # depends on [control=['if'], data=[]]
if self.contains(0):
return None # depends on [control=['if'], data=[]]
positive = [interval for interval in self.intervals if interval.left > 0]
if len(positive) == 0:
return None # depends on [control=['if'], data=[]]
return numpy.min(list(map(lambda i: i.left, positive))) |
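The empty-set branch above returns an ArgumentError instance instead of raising it, so callers receive an exception object as a value with no traceback; presumably a raise was intended. A self-contained illustration of the difference:

class ArgumentError(Exception):
    pass

def bad_empty_check():
    return ArgumentError('empty set has no minimum positive value.')  # silent

def good_empty_check():
    raise ArgumentError('empty set has no minimum positive value.')   # loud

print(isinstance(bad_empty_check(), ArgumentError))  # True -- easy to miss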
def previous_unwrittable_on_col(view, coords):
"""Return position of the previous (in column) letter that is unwrittable"""
x, y = coords
miny = -1
for offset in range(y - 1, miny, -1):
letter = view[x, offset]
if letter not in REWRITABLE_LETTERS:
return offset
return None | def function[previous_unwrittable_on_col, parameter[view, coords]]:
constant[Return position of the previous (in column) letter that is unwrittable]
<ast.Tuple object at 0x7da20c6e7be0> assign[=] name[coords]
variable[miny] assign[=] <ast.UnaryOp object at 0x7da20c6e4ac0>
for taget[name[offset]] in starred[call[name[range], parameter[binary_operation[name[y] - constant[1]], name[miny], <ast.UnaryOp object at 0x7da20c6e4c70>]]] begin[:]
variable[letter] assign[=] call[name[view]][tuple[[<ast.Name object at 0x7da20c6e46a0>, <ast.Name object at 0x7da20c6e4bb0>]]]
if compare[name[letter] <ast.NotIn object at 0x7da2590d7190> name[REWRITABLE_LETTERS]] begin[:]
return[name[offset]]
return[constant[None]] | keyword[def] identifier[previous_unwrittable_on_col] ( identifier[view] , identifier[coords] ):
literal[string]
identifier[x] , identifier[y] = identifier[coords]
identifier[miny] =- literal[int]
keyword[for] identifier[offset] keyword[in] identifier[range] ( identifier[y] - literal[int] , identifier[miny] ,- literal[int] ):
identifier[letter] = identifier[view] [ identifier[x] , identifier[offset] ]
keyword[if] identifier[letter] keyword[not] keyword[in] identifier[REWRITABLE_LETTERS] :
keyword[return] identifier[offset]
keyword[return] keyword[None] | def previous_unwrittable_on_col(view, coords):
"""Return position of the previous (in column) letter that is unwrittable"""
(x, y) = coords
miny = -1
for offset in range(y - 1, miny, -1):
letter = view[x, offset]
if letter not in REWRITABLE_LETTERS:
return offset # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['offset']]
return None |
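previous_unwrittable_on_col only needs a view indexable by (x, y) tuples and a REWRITABLE_LETTERS collection; both stand-ins below are illustrative guesses, not the library's actual definitions.

REWRITABLE_LETTERS = {' ', '.'}  # assumed contents; the real set lives elsewhere


def previous_unwrittable_on_col(view, coords):
    """Return position of the previous (in column) letter that is unwrittable"""
    x, y = coords
    for offset in range(y - 1, -1, -1):  # scan upward in the column
        if view[x, offset] not in REWRITABLE_LETTERS:
            return offset
    return None


# A dict-backed grid is enough to exercise the scan.
view = {(0, 0): 'A', (0, 1): ' ', (0, 2): '.', (0, 3): ' '}
print(previous_unwrittable_on_col(view, (0, 3)))  # -> 0, the 'A'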
async def destroy_models(self, *models, destroy_storage=False):
"""Destroy one or more models.
:param str *models: Names or UUIDs of models to destroy
:param bool destroy_storage: Whether or not to destroy storage when
destroying the models. Defaults to false.
"""
uuids = await self.model_uuids()
models = [uuids[model] if model in uuids else model
for model in models]
model_facade = client.ModelManagerFacade.from_connection(
self.connection())
log.debug(
'Destroying model%s %s',
'' if len(models) == 1 else 's',
', '.join(models)
)
if model_facade.version >= 5:
params = [
client.DestroyModelParams(model_tag=tag.model(model),
destroy_storage=destroy_storage)
for model in models]
else:
params = [client.Entity(tag.model(model)) for model in models]
await model_facade.DestroyModels(params) | <ast.AsyncFunctionDef object at 0x7da1b0ebe1a0> | keyword[async] keyword[def] identifier[destroy_models] ( identifier[self] ,* identifier[models] , identifier[destroy_storage] = keyword[False] ):
literal[string]
identifier[uuids] = keyword[await] identifier[self] . identifier[model_uuids] ()
identifier[models] =[ identifier[uuids] [ identifier[model] ] keyword[if] identifier[model] keyword[in] identifier[uuids] keyword[else] identifier[model]
keyword[for] identifier[model] keyword[in] identifier[models] ]
identifier[model_facade] = identifier[client] . identifier[ModelManagerFacade] . identifier[from_connection] (
identifier[self] . identifier[connection] ())
identifier[log] . identifier[debug] (
literal[string] ,
literal[string] keyword[if] identifier[len] ( identifier[models] )== literal[int] keyword[else] literal[string] ,
literal[string] . identifier[join] ( identifier[models] )
)
keyword[if] identifier[model_facade] . identifier[version] >= literal[int] :
identifier[params] =[
identifier[client] . identifier[DestroyModelParams] ( identifier[model_tag] = identifier[tag] . identifier[model] ( identifier[model] ),
identifier[destroy_storage] = identifier[destroy_storage] )
keyword[for] identifier[model] keyword[in] identifier[models] ]
keyword[else] :
identifier[params] =[ identifier[client] . identifier[Entity] ( identifier[tag] . identifier[model] ( identifier[model] )) keyword[for] identifier[model] keyword[in] identifier[models] ]
keyword[await] identifier[model_facade] . identifier[DestroyModels] ( identifier[params] ) | async def destroy_models(self, *models, destroy_storage=False):
"""Destroy one or more models.
:param str *models: Names or UUIDs of models to destroy
:param bool destroy_storage: Whether or not to destroy storage when
destroying the models. Defaults to false.
"""
uuids = await self.model_uuids()
models = [uuids[model] if model in uuids else model for model in models]
model_facade = client.ModelManagerFacade.from_connection(self.connection())
log.debug('Destroying model%s %s', '' if len(models) == 1 else 's', ', '.join(models))
if model_facade.version >= 5:
params = [client.DestroyModelParams(model_tag=tag.model(model), destroy_storage=destroy_storage) for model in models] # depends on [control=['if'], data=[]]
else:
params = [client.Entity(tag.model(model)) for model in models]
await model_facade.DestroyModels(params) |
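The juju call itself needs a live controller, but the name-to-UUID resolution step in destroy_models can be exercised on its own; the uuids mapping below is invented for illustration.

# Stand-in for what model_uuids() might return; values are made up.
uuids = {'staging': 'deadbeef-0000-4000-8000-000000000001'}

requested = ['staging', 'cafebabe-0000-4000-8000-000000000002']
# Known names are translated to UUIDs; anything else (already a UUID,
# or simply unknown) passes through unchanged.
resolved = [uuids[m] if m in uuids else m for m in requested]
print(resolved)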
def build_notification_message(template_context, template_configuration=None):
"""
Create HTML and plaintext message bodies for a notification.
We receive a context with data we can use to render, as well as an optional site
template configuration - if we don't get a template configuration, we'll use the
standard, built-in template.
Arguments:
template_context (dict): A set of data to render
template_configuration: A database-backed object with templates
stored that can be used to render a notification.
"""
if (
template_configuration is not None and
template_configuration.html_template and
template_configuration.plaintext_template
):
plain_msg, html_msg = template_configuration.render_all_templates(template_context)
else:
plain_msg = render_to_string(
'enterprise/emails/user_notification.txt',
template_context
)
html_msg = render_to_string(
'enterprise/emails/user_notification.html',
template_context
)
return plain_msg, html_msg | def function[build_notification_message, parameter[template_context, template_configuration]]:
constant[
Create HTML and plaintext message bodies for a notification.
We receive a context with data we can use to render, as well as an optional site
template configuration - if we don't get a template configuration, we'll use the
standard, built-in template.
Arguments:
template_context (dict): A set of data to render
template_configuration: A database-backed object with templates
stored that can be used to render a notification.
]
if <ast.BoolOp object at 0x7da18f09f880> begin[:]
<ast.Tuple object at 0x7da18f09dfc0> assign[=] call[name[template_configuration].render_all_templates, parameter[name[template_context]]]
return[tuple[[<ast.Name object at 0x7da18f09cbb0>, <ast.Name object at 0x7da18f09efe0>]]] | keyword[def] identifier[build_notification_message] ( identifier[template_context] , identifier[template_configuration] = keyword[None] ):
literal[string]
keyword[if] (
identifier[template_configuration] keyword[is] keyword[not] keyword[None] keyword[and]
identifier[template_configuration] . identifier[html_template] keyword[and]
identifier[template_configuration] . identifier[plaintext_template]
):
identifier[plain_msg] , identifier[html_msg] = identifier[template_configuration] . identifier[render_all_templates] ( identifier[template_context] )
keyword[else] :
identifier[plain_msg] = identifier[render_to_string] (
literal[string] ,
identifier[template_context]
)
identifier[html_msg] = identifier[render_to_string] (
literal[string] ,
identifier[template_context]
)
keyword[return] identifier[plain_msg] , identifier[html_msg] | def build_notification_message(template_context, template_configuration=None):
"""
Create HTML and plaintext message bodies for a notification.
We receive a context with data we can use to render, as well as an optional site
template configuration - if we don't get a template configuration, we'll use the
standard, built-in template.
Arguments:
template_context (dict): A set of data to render
template_configuration: A database-backed object with templates
stored that can be used to render a notification.
"""
if template_configuration is not None and template_configuration.html_template and template_configuration.plaintext_template:
(plain_msg, html_msg) = template_configuration.render_all_templates(template_context) # depends on [control=['if'], data=[]]
else:
plain_msg = render_to_string('enterprise/emails/user_notification.txt', template_context)
html_msg = render_to_string('enterprise/emails/user_notification.html', template_context)
return (plain_msg, html_msg) |
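The fallback in build_notification_message (use the site's templates only when both variants are present, otherwise render built-in defaults) can be sketched without Django; string.Template stands in for render_to_string, a plain dict stands in for the template-configuration object, and all template texts are invented.

from string import Template

# Invented defaults standing in for the enterprise/emails/* template files.
DEFAULT_TXT = Template('Hello $name')
DEFAULT_HTML = Template('<p>Hello $name</p>')


def build_message(context, configuration=None):
    # Fall back unless the configuration supplies BOTH template variants.
    if configuration is not None and configuration.get('txt') and configuration.get('html'):
        return (Template(configuration['txt']).substitute(context),
                Template(configuration['html']).substitute(context))
    return (DEFAULT_TXT.substitute(context), DEFAULT_HTML.substitute(context))


print(build_message({'name': 'Ada'}))
print(build_message({'name': 'Ada'}, {'txt': 'Hi $name', 'html': '<b>Hi $name</b>'}))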