Dataset columns (each a string; value lengths shown as min – max characters):

    code              75 – 104k
    code_sememe       47 – 309k
    token_type        215 – 214k
    code_dependency   75 – 155k
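Each row below is a quadruple drawn from these four columns: the raw Python source, an AST-style "sememe" serialization, a token-type stream, and the source re-annotated with control-flow dependency comments. As a minimal sketch only — assuming this preview describes a Hugging Face-style dataset, which this excerpt does not confirm — one row could be inspected roughly as follows; the repository id "user/dataset" is a hypothetical placeholder.

import datasets

# Hypothetical repo id; substitute the real dataset name.
ds = datasets.load_dataset("user/dataset", split="train")
row = ds[0]
for column in ("code", "code_sememe", "token_type", "code_dependency"):
    # Print the first 80 characters of each parallel representation.
    print(column, "->", row[column][:80].replace("\n", " "), "...")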
def return_single_features_base(dbpath, set_object, object_id):
    """
    Generic function which returns the features of an object specified by the object_id

    Parameters
    ----------
    dbpath : string, path to SQLite database file
    set_object : object (either TestSet or TrainSet) which is stored in the database
    object_id : int, id of object in database

    Returns
    -------
    features : dict containing the features
    """
    engine = create_engine('sqlite:////' + dbpath)
    session_cl = sessionmaker(bind=engine)
    session = session_cl()
    tmp_object = session.query(set_object).get(object_id)
    session.close()
    return tmp_object.features
def function[return_single_features_base, parameter[dbpath, set_object, object_id]]: constant[ Generic function which returns the features of an object specified by the object_id Parameters ---------- dbpath : string, path to SQLite database file set_object : object (either TestSet or TrainSet) which is stored in the database object_id : int, id of object in database Returns ------- features : dict containing the features ] variable[engine] assign[=] call[name[create_engine], parameter[binary_operation[constant[sqlite:////] + name[dbpath]]]] variable[session_cl] assign[=] call[name[sessionmaker], parameter[]] variable[session] assign[=] call[name[session_cl], parameter[]] variable[tmp_object] assign[=] call[call[name[session].query, parameter[name[set_object]]].get, parameter[name[object_id]]] call[name[session].close, parameter[]] return[name[tmp_object].features]
keyword[def] identifier[return_single_features_base] ( identifier[dbpath] , identifier[set_object] , identifier[object_id] ): literal[string] identifier[engine] = identifier[create_engine] ( literal[string] + identifier[dbpath] ) identifier[session_cl] = identifier[sessionmaker] ( identifier[bind] = identifier[engine] ) identifier[session] = identifier[session_cl] () identifier[tmp_object] = identifier[session] . identifier[query] ( identifier[set_object] ). identifier[get] ( identifier[object_id] ) identifier[session] . identifier[close] () keyword[return] identifier[tmp_object] . identifier[features]
def return_single_features_base(dbpath, set_object, object_id):
    """
    Generic function which returns the features of an object specified by the object_id

    Parameters
    ----------
    dbpath : string, path to SQLite database file
    set_object : object (either TestSet or TrainSet) which is stored in the database
    object_id : int, id of object in database

    Returns
    -------
    features : dict containing the features
    """
    engine = create_engine('sqlite:////' + dbpath)
    session_cl = sessionmaker(bind=engine)
    session = session_cl()
    tmp_object = session.query(set_object).get(object_id)
    session.close()
    return tmp_object.features
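The third field of each quadruple tags every token with its lexical class (keyword[...], identifier[...], literal[string]). The actual tool behind this dataset is not identified in this excerpt; as a hedged illustration only, a similar stream can be approximated with Python's standard tokenize module:

import io
import keyword
import token
import tokenize

def to_token_type_stream(source):
    # Approximate the token_type column: tag names as keyword/identifier,
    # collapse string and number literals, and pass operators through.
    parts = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == token.NAME:
            kind = "keyword" if keyword.iskeyword(tok.string) else "identifier"
            parts.append("%s[%s]" % (kind, tok.string))
        elif tok.type == token.STRING:
            parts.append("literal[string]")
        elif tok.type == token.NUMBER:
            parts.append("literal[int]")
        elif tok.type == token.OP:
            parts.append(tok.string)
    return " ".join(parts)

print(to_token_type_stream("def f(x): return x + 1\n"))
# keyword[def] identifier[f] ( identifier[x] ) : keyword[return] identifier[x] + literal[int]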
def get_requirements(opts):
    ''' Get the proper requirements file based on the optional argument '''
    if opts.dev:
        name = 'requirements_dev.txt'
    elif opts.doc:
        name = 'requirements_doc.txt'
    else:
        name = 'requirements.txt'
    requirements_file = os.path.join(os.path.dirname(__file__), name)
    install_requires = [line.strip().replace('==', '>=')
                        for line in open(requirements_file)
                        if not line.strip().startswith('#') and line.strip() != '']
    return install_requires
def function[get_requirements, parameter[opts]]: constant[ Get the proper requirements file based on the optional argument ] if name[opts].dev begin[:] variable[name] assign[=] constant[requirements_dev.txt] variable[requirements_file] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[__file__]]], name[name]]] variable[install_requires] assign[=] <ast.ListComp object at 0x7da1b0dc0a30> return[name[install_requires]]
keyword[def] identifier[get_requirements] ( identifier[opts] ): literal[string] keyword[if] identifier[opts] . identifier[dev] : identifier[name] = literal[string] keyword[elif] identifier[opts] . identifier[doc] : identifier[name] = literal[string] keyword[else] : identifier[name] = literal[string] identifier[requirements_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ), identifier[name] ) identifier[install_requires] =[ identifier[line] . identifier[strip] (). identifier[replace] ( literal[string] , literal[string] ) keyword[for] identifier[line] keyword[in] identifier[open] ( identifier[requirements_file] ) keyword[if] keyword[not] identifier[line] . identifier[strip] (). identifier[startswith] ( literal[string] ) keyword[and] identifier[line] . identifier[strip] ()!= literal[string] ] keyword[return] identifier[install_requires]
def get_requirements(opts):
    """ Get the proper requirements file based on the optional argument """
    if opts.dev:
        name = 'requirements_dev.txt'  # depends on [control=['if'], data=[]]
    elif opts.doc:
        name = 'requirements_doc.txt'  # depends on [control=['if'], data=[]]
    else:
        name = 'requirements.txt'
    requirements_file = os.path.join(os.path.dirname(__file__), name)
    install_requires = [line.strip().replace('==', '>=') for line in open(requirements_file) if not line.strip().startswith('#') and line.strip() != '']
    return install_requires
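The second field of each quadruple is an AST-flavoured serialization (function[...], variable[...] assign[=] ...). The exact sememe serializer used to build this dataset is not named in this excerpt; as a rough analogue only, Python's standard ast module renders the same structural information:

import ast

# A rough analogue of the code_sememe column, not the dataset's actual
# serializer: dump the parsed AST of a small function.
source = "def get_requirements(opts): return opts.dev\n"
tree = ast.parse(source)
print(ast.dump(tree, indent=2))  # `indent` requires Python 3.9+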
def lessons(self):
    """
    Return lessons; if ``get_lesson()`` has not been called yet, it is
    called automatically.

    :return: list of lessons
    :rtype: list
    """
    if hasattr(self, '_lessons'):
        return self._lessons
    else:
        self.get_lesson()
        return self._lessons
def function[lessons, parameter[self]]: constant[ 返回lessons,如果未调用过``get_lesson()``会自动调用 :return: list of lessons :rtype: list ] if call[name[hasattr], parameter[name[self], constant[_lessons]]] begin[:] return[name[self]._lessons]
keyword[def] identifier[lessons] ( identifier[self] ): literal[string] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[return] identifier[self] . identifier[_lessons] keyword[else] : identifier[self] . identifier[get_lesson] () keyword[return] identifier[self] . identifier[_lessons]
def lessons(self):
    """
    Return lessons; if ``get_lesson()`` has not been called yet, it is
    called automatically.

    :return: list of lessons
    :rtype: list
    """
    if hasattr(self, '_lessons'):
        return self._lessons  # depends on [control=['if'], data=[]]
    else:
        self.get_lesson()
        return self._lessons
def transcript_to_gene(gtf):
    """
    return a dictionary keyed by transcript_id of the associated gene_id
    """
    gene_lookup = {}
    for feature in complete_features(get_gtf_db(gtf)):
        gene_id = feature.attributes.get('gene_id', [None])[0]
        transcript_id = feature.attributes.get('transcript_id', [None])[0]
        gene_lookup[transcript_id] = gene_id
    return gene_lookup
def function[transcript_to_gene, parameter[gtf]]: constant[ return a dictionary keyed by transcript_id of the associated gene_id ] variable[gene_lookup] assign[=] dictionary[[], []] for taget[name[feature]] in starred[call[name[complete_features], parameter[call[name[get_gtf_db], parameter[name[gtf]]]]]] begin[:] variable[gene_id] assign[=] call[call[name[feature].attributes.get, parameter[constant[gene_id], list[[<ast.Constant object at 0x7da18f09d150>]]]]][constant[0]] variable[transcript_id] assign[=] call[call[name[feature].attributes.get, parameter[constant[transcript_id], list[[<ast.Constant object at 0x7da18dc9b9a0>]]]]][constant[0]] call[name[gene_lookup]][name[transcript_id]] assign[=] name[gene_id] return[name[gene_lookup]]
keyword[def] identifier[transcript_to_gene] ( identifier[gtf] ): literal[string] identifier[gene_lookup] ={} keyword[for] identifier[feature] keyword[in] identifier[complete_features] ( identifier[get_gtf_db] ( identifier[gtf] )): identifier[gene_id] = identifier[feature] . identifier[attributes] . identifier[get] ( literal[string] ,[ keyword[None] ])[ literal[int] ] identifier[transcript_id] = identifier[feature] . identifier[attributes] . identifier[get] ( literal[string] ,[ keyword[None] ])[ literal[int] ] identifier[gene_lookup] [ identifier[transcript_id] ]= identifier[gene_id] keyword[return] identifier[gene_lookup]
def transcript_to_gene(gtf):
    """
    return a dictionary keyed by transcript_id of the associated gene_id
    """
    gene_lookup = {}
    for feature in complete_features(get_gtf_db(gtf)):
        gene_id = feature.attributes.get('gene_id', [None])[0]
        transcript_id = feature.attributes.get('transcript_id', [None])[0]
        gene_lookup[transcript_id] = gene_id  # depends on [control=['for'], data=['feature']]
    return gene_lookup
def network_interfaces_list(resource_group, **kwargs):
    '''
    .. versionadded:: 2019.2.0

    List all network interfaces within a resource group.

    :param resource_group: The resource group name to list network
        interfaces within.

    CLI Example:

    .. code-block:: bash

        salt-call azurearm_network.network_interfaces_list testgroup
    '''
    result = {}
    netconn = __utils__['azurearm.get_client']('network', **kwargs)
    try:
        nics = __utils__['azurearm.paged_object_to_list'](
            netconn.network_interfaces.list(
                resource_group_name=resource_group
            )
        )
        for nic in nics:
            result[nic['name']] = nic
    except CloudError as exc:
        __utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)
        result = {'error': str(exc)}
    return result
def function[network_interfaces_list, parameter[resource_group]]: constant[ .. versionadded:: 2019.2.0 List all network interfaces within a resource group. :param resource_group: The resource group name to list network interfaces within. CLI Example: .. code-block:: bash salt-call azurearm_network.network_interfaces_list testgroup ] variable[result] assign[=] dictionary[[], []] variable[netconn] assign[=] call[call[name[__utils__]][constant[azurearm.get_client]], parameter[constant[network]]] <ast.Try object at 0x7da1b2344a30> return[name[result]]
keyword[def] identifier[network_interfaces_list] ( identifier[resource_group] ,** identifier[kwargs] ): literal[string] identifier[result] ={} identifier[netconn] = identifier[__utils__] [ literal[string] ]( literal[string] ,** identifier[kwargs] ) keyword[try] : identifier[nics] = identifier[__utils__] [ literal[string] ]( identifier[netconn] . identifier[network_interfaces] . identifier[list] ( identifier[resource_group_name] = identifier[resource_group] ) ) keyword[for] identifier[nic] keyword[in] identifier[nics] : identifier[result] [ identifier[nic] [ literal[string] ]]= identifier[nic] keyword[except] identifier[CloudError] keyword[as] identifier[exc] : identifier[__utils__] [ literal[string] ]( literal[string] , identifier[str] ( identifier[exc] ),** identifier[kwargs] ) identifier[result] ={ literal[string] : identifier[str] ( identifier[exc] )} keyword[return] identifier[result]
def network_interfaces_list(resource_group, **kwargs):
    """
    .. versionadded:: 2019.2.0

    List all network interfaces within a resource group.

    :param resource_group: The resource group name to list network
        interfaces within.

    CLI Example:

    .. code-block:: bash

        salt-call azurearm_network.network_interfaces_list testgroup
    """
    result = {}
    netconn = __utils__['azurearm.get_client']('network', **kwargs)
    try:
        nics = __utils__['azurearm.paged_object_to_list'](netconn.network_interfaces.list(resource_group_name=resource_group))
        for nic in nics:
            result[nic['name']] = nic  # depends on [control=['for'], data=['nic']]
        # depends on [control=['try'], data=[]]
    except CloudError as exc:
        __utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)
        result = {'error': str(exc)}  # depends on [control=['except'], data=['exc']]
    return result
def ShowErrorBarCaps(ax):
    """Show error bar caps.

    Seaborn paper style hides error bar caps. Call this function on an
    axes object to make them visible again.
    """
    for ch in ax.get_children():
        if str(ch).startswith('Line2D'):
            ch.set_markeredgewidth(1)
            ch.set_markersize(8)
def function[ShowErrorBarCaps, parameter[ax]]: constant[Show error bar caps.    Seaborn paper style hides error bar caps. Call this function on an axes object to make them visible again. ] for taget[name[ch]] in starred[call[name[ax].get_children, parameter[]]] begin[:] if call[call[name[str], parameter[name[ch]]].startswith, parameter[constant[Line2D]]] begin[:] call[name[ch].set_markeredgewidth, parameter[constant[1]]] call[name[ch].set_markersize, parameter[constant[8]]]
keyword[def] identifier[ShowErrorBarCaps] ( identifier[ax] ): literal[string] keyword[for] identifier[ch] keyword[in] identifier[ax] . identifier[get_children] (): keyword[if] identifier[str] ( identifier[ch] ). identifier[startswith] ( literal[string] ): identifier[ch] . identifier[set_markeredgewidth] ( literal[int] ) identifier[ch] . identifier[set_markersize] ( literal[int] )
def ShowErrorBarCaps(ax):
    """Show error bar caps.

    Seaborn paper style hides error bar caps. Call this function on an
    axes object to make them visible again.
    """
    for ch in ax.get_children():
        if str(ch).startswith('Line2D'):
            ch.set_markeredgewidth(1)
            ch.set_markersize(8)  # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=['ch']]
def pass_feature(*feature_names):
    """Injects a feature instance into the kwargs
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            for name in feature_names:
                kwargs[name] = feature_proxy(name)
            return f(*args, **kwargs)
        return wrapper
    return decorator
def function[pass_feature, parameter[]]: constant[Injects a feature instance into the kwargs ] def function[decorator, parameter[f]]: def function[wrapper, parameter[]]: for taget[name[name]] in starred[name[feature_names]] begin[:] call[name[kwargs]][name[name]] assign[=] call[name[feature_proxy], parameter[name[name]]] return[call[name[f], parameter[<ast.Starred object at 0x7da204620520>]]] return[name[wrapper]] return[name[decorator]]
keyword[def] identifier[pass_feature] (* identifier[feature_names] ): literal[string] keyword[def] identifier[decorator] ( identifier[f] ): @ identifier[functools] . identifier[wraps] ( identifier[f] ) keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): keyword[for] identifier[name] keyword[in] identifier[feature_names] : identifier[kwargs] [ identifier[name] ]= identifier[feature_proxy] ( identifier[name] ) keyword[return] identifier[f] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[wrapper] keyword[return] identifier[decorator]
def pass_feature(*feature_names):
    """Injects a feature instance into the kwargs
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            for name in feature_names:
                kwargs[name] = feature_proxy(name)  # depends on [control=['for'], data=['name']]
            return f(*args, **kwargs)
        return wrapper
    return decorator
def _construct_sparse_features(self, x):
    """ Helper to construct a sparse representation of the features. """
    I, J, K = x.shape
    new_array_height = (x != 0).sum(axis=2).max()
    index_array = -np.ones((I, J, new_array_height), dtype='int64')
    value_array = -np.ones((I, J, new_array_height), dtype='float64')
    populate_sparse_features(x, index_array, value_array, I, J, K)
    return index_array, value_array
def function[_construct_sparse_features, parameter[self, x]]: constant[ Helper to construct a sparse representation of the features. ] <ast.Tuple object at 0x7da1b253c070> assign[=] name[x].shape variable[new_array_height] assign[=] call[call[compare[name[x] not_equal[!=] constant[0]].sum, parameter[]].max, parameter[]] variable[index_array] assign[=] <ast.UnaryOp object at 0x7da1b253ec80> variable[value_array] assign[=] <ast.UnaryOp object at 0x7da1b253e500> call[name[populate_sparse_features], parameter[name[x], name[index_array], name[value_array], name[I], name[J], name[K]]] return[tuple[[<ast.Name object at 0x7da20c6c4760>, <ast.Name object at 0x7da20c6c51b0>]]]
keyword[def] identifier[_construct_sparse_features] ( identifier[self] , identifier[x] ): literal[string] identifier[I] , identifier[J] , identifier[K] = identifier[x] . identifier[shape] identifier[new_array_height] =( identifier[x] != literal[int] ). identifier[sum] ( identifier[axis] = literal[int] ). identifier[max] () identifier[index_array] =- identifier[np] . identifier[ones] (( identifier[I] , identifier[J] , identifier[new_array_height] ), identifier[dtype] = literal[string] ) identifier[value_array] =- identifier[np] . identifier[ones] (( identifier[I] , identifier[J] , identifier[new_array_height] ), identifier[dtype] = literal[string] ) identifier[populate_sparse_features] ( identifier[x] , identifier[index_array] , identifier[value_array] , identifier[I] , identifier[J] , identifier[K] ) keyword[return] identifier[index_array] , identifier[value_array]
def _construct_sparse_features(self, x):
    """ Helper to construct a sparse representation of the features. """
    (I, J, K) = x.shape
    new_array_height = (x != 0).sum(axis=2).max()
    index_array = -np.ones((I, J, new_array_height), dtype='int64')
    value_array = -np.ones((I, J, new_array_height), dtype='float64')
    populate_sparse_features(x, index_array, value_array, I, J, K)
    return (index_array, value_array)
def normalised_autocorrelation_function(chain, index=0, burn=None, limit=None,
                                        fig=None, figsize=None):
    """
    Plot the autocorrelation function for each parameter of a sampler chain.

    :param chain:
        The sampled parameter values.
    :type chain:
        :class:`numpy.ndarray`

    :param index: [optional]
        Index to calculate the autocorrelation from.
    :type index:
        int

    :param limit: [optional]
        Maximum number of MCMC steps to display. By default half of the
        chain will be shown.
    :type limit:
        int

    :param fig: [optional]
        Figure class to use.
    :type fig:
        :class:`matplotlib.Figure` or None

    :param figsize: [optional]
        The figure size (x-dimension, y-dimension) in inches.
    :type figsize:
        tuple or None
    """
    factor = 2.0
    lbdim = 0.2 * factor
    trdim = 0.2 * factor
    whspace = 0.10
    dimy = lbdim + factor + trdim
    dimx = lbdim + factor + trdim

    if fig is None:
        fig, ax = plt.subplots(figsize=figsize)
    else:
        ax = fig.axes[0]

    lm = lbdim / dimx
    bm = lbdim / dimy
    trm = (lbdim + factor) / dimy
    fig.subplots_adjust(left=lm, bottom=bm, right=trm, top=trm,
                        wspace=whspace, hspace=whspace)

    # Calculate the autocorrelation function for each parameter
    num_parameters = chain.shape[2]
    for i in xrange(num_parameters):
        try:
            rho = emcee.autocorr.function(np.mean(chain[:, index:, i], axis=0))
        except RuntimeError:
            logger.exception("Error in calculating auto-correlation function "
                             "for parameter index {}".format(i))
        else:
            ax.plot(rho, "k", lw=1)

    if burn:
        ax.axvline(burn, linestyle=":", color="k")

    ax.xaxis.set_major_locator(MaxNLocator(5))
    [l.set_rotation(45) for l in ax.get_xticklabels()]
    ax.set_yticks([-0.5, 0, 0.5, 1.0])
    [l.set_rotation(45) for l in ax.get_yticklabels()]
    ax.axhline(0, color="k")
    ax.set_xlim(0, limit if limit is not None else chain.shape[1] - index)
    ax.set_xlabel("$\\tau$")
    ax.set_ylabel("Auto-correlation")
    fig.tight_layout()
    return fig
def function[normalised_autocorrelation_function, parameter[chain, index, burn, limit, fig, figsize]]: constant[ Plot the autocorrelation function for each parameter of a sampler chain. :param chain: The sampled parameter values. :type chain: :class:`numpy.ndarray` :param index: [optional] Index to calculate the autocorrelation from. :type index: int :param limit: [optional] Maximum number of MCMC steps to display. By default half of the chain will be shown. :type limit: int :param fig: [optional] Figure class to use. :type fig: :class:`matplotlib.Figure` or None :param figsize: [optional] The figure size (x-dimension, y-dimension) in inches. :type figsize: tuple or None ] variable[factor] assign[=] constant[2.0] variable[lbdim] assign[=] binary_operation[constant[0.2] * name[factor]] variable[trdim] assign[=] binary_operation[constant[0.2] * name[factor]] variable[whspace] assign[=] constant[0.1] variable[dimy] assign[=] binary_operation[binary_operation[name[lbdim] + name[factor]] + name[trdim]] variable[dimx] assign[=] binary_operation[binary_operation[name[lbdim] + name[factor]] + name[trdim]] if compare[name[fig] is constant[None]] begin[:] <ast.Tuple object at 0x7da2044c29b0> assign[=] call[name[plt].subplots, parameter[]] variable[lm] assign[=] binary_operation[name[lbdim] / name[dimx]] variable[bm] assign[=] binary_operation[name[lbdim] / name[dimy]] variable[trm] assign[=] binary_operation[binary_operation[name[lbdim] + name[factor]] / name[dimy]] call[name[fig].subplots_adjust, parameter[]] variable[num_parameters] assign[=] call[name[chain].shape][constant[2]] for taget[name[i]] in starred[call[name[xrange], parameter[name[num_parameters]]]] begin[:] <ast.Try object at 0x7da20e956110> if name[burn] begin[:] call[name[ax].axvline, parameter[name[burn]]] call[name[ax].xaxis.set_major_locator, parameter[call[name[MaxNLocator], parameter[constant[5]]]]] <ast.ListComp object at 0x7da20e9577c0> call[name[ax].set_yticks, parameter[list[[<ast.UnaryOp object at 0x7da20e9567a0>, <ast.Constant object at 0x7da20e956b30>, <ast.Constant object at 0x7da20e954dc0>, <ast.Constant object at 0x7da20e957ac0>]]]] <ast.ListComp object at 0x7da20e955c90> call[name[ax].axhline, parameter[constant[0]]] call[name[ax].set_xlim, parameter[constant[0], <ast.IfExp object at 0x7da20e957910>]] call[name[ax].set_xlabel, parameter[constant[$\tau$]]] call[name[ax].set_ylabel, parameter[constant[Auto-correlation]]] call[name[fig].tight_layout, parameter[]] return[name[fig]]
keyword[def] identifier[normalised_autocorrelation_function] ( identifier[chain] , identifier[index] = literal[int] , identifier[burn] = keyword[None] , identifier[limit] = keyword[None] , identifier[fig] = keyword[None] , identifier[figsize] = keyword[None] ): literal[string] identifier[factor] = literal[int] identifier[lbdim] = literal[int] * identifier[factor] identifier[trdim] = literal[int] * identifier[factor] identifier[whspace] = literal[int] identifier[dimy] = identifier[lbdim] + identifier[factor] + identifier[trdim] identifier[dimx] = identifier[lbdim] + identifier[factor] + identifier[trdim] keyword[if] identifier[fig] keyword[is] keyword[None] : identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( identifier[figsize] = identifier[figsize] ) keyword[else] : identifier[ax] = identifier[fig] . identifier[axes] [ literal[int] ] identifier[lm] = identifier[lbdim] / identifier[dimx] identifier[bm] = identifier[lbdim] / identifier[dimy] identifier[trm] =( identifier[lbdim] + identifier[factor] )/ identifier[dimy] identifier[fig] . identifier[subplots_adjust] ( identifier[left] = identifier[lm] , identifier[bottom] = identifier[bm] , identifier[right] = identifier[trm] , identifier[top] = identifier[trm] , identifier[wspace] = identifier[whspace] , identifier[hspace] = identifier[whspace] ) identifier[num_parameters] = identifier[chain] . identifier[shape] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[num_parameters] ): keyword[try] : identifier[rho] = identifier[emcee] . identifier[autocorr] . identifier[function] ( identifier[np] . identifier[mean] ( identifier[chain] [:, identifier[index] :, identifier[i] ], identifier[axis] = literal[int] )) keyword[except] identifier[RuntimeError] : identifier[logger] . identifier[exception] ( literal[string] literal[string] . identifier[format] ( identifier[i] )) keyword[else] : identifier[ax] . identifier[plot] ( identifier[rho] , literal[string] , identifier[lw] = literal[int] ) keyword[if] identifier[burn] : identifier[ax] . identifier[axvline] ( identifier[burn] , identifier[linestyle] = literal[string] , identifier[color] = literal[string] ) identifier[ax] . identifier[xaxis] . identifier[set_major_locator] ( identifier[MaxNLocator] ( literal[int] )) [ identifier[l] . identifier[set_rotation] ( literal[int] ) keyword[for] identifier[l] keyword[in] identifier[ax] . identifier[get_xticklabels] ()] identifier[ax] . identifier[set_yticks] ([- literal[int] , literal[int] , literal[int] , literal[int] ]) [ identifier[l] . identifier[set_rotation] ( literal[int] ) keyword[for] identifier[l] keyword[in] identifier[ax] . identifier[get_yticklabels] ()] identifier[ax] . identifier[axhline] ( literal[int] , identifier[color] = literal[string] ) identifier[ax] . identifier[set_xlim] ( literal[int] , identifier[limit] keyword[if] identifier[limit] keyword[is] keyword[not] keyword[None] keyword[else] identifier[chain] . identifier[shape] [ literal[int] ]- identifier[index] ) identifier[ax] . identifier[set_xlabel] ( literal[string] ) identifier[ax] . identifier[set_ylabel] ( literal[string] ) identifier[fig] . identifier[tight_layout] () keyword[return] identifier[fig]
def normalised_autocorrelation_function(chain, index=0, burn=None, limit=None,
                                        fig=None, figsize=None):
    """
    Plot the autocorrelation function for each parameter of a sampler chain.

    :param chain:
        The sampled parameter values.
    :type chain:
        :class:`numpy.ndarray`

    :param index: [optional]
        Index to calculate the autocorrelation from.
    :type index:
        int

    :param limit: [optional]
        Maximum number of MCMC steps to display. By default half of the
        chain will be shown.
    :type limit:
        int

    :param fig: [optional]
        Figure class to use.
    :type fig:
        :class:`matplotlib.Figure` or None

    :param figsize: [optional]
        The figure size (x-dimension, y-dimension) in inches.
    :type figsize:
        tuple or None
    """
    factor = 2.0
    lbdim = 0.2 * factor
    trdim = 0.2 * factor
    whspace = 0.1
    dimy = lbdim + factor + trdim
    dimx = lbdim + factor + trdim
    if fig is None:
        (fig, ax) = plt.subplots(figsize=figsize)  # depends on [control=['if'], data=['fig']]
    else:
        ax = fig.axes[0]
    lm = lbdim / dimx
    bm = lbdim / dimy
    trm = (lbdim + factor) / dimy
    fig.subplots_adjust(left=lm, bottom=bm, right=trm, top=trm, wspace=whspace, hspace=whspace)
    # Calculate the autocorrelation function for each parameter
    num_parameters = chain.shape[2]
    for i in xrange(num_parameters):
        try:
            rho = emcee.autocorr.function(np.mean(chain[:, index:, i], axis=0))  # depends on [control=['try'], data=[]]
        except RuntimeError:
            logger.exception('Error in calculating auto-correlation function for parameter index {}'.format(i))  # depends on [control=['except'], data=[]]
        else:
            ax.plot(rho, 'k', lw=1)
        # depends on [control=['for'], data=['i']]
    if burn:
        ax.axvline(burn, linestyle=':', color='k')  # depends on [control=['if'], data=[]]
    ax.xaxis.set_major_locator(MaxNLocator(5))
    [l.set_rotation(45) for l in ax.get_xticklabels()]
    ax.set_yticks([-0.5, 0, 0.5, 1.0])
    [l.set_rotation(45) for l in ax.get_yticklabels()]
    ax.axhline(0, color='k')
    ax.set_xlim(0, limit if limit is not None else chain.shape[1] - index)
    ax.set_xlabel('$\\tau$')
    ax.set_ylabel('Auto-correlation')
    fig.tight_layout()
    return fig
def validate(self):
    """ validate: Makes sure node is valid

        Args: None

        Returns: boolean indicating if node is valid
    """
    from .files import File
    assert self.source_id is not None, "Assumption Failed: Node must have a source_id"
    assert isinstance(self.title, str), "Assumption Failed: Node title is not a string"
    assert isinstance(self.description, str) or self.description is None, "Assumption Failed: Node description is not a string"
    assert isinstance(self.children, list), "Assumption Failed: Node children is not a list"
    for f in self.files:
        assert isinstance(f, File), "Assumption Failed: files must be file class"
        f.validate()
    source_ids = [c.source_id for c in self.children]
    duplicates = set([x for x in source_ids if source_ids.count(x) > 1])
    assert len(duplicates) == 0, "Assumption Failed: Node must have unique source id among siblings ({} appears multiple times)".format(duplicates)
    return True
def function[validate, parameter[self]]: constant[ validate: Makes sure node is valid Args: None Returns: boolean indicating if node is valid ] from relative_module[files] import module[File] assert[compare[name[self].source_id is_not constant[None]]] assert[call[name[isinstance], parameter[name[self].title, name[str]]]] assert[<ast.BoolOp object at 0x7da20c6c7490>] assert[call[name[isinstance], parameter[name[self].children, name[list]]]] for taget[name[f]] in starred[name[self].files] begin[:] assert[call[name[isinstance], parameter[name[f], name[File]]]] call[name[f].validate, parameter[]] variable[source_ids] assign[=] <ast.ListComp object at 0x7da20c6c50c0> variable[duplicates] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da20c6c43d0>]] assert[compare[call[name[len], parameter[name[duplicates]]] equal[==] constant[0]]] return[constant[True]]
keyword[def] identifier[validate] ( identifier[self] ): literal[string] keyword[from] . identifier[files] keyword[import] identifier[File] keyword[assert] identifier[self] . identifier[source_id] keyword[is] keyword[not] keyword[None] , literal[string] keyword[assert] identifier[isinstance] ( identifier[self] . identifier[title] , identifier[str] ), literal[string] keyword[assert] identifier[isinstance] ( identifier[self] . identifier[description] , identifier[str] ) keyword[or] identifier[self] . identifier[description] keyword[is] keyword[None] , literal[string] keyword[assert] identifier[isinstance] ( identifier[self] . identifier[children] , identifier[list] ), literal[string] keyword[for] identifier[f] keyword[in] identifier[self] . identifier[files] : keyword[assert] identifier[isinstance] ( identifier[f] , identifier[File] ), literal[string] identifier[f] . identifier[validate] () identifier[source_ids] =[ identifier[c] . identifier[source_id] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[children] ] identifier[duplicates] = identifier[set] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[source_ids] keyword[if] identifier[source_ids] . identifier[count] ( identifier[x] )> literal[int] ]) keyword[assert] identifier[len] ( identifier[duplicates] )== literal[int] , literal[string] . identifier[format] ( identifier[duplicates] ) keyword[return] keyword[True]
def validate(self):
    """ validate: Makes sure node is valid

        Args: None

        Returns: boolean indicating if node is valid
    """
    from .files import File
    assert self.source_id is not None, 'Assumption Failed: Node must have a source_id'
    assert isinstance(self.title, str), 'Assumption Failed: Node title is not a string'
    assert isinstance(self.description, str) or self.description is None, 'Assumption Failed: Node description is not a string'
    assert isinstance(self.children, list), 'Assumption Failed: Node children is not a list'
    for f in self.files:
        assert isinstance(f, File), 'Assumption Failed: files must be file class'
        f.validate()  # depends on [control=['for'], data=['f']]
    source_ids = [c.source_id for c in self.children]
    duplicates = set([x for x in source_ids if source_ids.count(x) > 1])
    assert len(duplicates) == 0, 'Assumption Failed: Node must have unique source id among siblings ({} appears multiple times)'.format(duplicates)
    return True
def event(self, utype, **kw):
    '''
    Make a meta-event with a utype of @type.
    **@kw works the same as for pygame.event.Event().
    '''
    d = {'utype': utype}
    d.update(kw)
    pygame.event.post(pygame.event.Event(METAEVENT, d))
def function[event, parameter[self, utype]]: constant[ Make a meta-event with a utype of @type. **@kw works the same as for pygame.event.Event(). ] variable[d] assign[=] dictionary[[<ast.Constant object at 0x7da204565a50>], [<ast.Name object at 0x7da204565ab0>]] call[name[d].update, parameter[name[kw]]] call[name[pygame].event.post, parameter[call[name[pygame].event.Event, parameter[name[METAEVENT], name[d]]]]]
keyword[def] identifier[event] ( identifier[self] , identifier[utype] ,** identifier[kw] ): literal[string] identifier[d] ={ literal[string] : identifier[utype] } identifier[d] . identifier[update] ( identifier[kw] ) identifier[pygame] . identifier[event] . identifier[post] ( identifier[pygame] . identifier[event] . identifier[Event] ( identifier[METAEVENT] , identifier[d] ))
def event(self, utype, **kw):
    """
    Make a meta-event with a utype of @type.
    **@kw works the same as for pygame.event.Event().
    """
    d = {'utype': utype}
    d.update(kw)
    pygame.event.post(pygame.event.Event(METAEVENT, d))
def get_function(self, name):
    """
    Get a ValueRef pointing to the function named *name*.
    NameError is raised if the symbol isn't found.
    """
    p = ffi.lib.LLVMPY_GetNamedFunction(self, _encode_string(name))
    if not p:
        raise NameError(name)
    return ValueRef(p, 'function', dict(module=self))
def function[get_function, parameter[self, name]]: constant[ Get a ValueRef pointing to the function named *name*. NameError is raised if the symbol isn't found. ] variable[p] assign[=] call[name[ffi].lib.LLVMPY_GetNamedFunction, parameter[name[self], call[name[_encode_string], parameter[name[name]]]]] if <ast.UnaryOp object at 0x7da1b19ed1b0> begin[:] <ast.Raise object at 0x7da1b19eeaa0> return[call[name[ValueRef], parameter[name[p], constant[function], call[name[dict], parameter[]]]]]
keyword[def] identifier[get_function] ( identifier[self] , identifier[name] ): literal[string] identifier[p] = identifier[ffi] . identifier[lib] . identifier[LLVMPY_GetNamedFunction] ( identifier[self] , identifier[_encode_string] ( identifier[name] )) keyword[if] keyword[not] identifier[p] : keyword[raise] identifier[NameError] ( identifier[name] ) keyword[return] identifier[ValueRef] ( identifier[p] , literal[string] , identifier[dict] ( identifier[module] = identifier[self] ))
def get_function(self, name):
    """
    Get a ValueRef pointing to the function named *name*.
    NameError is raised if the symbol isn't found.
    """
    p = ffi.lib.LLVMPY_GetNamedFunction(self, _encode_string(name))
    if not p:
        raise NameError(name)  # depends on [control=['if'], data=[]]
    return ValueRef(p, 'function', dict(module=self))
def computeNormals(self):
    """Compute cell and vertex normals for the actor's mesh.

    .. warning:: Mesh gets modified, can have a different nr. of vertices.
    """
    poly = self.polydata(False)
    pnormals = poly.GetPointData().GetNormals()
    cnormals = poly.GetCellData().GetNormals()
    if pnormals and cnormals:
        return self

    pdnorm = vtk.vtkPolyDataNormals()
    pdnorm.SetInputData(poly)
    pdnorm.ComputePointNormalsOn()
    pdnorm.ComputeCellNormalsOn()
    pdnorm.FlipNormalsOff()
    pdnorm.ConsistencyOn()
    pdnorm.Update()
    return self.updateMesh(pdnorm.GetOutput())
def function[computeNormals, parameter[self]]: constant[Compute cell and vertex normals for the actor's mesh. .. warning:: Mesh gets modified, can have a different nr. of vertices. ] variable[poly] assign[=] call[name[self].polydata, parameter[constant[False]]] variable[pnormals] assign[=] call[call[name[poly].GetPointData, parameter[]].GetNormals, parameter[]] variable[cnormals] assign[=] call[call[name[poly].GetCellData, parameter[]].GetNormals, parameter[]] if <ast.BoolOp object at 0x7da204620940> begin[:] return[name[self]] variable[pdnorm] assign[=] call[name[vtk].vtkPolyDataNormals, parameter[]] call[name[pdnorm].SetInputData, parameter[name[poly]]] call[name[pdnorm].ComputePointNormalsOn, parameter[]] call[name[pdnorm].ComputeCellNormalsOn, parameter[]] call[name[pdnorm].FlipNormalsOff, parameter[]] call[name[pdnorm].ConsistencyOn, parameter[]] call[name[pdnorm].Update, parameter[]] return[call[name[self].updateMesh, parameter[call[name[pdnorm].GetOutput, parameter[]]]]]
keyword[def] identifier[computeNormals] ( identifier[self] ): literal[string] identifier[poly] = identifier[self] . identifier[polydata] ( keyword[False] ) identifier[pnormals] = identifier[poly] . identifier[GetPointData] (). identifier[GetNormals] () identifier[cnormals] = identifier[poly] . identifier[GetCellData] (). identifier[GetNormals] () keyword[if] identifier[pnormals] keyword[and] identifier[cnormals] : keyword[return] identifier[self] identifier[pdnorm] = identifier[vtk] . identifier[vtkPolyDataNormals] () identifier[pdnorm] . identifier[SetInputData] ( identifier[poly] ) identifier[pdnorm] . identifier[ComputePointNormalsOn] () identifier[pdnorm] . identifier[ComputeCellNormalsOn] () identifier[pdnorm] . identifier[FlipNormalsOff] () identifier[pdnorm] . identifier[ConsistencyOn] () identifier[pdnorm] . identifier[Update] () keyword[return] identifier[self] . identifier[updateMesh] ( identifier[pdnorm] . identifier[GetOutput] ())
def computeNormals(self):
    """Compute cell and vertex normals for the actor's mesh.

    .. warning:: Mesh gets modified, can have a different nr. of vertices.
    """
    poly = self.polydata(False)
    pnormals = poly.GetPointData().GetNormals()
    cnormals = poly.GetCellData().GetNormals()
    if pnormals and cnormals:
        return self  # depends on [control=['if'], data=[]]
    pdnorm = vtk.vtkPolyDataNormals()
    pdnorm.SetInputData(poly)
    pdnorm.ComputePointNormalsOn()
    pdnorm.ComputeCellNormalsOn()
    pdnorm.FlipNormalsOff()
    pdnorm.ConsistencyOn()
    pdnorm.Update()
    return self.updateMesh(pdnorm.GetOutput())
def _get_optimal_thresholds(nd_dict, quantized_dtype, num_bins=8001,
                            num_quantized_bins=255, logger=None):
    """Given a ndarray dict, find the optimal threshold for quantizing each value of the key."""
    if stats is None:
        raise ImportError('scipy.stats is required for running entropy mode of calculating'
                          ' the optimal thresholds for quantizing FP32 ndarrays into int8.'
                          ' Please check if the scipy python bindings are installed.')
    assert isinstance(nd_dict, dict)
    if logger is not None:
        logger.info('Calculating optimal thresholds for quantization using KL divergence'
                    ' with num_bins=%d and num_quantized_bins=%d'
                    % (num_bins, num_quantized_bins))
    th_dict = {}
    # copy nd_dict keys since the keys() only returns a view in python3
    layer_names = list(nd_dict.keys())
    for name in layer_names:
        assert name in nd_dict
        min_val, max_val, min_divergence, opt_th = \
            _get_optimal_threshold(nd_dict[name], quantized_dtype,
                                   num_bins=num_bins,
                                   num_quantized_bins=num_quantized_bins)
        del nd_dict[name]  # release the memory of ndarray
        if min_val < 0:
            th_dict[name] = (-opt_th, opt_th)
        else:
            th_dict[name] = (0, opt_th)
        if logger is not None:
            logger.info('layer=%s, min_val=%f, max_val=%f, min_divergence=%f, optimal_threshold=%f'
                        % (name, min_val, max_val, min_divergence, opt_th))
    return th_dict
def function[_get_optimal_thresholds, parameter[nd_dict, quantized_dtype, num_bins, num_quantized_bins, logger]]: constant[Given a ndarray dict, find the optimal threshold for quantizing each value of the key.] if compare[name[stats] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1f211e0> assert[call[name[isinstance], parameter[name[nd_dict], name[dict]]]] if compare[name[logger] is_not constant[None]] begin[:] call[name[logger].info, parameter[binary_operation[constant[Calculating optimal thresholds for quantization using KL divergence with num_bins=%d and num_quantized_bins=%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1f23280>, <ast.Name object at 0x7da1b1f23cd0>]]]]] variable[th_dict] assign[=] dictionary[[], []] variable[layer_names] assign[=] call[name[list], parameter[call[name[nd_dict].keys, parameter[]]]] for taget[name[name]] in starred[name[layer_names]] begin[:] assert[compare[name[name] in name[nd_dict]]] <ast.Tuple object at 0x7da1b1f20400> assign[=] call[name[_get_optimal_threshold], parameter[call[name[nd_dict]][name[name]], name[quantized_dtype]]] <ast.Delete object at 0x7da1b1f23700> if compare[name[min_val] less[<] constant[0]] begin[:] call[name[th_dict]][name[name]] assign[=] tuple[[<ast.UnaryOp object at 0x7da1b1f21ff0>, <ast.Name object at 0x7da1b1f233d0>]] if compare[name[logger] is_not constant[None]] begin[:] call[name[logger].info, parameter[binary_operation[constant[layer=%s, min_val=%f, max_val=%f, min_divergence=%f, optimal_threshold=%f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1f220b0>, <ast.Name object at 0x7da1b1f20eb0>, <ast.Name object at 0x7da1b1f22aa0>, <ast.Name object at 0x7da1b1f21ab0>, <ast.Name object at 0x7da1b1f23820>]]]]] return[name[th_dict]]
keyword[def] identifier[_get_optimal_thresholds] ( identifier[nd_dict] , identifier[quantized_dtype] , identifier[num_bins] = literal[int] , identifier[num_quantized_bins] = literal[int] , identifier[logger] = keyword[None] ): literal[string] keyword[if] identifier[stats] keyword[is] keyword[None] : keyword[raise] identifier[ImportError] ( literal[string] literal[string] literal[string] ) keyword[assert] identifier[isinstance] ( identifier[nd_dict] , identifier[dict] ) keyword[if] identifier[logger] keyword[is] keyword[not] keyword[None] : identifier[logger] . identifier[info] ( literal[string] literal[string] %( identifier[num_bins] , identifier[num_quantized_bins] )) identifier[th_dict] ={} identifier[layer_names] = identifier[list] ( identifier[nd_dict] . identifier[keys] ()) keyword[for] identifier[name] keyword[in] identifier[layer_names] : keyword[assert] identifier[name] keyword[in] identifier[nd_dict] identifier[min_val] , identifier[max_val] , identifier[min_divergence] , identifier[opt_th] = identifier[_get_optimal_threshold] ( identifier[nd_dict] [ identifier[name] ], identifier[quantized_dtype] , identifier[num_bins] = identifier[num_bins] , identifier[num_quantized_bins] = identifier[num_quantized_bins] ) keyword[del] identifier[nd_dict] [ identifier[name] ] keyword[if] identifier[min_val] < literal[int] : identifier[th_dict] [ identifier[name] ]=(- identifier[opt_th] , identifier[opt_th] ) keyword[else] : identifier[th_dict] [ identifier[name] ]=( literal[int] , identifier[opt_th] ) keyword[if] identifier[logger] keyword[is] keyword[not] keyword[None] : identifier[logger] . identifier[info] ( literal[string] %( identifier[name] , identifier[min_val] , identifier[max_val] , identifier[min_divergence] , identifier[opt_th] )) keyword[return] identifier[th_dict]
def _get_optimal_thresholds(nd_dict, quantized_dtype, num_bins=8001, num_quantized_bins=255, logger=None):
    """Given a ndarray dict, find the optimal threshold for quantizing each value of the key."""
    if stats is None:
        raise ImportError('scipy.stats is required for running entropy mode of calculating the optimal thresholds for quantizing FP32 ndarrays into int8. Please check if the scipy python bindings are installed.')  # depends on [control=['if'], data=[]]
    assert isinstance(nd_dict, dict)
    if logger is not None:
        logger.info('Calculating optimal thresholds for quantization using KL divergence with num_bins=%d and num_quantized_bins=%d' % (num_bins, num_quantized_bins))  # depends on [control=['if'], data=['logger']]
    th_dict = {}
    # copy nd_dict keys since the keys() only returns a view in python3
    layer_names = list(nd_dict.keys())
    for name in layer_names:
        assert name in nd_dict
        (min_val, max_val, min_divergence, opt_th) = _get_optimal_threshold(nd_dict[name], quantized_dtype, num_bins=num_bins, num_quantized_bins=num_quantized_bins)
        del nd_dict[name]  # release the memory of ndarray
        if min_val < 0:
            th_dict[name] = (-opt_th, opt_th)  # depends on [control=['if'], data=[]]
        else:
            th_dict[name] = (0, opt_th)
        if logger is not None:
            logger.info('layer=%s, min_val=%f, max_val=%f, min_divergence=%f, optimal_threshold=%f' % (name, min_val, max_val, min_divergence, opt_th))  # depends on [control=['if'], data=['logger']]
        # depends on [control=['for'], data=['name']]
    return th_dict
def to_vobject(self, filename=None, uid=None):
    """Return the vCard corresponding to the uid

    filename -- unused, for API compatibility only
    uid -- the UID to get (required)
    """
    self._update()
    return self._to_vcard(self._book[uid.split('@')[0]])
def function[to_vobject, parameter[self, filename, uid]]: constant[Return the vCard corresponding to the uid filename -- unused, for API compatibility only uid -- the UID to get (required) ] call[name[self]._update, parameter[]] return[call[name[self]._to_vcard, parameter[call[name[self]._book][call[call[name[uid].split, parameter[constant[@]]]][constant[0]]]]]]
keyword[def] identifier[to_vobject] ( identifier[self] , identifier[filename] = keyword[None] , identifier[uid] = keyword[None] ): literal[string] identifier[self] . identifier[_update] () keyword[return] identifier[self] . identifier[_to_vcard] ( identifier[self] . identifier[_book] [ identifier[uid] . identifier[split] ( literal[string] )[ literal[int] ]])
def to_vobject(self, filename=None, uid=None):
    """Return the vCard corresponding to the uid

    filename -- unused, for API compatibility only
    uid -- the UID to get (required)
    """
    self._update()
    return self._to_vcard(self._book[uid.split('@')[0]])
def InitFromApiFlow(self, f, cron_job_id=None):
    """Shortcut method for easy legacy cron jobs support."""
    if f.flow_id:
        self.run_id = f.flow_id
    elif f.urn:
        self.run_id = f.urn.Basename()

    self.started_at = f.started_at
    self.cron_job_id = cron_job_id

    flow_state_enum = api_plugins_flow.ApiFlow.State
    cron_enum = rdf_cronjobs.CronJobRun.CronJobRunStatus
    errors_map = {
        flow_state_enum.RUNNING: cron_enum.RUNNING,
        flow_state_enum.TERMINATED: cron_enum.FINISHED,
        flow_state_enum.ERROR: cron_enum.ERROR,
        flow_state_enum.CLIENT_CRASHED: cron_enum.ERROR
    }
    self.status = errors_map[f.state]

    if f.state != f.State.RUNNING:
        self.finished_at = f.last_active_at
    if f.context.kill_timestamp:
        self.status = self.Status.LIFETIME_EXCEEDED
    if f.context.HasField("status"):
        self.log_message = f.context.status
    if f.context.HasField("backtrace"):
        self.backtrace = f.context.backtrace
    return self
def function[InitFromApiFlow, parameter[self, f, cron_job_id]]: constant[Shortcut method for easy legacy cron jobs support.] if name[f].flow_id begin[:] name[self].run_id assign[=] name[f].flow_id name[self].started_at assign[=] name[f].started_at name[self].cron_job_id assign[=] name[cron_job_id] variable[flow_state_enum] assign[=] name[api_plugins_flow].ApiFlow.State variable[cron_enum] assign[=] name[rdf_cronjobs].CronJobRun.CronJobRunStatus variable[errors_map] assign[=] dictionary[[<ast.Attribute object at 0x7da1b1b2a590>, <ast.Attribute object at 0x7da1b1b2a8f0>, <ast.Attribute object at 0x7da1b1b6fd60>, <ast.Attribute object at 0x7da1b1b6ce80>], [<ast.Attribute object at 0x7da1b1b6ea70>, <ast.Attribute object at 0x7da1b1b6fe80>, <ast.Attribute object at 0x7da1b1b6cdf0>, <ast.Attribute object at 0x7da1b1b6df60>]] name[self].status assign[=] call[name[errors_map]][name[f].state] if compare[name[f].state not_equal[!=] name[f].State.RUNNING] begin[:] name[self].finished_at assign[=] name[f].last_active_at if name[f].context.kill_timestamp begin[:] name[self].status assign[=] name[self].Status.LIFETIME_EXCEEDED if call[name[f].context.HasField, parameter[constant[status]]] begin[:] name[self].log_message assign[=] name[f].context.status if call[name[f].context.HasField, parameter[constant[backtrace]]] begin[:] name[self].backtrace assign[=] name[f].context.backtrace return[name[self]]
keyword[def] identifier[InitFromApiFlow] ( identifier[self] , identifier[f] , identifier[cron_job_id] = keyword[None] ): literal[string] keyword[if] identifier[f] . identifier[flow_id] : identifier[self] . identifier[run_id] = identifier[f] . identifier[flow_id] keyword[elif] identifier[f] . identifier[urn] : identifier[self] . identifier[run_id] = identifier[f] . identifier[urn] . identifier[Basename] () identifier[self] . identifier[started_at] = identifier[f] . identifier[started_at] identifier[self] . identifier[cron_job_id] = identifier[cron_job_id] identifier[flow_state_enum] = identifier[api_plugins_flow] . identifier[ApiFlow] . identifier[State] identifier[cron_enum] = identifier[rdf_cronjobs] . identifier[CronJobRun] . identifier[CronJobRunStatus] identifier[errors_map] ={ identifier[flow_state_enum] . identifier[RUNNING] : identifier[cron_enum] . identifier[RUNNING] , identifier[flow_state_enum] . identifier[TERMINATED] : identifier[cron_enum] . identifier[FINISHED] , identifier[flow_state_enum] . identifier[ERROR] : identifier[cron_enum] . identifier[ERROR] , identifier[flow_state_enum] . identifier[CLIENT_CRASHED] : identifier[cron_enum] . identifier[ERROR] } identifier[self] . identifier[status] = identifier[errors_map] [ identifier[f] . identifier[state] ] keyword[if] identifier[f] . identifier[state] != identifier[f] . identifier[State] . identifier[RUNNING] : identifier[self] . identifier[finished_at] = identifier[f] . identifier[last_active_at] keyword[if] identifier[f] . identifier[context] . identifier[kill_timestamp] : identifier[self] . identifier[status] = identifier[self] . identifier[Status] . identifier[LIFETIME_EXCEEDED] keyword[if] identifier[f] . identifier[context] . identifier[HasField] ( literal[string] ): identifier[self] . identifier[log_message] = identifier[f] . identifier[context] . identifier[status] keyword[if] identifier[f] . identifier[context] . identifier[HasField] ( literal[string] ): identifier[self] . identifier[backtrace] = identifier[f] . identifier[context] . identifier[backtrace] keyword[return] identifier[self]
def InitFromApiFlow(self, f, cron_job_id=None):
    """Shortcut method for easy legacy cron jobs support."""
    if f.flow_id:
        self.run_id = f.flow_id  # depends on [control=['if'], data=[]]
    elif f.urn:
        self.run_id = f.urn.Basename()  # depends on [control=['if'], data=[]]
    self.started_at = f.started_at
    self.cron_job_id = cron_job_id
    flow_state_enum = api_plugins_flow.ApiFlow.State
    cron_enum = rdf_cronjobs.CronJobRun.CronJobRunStatus
    errors_map = {flow_state_enum.RUNNING: cron_enum.RUNNING,
                  flow_state_enum.TERMINATED: cron_enum.FINISHED,
                  flow_state_enum.ERROR: cron_enum.ERROR,
                  flow_state_enum.CLIENT_CRASHED: cron_enum.ERROR}
    self.status = errors_map[f.state]
    if f.state != f.State.RUNNING:
        self.finished_at = f.last_active_at  # depends on [control=['if'], data=[]]
    if f.context.kill_timestamp:
        self.status = self.Status.LIFETIME_EXCEEDED  # depends on [control=['if'], data=[]]
    if f.context.HasField('status'):
        self.log_message = f.context.status  # depends on [control=['if'], data=[]]
    if f.context.HasField('backtrace'):
        self.backtrace = f.context.backtrace  # depends on [control=['if'], data=[]]
    return self
def disease_comment(self, comment=None, entry_name=None, limit=None, as_df=False):
    """Method to query :class:`.models.DiseaseComment` objects in database

    :param comment: Comment(s) to disease
    :type comment: str or tuple(str) or None

    :param entry_name: name(s) in :class:`.models.Entry`
    :type entry_name: str or tuple(str) or None

    :param limit:
        - if `isinstance(limit,int)==True` -> limit
        - if `isinstance(limit,tuple)==True` -> format:= tuple(page_number, results_per_page)
        - if limit == None -> all results
    :type limit: int or tuple(int) or None

    :param bool as_df: if `True` results are returned as :class:`pandas.DataFrame`

    :return:
        - if `as_df == False` -> list(:class:`.models.DiseaseComment`)
        - if `as_df == True`  -> :class:`pandas.DataFrame`
    :rtype: list(:class:`.models.DiseaseComment`) or :class:`pandas.DataFrame`
    """
    q = self.session.query(models.DiseaseComment)
    q = self.get_model_queries(q, ((comment, models.DiseaseComment.comment),))
    q = self.get_one_to_many_queries(q, ((entry_name, models.Entry.name),))
    return self._limit_and_df(q, limit, as_df)
def function[disease_comment, parameter[self, comment, entry_name, limit, as_df]]: constant[Method to query :class:`.models.DiseaseComment` objects in database :param comment: Comment(s) to disease :type comment: str or tuple(str) or None :param entry_name: name(s) in :class:`.models.Entry` :type entry_name: str or tuple(str) or None :param limit: - if `isinstance(limit,int)==True` -> limit - if `isinstance(limit,tuple)==True` -> format:= tuple(page_number, results_per_page) - if limit == None -> all results :type limit: int or tuple(int) or None :param bool as_df: if `True` results are returned as :class:`pandas.DataFrame` :return: - if `as_df == False` -> list(:class:`.models.DiseaseComment`) - if `as_df == True` -> :class:`pandas.DataFrame` :rtype: list(:class:`.models.DiseaseComment`) or :class:`pandas.DataFrame` ] variable[q] assign[=] call[name[self].session.query, parameter[name[models].DiseaseComment]] variable[q] assign[=] call[name[self].get_model_queries, parameter[name[q], tuple[[<ast.Tuple object at 0x7da207f007f0>]]]] variable[q] assign[=] call[name[self].get_one_to_many_queries, parameter[name[q], tuple[[<ast.Tuple object at 0x7da207f004c0>]]]] return[call[name[self]._limit_and_df, parameter[name[q], name[limit], name[as_df]]]]
keyword[def] identifier[disease_comment] ( identifier[self] , identifier[comment] = keyword[None] , identifier[entry_name] = keyword[None] , identifier[limit] = keyword[None] , identifier[as_df] = keyword[False] ): literal[string] identifier[q] = identifier[self] . identifier[session] . identifier[query] ( identifier[models] . identifier[DiseaseComment] ) identifier[q] = identifier[self] . identifier[get_model_queries] ( identifier[q] ,(( identifier[comment] , identifier[models] . identifier[DiseaseComment] . identifier[comment] ),)) identifier[q] = identifier[self] . identifier[get_one_to_many_queries] ( identifier[q] ,(( identifier[entry_name] , identifier[models] . identifier[Entry] . identifier[name] ),)) keyword[return] identifier[self] . identifier[_limit_and_df] ( identifier[q] , identifier[limit] , identifier[as_df] )
def disease_comment(self, comment=None, entry_name=None, limit=None, as_df=False):
    """Method to query :class:`.models.DiseaseComment` objects in database

    :param comment: Comment(s) to disease
    :type comment: str or tuple(str) or None

    :param entry_name: name(s) in :class:`.models.Entry`
    :type entry_name: str or tuple(str) or None

    :param limit:
        - if `isinstance(limit,int)==True` -> limit
        - if `isinstance(limit,tuple)==True` -> format:= tuple(page_number, results_per_page)
        - if limit == None -> all results
    :type limit: int or tuple(int) or None

    :param bool as_df: if `True` results are returned as :class:`pandas.DataFrame`

    :return:
        - if `as_df == False` -> list(:class:`.models.DiseaseComment`)
        - if `as_df == True`  -> :class:`pandas.DataFrame`
    :rtype: list(:class:`.models.DiseaseComment`) or :class:`pandas.DataFrame`
    """
    q = self.session.query(models.DiseaseComment)
    q = self.get_model_queries(q, ((comment, models.DiseaseComment.comment),))
    q = self.get_one_to_many_queries(q, ((entry_name, models.Entry.name),))
    return self._limit_and_df(q, limit, as_df)
def _set_enhanced_voq_max_queue_depth(self, v, load=False):
    """
    Setter method for enhanced_voq_max_queue_depth, mapped from YANG variable /telemetry/profile/enhanced_voq_max_queue_depth (list)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_enhanced_voq_max_queue_depth is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_enhanced_voq_max_queue_depth() directly.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=YANGListType(
                "name",
                enhanced_voq_max_queue_depth.enhanced_voq_max_queue_depth,
                yang_name="enhanced-voq-max-queue-depth",
                rest_name="enhanced-voq-max-queue-depth",
                parent=self,
                is_container='list',
                user_ordered=False,
                path_helper=self._path_helper,
                yang_keys='name',
                extensions={u'tailf-common': {u'cli-full-command': None, u'cli-suppress-list-no': None, u'callpoint': u'EnhancedVoqMaxQueueDepthProfile', u'info': u'Enhanced VOQ max queue depth'}}),
            is_container='list',
            yang_name="enhanced-voq-max-queue-depth",
            rest_name="enhanced-voq-max-queue-depth",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions={u'tailf-common': {u'cli-full-command': None, u'cli-suppress-list-no': None, u'callpoint': u'EnhancedVoqMaxQueueDepthProfile', u'info': u'Enhanced VOQ max queue depth'}},
            namespace='urn:brocade.com:mgmt:brocade-telemetry',
            defining_module='brocade-telemetry',
            yang_type='list',
            is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """enhanced_voq_max_queue_depth must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("name",enhanced_voq_max_queue_depth.enhanced_voq_max_queue_depth, yang_name="enhanced-voq-max-queue-depth", rest_name="enhanced-voq-max-queue-depth", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'cli-full-command': None, u'cli-suppress-list-no': None, u'callpoint': u'EnhancedVoqMaxQueueDepthProfile', u'info': u'Enhanced VOQ max queue depth'}}), is_container='list', yang_name="enhanced-voq-max-queue-depth", rest_name="enhanced-voq-max-queue-depth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-suppress-list-no': None, u'callpoint': u'EnhancedVoqMaxQueueDepthProfile', u'info': u'Enhanced VOQ max queue depth'}}, namespace='urn:brocade.com:mgmt:brocade-telemetry', defining_module='brocade-telemetry', yang_type='list', is_config=True)""",
        })

    self.__enhanced_voq_max_queue_depth = t
    if hasattr(self, '_set'):
        self._set()
def function[_set_enhanced_voq_max_queue_depth, parameter[self, v, load]]: constant[ Setter method for enhanced_voq_max_queue_depth, mapped from YANG variable /telemetry/profile/enhanced_voq_max_queue_depth (list) If this variable is read-only (config: false) in the source YANG file, then _set_enhanced_voq_max_queue_depth is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_enhanced_voq_max_queue_depth() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da18f723a90> name[self].__enhanced_voq_max_queue_depth assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_enhanced_voq_max_queue_depth] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[enhanced_voq_max_queue_depth] . identifier[enhanced_voq_max_queue_depth] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__enhanced_voq_max_queue_depth] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_enhanced_voq_max_queue_depth(self, v, load=False): """ Setter method for enhanced_voq_max_queue_depth, mapped from YANG variable /telemetry/profile/enhanced_voq_max_queue_depth (list) If this variable is read-only (config: false) in the source YANG file, then _set_enhanced_voq_max_queue_depth is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_enhanced_voq_max_queue_depth() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('name', enhanced_voq_max_queue_depth.enhanced_voq_max_queue_depth, yang_name='enhanced-voq-max-queue-depth', rest_name='enhanced-voq-max-queue-depth', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='name', extensions={u'tailf-common': {u'cli-full-command': None, u'cli-suppress-list-no': None, u'callpoint': u'EnhancedVoqMaxQueueDepthProfile', u'info': u'Enhanced VOQ max queue depth'}}), is_container='list', yang_name='enhanced-voq-max-queue-depth', rest_name='enhanced-voq-max-queue-depth', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'cli-suppress-list-no': None, u'callpoint': u'EnhancedVoqMaxQueueDepthProfile', u'info': u'Enhanced VOQ max queue depth'}}, namespace='urn:brocade.com:mgmt:brocade-telemetry', defining_module='brocade-telemetry', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'enhanced_voq_max_queue_depth must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("name",enhanced_voq_max_queue_depth.enhanced_voq_max_queue_depth, yang_name="enhanced-voq-max-queue-depth", rest_name="enhanced-voq-max-queue-depth", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'name\', extensions={u\'tailf-common\': {u\'cli-full-command\': None, u\'cli-suppress-list-no\': None, u\'callpoint\': u\'EnhancedVoqMaxQueueDepthProfile\', u\'info\': u\'Enhanced VOQ max queue depth\'}}), is_container=\'list\', yang_name="enhanced-voq-max-queue-depth", rest_name="enhanced-voq-max-queue-depth", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'cli-full-command\': None, u\'cli-suppress-list-no\': None, u\'callpoint\': u\'EnhancedVoqMaxQueueDepthProfile\', u\'info\': u\'Enhanced VOQ max queue depth\'}}, namespace=\'urn:brocade.com:mgmt:brocade-telemetry\', defining_module=\'brocade-telemetry\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__enhanced_voq_max_queue_depth = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def sort_schemas(schemas): """Sort a list of SQL schemas in order""" def keyfun(v): x = SQL_SCHEMA_REGEXP.match(v).groups() # x3: 'DEV' should come before '' return (int(x[0]), x[1], int(x[2]) if x[2] else None, x[3] if x[3] else 'zzz', int(x[4])) return sorted(schemas, key=keyfun)
def function[sort_schemas, parameter[schemas]]: constant[Sort a list of SQL schemas in order] def function[keyfun, parameter[v]]: variable[x] assign[=] call[call[name[SQL_SCHEMA_REGEXP].match, parameter[name[v]]].groups, parameter[]] return[tuple[[<ast.Call object at 0x7da18fe933d0>, <ast.Subscript object at 0x7da18fe90ac0>, <ast.IfExp object at 0x7da18fe931f0>, <ast.IfExp object at 0x7da18fe917b0>, <ast.Call object at 0x7da18fe92860>]]] return[call[name[sorted], parameter[name[schemas]]]]
keyword[def] identifier[sort_schemas] ( identifier[schemas] ): literal[string] keyword[def] identifier[keyfun] ( identifier[v] ): identifier[x] = identifier[SQL_SCHEMA_REGEXP] . identifier[match] ( identifier[v] ). identifier[groups] () keyword[return] ( identifier[int] ( identifier[x] [ literal[int] ]), identifier[x] [ literal[int] ], identifier[int] ( identifier[x] [ literal[int] ]) keyword[if] identifier[x] [ literal[int] ] keyword[else] keyword[None] , identifier[x] [ literal[int] ] keyword[if] identifier[x] [ literal[int] ] keyword[else] literal[string] , identifier[int] ( identifier[x] [ literal[int] ])) keyword[return] identifier[sorted] ( identifier[schemas] , identifier[key] = identifier[keyfun] )
def sort_schemas(schemas): """Sort a list of SQL schemas in order""" def keyfun(v): x = SQL_SCHEMA_REGEXP.match(v).groups() # x3: 'DEV' should come before '' return (int(x[0]), x[1], int(x[2]) if x[2] else None, x[3] if x[3] else 'zzz', int(x[4])) return sorted(schemas, key=keyfun)
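A quick sketch of keyfun's ordering. SQL_SCHEMA_REGEXP is defined elsewhere, so the pattern below is a hypothetical stand-in (an assumption) that yields the five capture groups keyfun expects -- leading number, letter, optional number, optional 'DEV' tag, trailing number; the point to see is that 'DEV' variants sort before untagged ones because the empty tag is mapped to 'zzz'.

import re

# Hypothetical stand-in for SQL_SCHEMA_REGEXP; the real pattern is not shown here.
SQL_SCHEMA_REGEXP = re.compile(r'^(\d+)([A-Z])(\d+)?(DEV)?(\d+)$')

def keyfun(v):
    x = SQL_SCHEMA_REGEXP.match(v).groups()
    # 'DEV' < 'zzz', so 'DEV' schemas come before the untagged variant
    return (int(x[0]), x[1], int(x[2]) if x[2] else None,
            x[3] if x[3] else 'zzz', int(x[4]))

print(sorted(['1A23', '1A21', '1A2DEV3', '1A2DEV1'], key=keyfun))
# ['1A2DEV1', '1A2DEV3', '1A21', '1A23']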
def textbox(message='', title='', text='', codebox=0): """Original doc: Display some text in a proportional font with line wrapping at word breaks. This function is suitable for displaying general written text. The text parameter should be a string, or a list or tuple of lines to be displayed in the textbox. """ return psidialogs.text(message=message, title=title, text=text)
def function[textbox, parameter[message, title, text, codebox]]: constant[Original doc: Display some text in a proportional font with line wrapping at word breaks. This function is suitable for displaying general written text. The text parameter should be a string, or a list or tuple of lines to be displayed in the textbox. ] return[call[name[psidialogs].text, parameter[]]]
keyword[def] identifier[textbox] ( identifier[message] = literal[string] , identifier[title] = literal[string] , identifier[text] = literal[string] , identifier[codebox] = literal[int] ): literal[string] keyword[return] identifier[psidialogs] . identifier[text] ( identifier[message] = identifier[message] , identifier[title] = identifier[title] , identifier[text] = identifier[text] )
def textbox(message='', title='', text='', codebox=0): """Original doc: Display some text in a proportional font with line wrapping at word breaks. This function is suitable for displaying general written text. The text parameter should be a string, or a list or tuple of lines to be displayed in the textbox. """ return psidialogs.text(message=message, title=title, text=text)
def contains(self, name): """Checks if the specified bucket exists. Args: name: the name of the bucket to lookup. Returns: True if the bucket exists; False otherwise. Raises: Exception if there was an error requesting information about the bucket. """ try: self._api.buckets_get(name) except google.datalab.utils.RequestException as e: if e.status == 404: return False raise e except Exception as e: raise e return True
def function[contains, parameter[self, name]]: constant[Checks if the specified bucket exists. Args: name: the name of the bucket to lookup. Returns: True if the bucket exists; False otherwise. Raises: Exception if there was an error requesting information about the bucket. ] <ast.Try object at 0x7da18ede69b0> return[constant[True]]
keyword[def] identifier[contains] ( identifier[self] , identifier[name] ): literal[string] keyword[try] : identifier[self] . identifier[_api] . identifier[buckets_get] ( identifier[name] ) keyword[except] identifier[google] . identifier[datalab] . identifier[utils] . identifier[RequestException] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[status] == literal[int] : keyword[return] keyword[False] keyword[raise] identifier[e] keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[e] keyword[return] keyword[True]
def contains(self, name): """Checks if the specified bucket exists. Args: name: the name of the bucket to lookup. Returns: True if the bucket exists; False otherwise. Raises: Exception if there was an error requesting information about the bucket. """ try: self._api.buckets_get(name) # depends on [control=['try'], data=[]] except google.datalab.utils.RequestException as e: if e.status == 404: return False # depends on [control=['if'], data=[]] raise e # depends on [control=['except'], data=['e']] except Exception as e: raise e # depends on [control=['except'], data=['e']] return True
def _vertically_size_cells(rendered_rows): """Grow row heights to cater for vertically spanned cells that do not fit in the available space.""" for r, rendered_row in enumerate(rendered_rows): for rendered_cell in rendered_row: if rendered_cell.rowspan > 1: row_height = sum(row.height for row in rendered_rows[r:r + rendered_cell.rowspan]) extra_height_needed = rendered_cell.height - row_height if extra_height_needed > 0: padding = extra_height_needed / rendered_cell.rowspan for i in range(r, r + rendered_cell.rowspan): rendered_rows[i].height += padding return rendered_rows
def function[_vertically_size_cells, parameter[rendered_rows]]: constant[Grow row heights to cater for vertically spanned cells that do not fit in the available space.] for taget[tuple[[<ast.Name object at 0x7da1b26ad270>, <ast.Name object at 0x7da1b26ac3d0>]]] in starred[call[name[enumerate], parameter[name[rendered_rows]]]] begin[:] for taget[name[rendered_cell]] in starred[name[rendered_row]] begin[:] if compare[name[rendered_cell].rowspan greater[>] constant[1]] begin[:] variable[row_height] assign[=] call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18f00e9b0>]] variable[extra_height_needed] assign[=] binary_operation[name[rendered_cell].height - name[row_height]] if compare[name[extra_height_needed] greater[>] constant[0]] begin[:] variable[padding] assign[=] binary_operation[name[extra_height_needed] / name[rendered_cell].rowspan] for taget[name[i]] in starred[call[name[range], parameter[name[r], binary_operation[name[r] + name[rendered_cell].rowspan]]]] begin[:] <ast.AugAssign object at 0x7da204565b40> return[name[rendered_rows]]
keyword[def] identifier[_vertically_size_cells] ( identifier[rendered_rows] ): literal[string] keyword[for] identifier[r] , identifier[rendered_row] keyword[in] identifier[enumerate] ( identifier[rendered_rows] ): keyword[for] identifier[rendered_cell] keyword[in] identifier[rendered_row] : keyword[if] identifier[rendered_cell] . identifier[rowspan] > literal[int] : identifier[row_height] = identifier[sum] ( identifier[row] . identifier[height] keyword[for] identifier[row] keyword[in] identifier[rendered_rows] [ identifier[r] : identifier[r] + identifier[rendered_cell] . identifier[rowspan] ]) identifier[extra_height_needed] = identifier[rendered_cell] . identifier[height] - identifier[row_height] keyword[if] identifier[extra_height_needed] > literal[int] : identifier[padding] = identifier[extra_height_needed] / identifier[rendered_cell] . identifier[rowspan] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[r] , identifier[r] + identifier[rendered_cell] . identifier[rowspan] ): identifier[rendered_rows] [ identifier[i] ]. identifier[height] += identifier[padding] keyword[return] identifier[rendered_rows]
def _vertically_size_cells(rendered_rows): """Grow row heights to cater for vertically spanned cells that do not fit in the available space.""" for (r, rendered_row) in enumerate(rendered_rows): for rendered_cell in rendered_row: if rendered_cell.rowspan > 1: row_height = sum((row.height for row in rendered_rows[r:r + rendered_cell.rowspan])) extra_height_needed = rendered_cell.height - row_height if extra_height_needed > 0: padding = extra_height_needed / rendered_cell.rowspan for i in range(r, r + rendered_cell.rowspan): rendered_rows[i].height += padding # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=['extra_height_needed']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rendered_cell']] # depends on [control=['for'], data=[]] return rendered_rows
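The padding arithmetic is easiest to see on a toy case: a cell of height 9 spanning three rows of height 2 has a shortfall of 9 - 6 = 3, spread as 3 / 3 = 1 extra unit per row. A minimal sketch, using hypothetical stand-ins for the renderer's row and cell objects (only the attributes the function touches):

class FakeCell:
    def __init__(self, height, rowspan):
        self.height, self.rowspan = height, rowspan

class FakeRow:
    def __init__(self, cells, height):
        self.cells, self.height = cells, height
    def __iter__(self):                      # rows are iterated for their cells
        return iter(self.cells)

rows = [FakeRow([FakeCell(height=9, rowspan=3)], height=2),
        FakeRow([], height=2),
        FakeRow([], height=2)]
_vertically_size_cells(rows)
print([row.height for row in rows])          # [3.0, 3.0, 3.0]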
def get_enum_doc(elt, full_name:str)->Tuple[str, str]:
    "Formatted enum documentation."
    vals = ', '.join(elt.__members__.keys())
    return f'{code_esc(full_name)}',f'<code>Enum</code> = [{vals}]'
def function[get_enum_doc, parameter[elt, full_name]]: constant[Formatted enum documentation.] variable[vals] assign[=] call[constant[, ].join, parameter[call[name[elt].__members__.keys, parameter[]]]] return[tuple[[<ast.JoinedStr object at 0x7da1b1e9a710>, <ast.JoinedStr object at 0x7da1b1e9ac80>]]]
keyword[def] identifier[get_enum_doc] ( identifier[elt] , identifier[full_name] : identifier[str] )-> identifier[Tuple] [ identifier[str] , identifier[str] ]: literal[string] identifier[vals] = literal[string] . identifier[join] ( identifier[elt] . identifier[__members__] . identifier[keys] ()) keyword[return] literal[string] , literal[string]
def get_enum_doc(elt, full_name: str) -> Tuple[str, str]:
    """Formatted enum documentation."""
    vals = ', '.join(elt.__members__.keys())
    return (f'{code_esc(full_name)}', f'<code>Enum</code> = [{vals}]')
def GetEntries(self, parser_mediator, top_level=None, **unused_kwargs):
    """Simple method to extract date values from a Plist.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
      top_level (dict[str, object]): plist top-level key.
    """
    for root, key, datetime_value in interface.RecurseKey(top_level):
        if not isinstance(datetime_value, datetime.datetime):
            continue

        event_data = plist_event.PlistTimeEventData()
        event_data.key = key
        event_data.root = root

        event = time_events.PythonDatetimeEvent(
            datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
def function[GetEntries, parameter[self, parser_mediator, top_level]]: constant[Simple method to extract date values from a Plist. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. top_level (dict[str, object]): plist top-level key. ] for taget[tuple[[<ast.Name object at 0x7da204565840>, <ast.Name object at 0x7da204565f60>, <ast.Name object at 0x7da204565a80>]]] in starred[call[name[interface].RecurseKey, parameter[name[top_level]]]] begin[:] if <ast.UnaryOp object at 0x7da2045663b0> begin[:] continue variable[event_data] assign[=] call[name[plist_event].PlistTimeEventData, parameter[]] name[event_data].key assign[=] name[key] name[event_data].root assign[=] name[root] variable[event] assign[=] call[name[time_events].PythonDatetimeEvent, parameter[name[datetime_value], name[definitions].TIME_DESCRIPTION_WRITTEN]] call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]]
keyword[def] identifier[GetEntries] ( identifier[self] , identifier[parser_mediator] , identifier[top_level] = keyword[None] ,** identifier[unused_kwargs] ): literal[string] keyword[for] identifier[root] , identifier[key] , identifier[datetime_value] keyword[in] identifier[interface] . identifier[RecurseKey] ( identifier[top_level] ): keyword[if] keyword[not] identifier[isinstance] ( identifier[datetime_value] , identifier[datetime] . identifier[datetime] ): keyword[continue] identifier[event_data] = identifier[plist_event] . identifier[PlistTimeEventData] () identifier[event_data] . identifier[key] = identifier[key] identifier[event_data] . identifier[root] = identifier[root] identifier[event] = identifier[time_events] . identifier[PythonDatetimeEvent] ( identifier[datetime_value] , identifier[definitions] . identifier[TIME_DESCRIPTION_WRITTEN] ) identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] )
def GetEntries(self, parser_mediator, top_level=None, **unused_kwargs):
    """Simple method to extract date values from a Plist.

    Args:
      parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
      top_level (dict[str, object]): plist top-level key.
    """
    for (root, key, datetime_value) in interface.RecurseKey(top_level):
        if not isinstance(datetime_value, datetime.datetime):
            continue # depends on [control=['if'], data=[]]
        event_data = plist_event.PlistTimeEventData()
        event_data.key = key
        event_data.root = root
        event = time_events.PythonDatetimeEvent(datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data) # depends on [control=['for'], data=[]]
def collision_rate(Temperature, element, isotope):
    r"""This function receives the temperature of an atomic vapour (in Kelvin),
    the element, and the isotope of the atoms, and returns the angular
    frequency rate of collisions (in rad/s) in a vapour assuming a
    Maxwell-Boltzmann velocity distribution, and taking the cross section
    of the collision to be sigma=pi*(2*r)**2 where r is the atomic radius.
    The collision rate returned is gamma_col = 2*pi * ( sigma * v * n )
    where v is the average velocity of the distribution, and n is the
    number density of the vapour.

    A few examples (in Hz):

    >>> print collision_rate(25 + 273.15, "Cs", 133)/2/pi
    9.0607260277

    For cesium collisions become important for temperatures above 120 Celsius.

    >>> print collision_rate(120 + 273.15, "Cs", 133)/2/pi
    10519.235289

    """
    atom = Atom(element, isotope)
    sigma = pi*(2*atom.radius)**2
    v = speed_average(Temperature, element, isotope)
    n = vapour_number_density(Temperature, element)
    return 2*pi*sigma*v*n
def function[collision_rate, parameter[Temperature, element, isotope]]: constant[This function receives the temperature of an atomic vapour (in Kelvin), the element, and the isotope of the atoms, and returns the angular frequency rate of collisions (in rad/s) in a vapour assuming a Maxwell-Boltzmann velocity distribution, and taking the cross section of the collision to be sigma=pi*(2*r)**2 where r is the atomic radius. The collision rate returned is gamma_col = 2*pi * ( sigma * v * n ) where v is the average velocity of the distribution, and n is the number density of the vapour. A few examples (in Hz): >>> print collision_rate(25 + 273.15, "Cs", 133)/2/pi 9.0607260277 For cesium collisions become important for temperatures above 120 Celsius. >>> print collision_rate(120 + 273.15, "Cs", 133)/2/pi 10519.235289 ] variable[atom] assign[=] call[name[Atom], parameter[name[element], name[isotope]]] variable[sigma] assign[=] binary_operation[name[pi] * binary_operation[binary_operation[constant[2] * name[atom].radius] ** constant[2]]] variable[v] assign[=] call[name[speed_average], parameter[name[Temperature], name[element], name[isotope]]] variable[n] assign[=] call[name[vapour_number_density], parameter[name[Temperature], name[element]]] return[binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[pi]] * name[sigma]] * name[v]] * name[n]]]
keyword[def] identifier[collision_rate] ( identifier[Temperature] , identifier[element] , identifier[isotope] ): literal[string] identifier[atom] = identifier[Atom] ( identifier[element] , identifier[isotope] ) identifier[sigma] = identifier[pi] *( literal[int] * identifier[atom] . identifier[radius] )** literal[int] identifier[v] = identifier[speed_average] ( identifier[Temperature] , identifier[element] , identifier[isotope] ) identifier[n] = identifier[vapour_number_density] ( identifier[Temperature] , identifier[element] ) keyword[return] literal[int] * identifier[pi] * identifier[sigma] * identifier[v] * identifier[n]
def collision_rate(Temperature, element, isotope):
    """This function receives the temperature of an atomic vapour (in Kelvin),
    the element, and the isotope of the atoms, and returns the angular
    frequency rate of collisions (in rad/s) in a vapour assuming a
    Maxwell-Boltzmann velocity distribution, and taking the cross section
    of the collision to be sigma=pi*(2*r)**2 where r is the atomic radius.
    The collision rate returned is gamma_col = 2*pi * ( sigma * v * n )
    where v is the average velocity of the distribution, and n is the
    number density of the vapour.

    A few examples (in Hz):

    >>> print collision_rate(25 + 273.15, "Cs", 133)/2/pi
    9.0607260277

    For cesium collisions become important for temperatures above 120 Celsius.

    >>> print collision_rate(120 + 273.15, "Cs", 133)/2/pi
    10519.235289

    """
    atom = Atom(element, isotope)
    sigma = pi * (2 * atom.radius) ** 2
    v = speed_average(Temperature, element, isotope)
    n = vapour_number_density(Temperature, element)
    return 2 * pi * sigma * v * n
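In display form, the rate computed above is

\gamma_{\mathrm{col}} = 2\pi\,\sigma\,\bar{v}\,n, \qquad \sigma = \pi\,(2r)^{2},

where \bar{v} is the Maxwell-Boltzmann average speed from speed_average, n is the number density from vapour_number_density, and r is the atomic radius.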
def remove_factualitylayer_layer(self):
    """
    Removes the factualitylayer layer (the old version), if it exists, from the object (in memory)
    """
    if self.factuality_layer is not None:
        this_node = self.factuality_layer.get_node()
        self.root.remove(this_node)
        self.factuality_layer = None

    if self.header is not None:
        self.header.remove_lp('factualitylayer')
def function[remove_factualitylayer_layer, parameter[self]]: constant[ Removes the factualitylayer layer (the old version), if it exists, from the object (in memory) ] if compare[name[self].factuality_layer is_not constant[None]] begin[:] variable[this_node] assign[=] call[name[self].factuality_layer.get_node, parameter[]] call[name[self].root.remove, parameter[name[this_node]]] name[self].factuality_layer assign[=] constant[None] if compare[name[self].header is_not constant[None]] begin[:] call[name[self].header.remove_lp, parameter[constant[factualitylayer]]]
keyword[def] identifier[remove_factualitylayer_layer] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[factuality_layer] keyword[is] keyword[not] keyword[None] : identifier[this_node] = identifier[self] . identifier[factuality_layer] . identifier[get_node] () identifier[self] . identifier[root] . identifier[remove] ( identifier[this_node] ) identifier[self] . identifier[factuality_layer] = keyword[None] keyword[if] identifier[self] . identifier[header] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[header] . identifier[remove_lp] ( literal[string] )
def remove_factualitylayer_layer(self):
    """
    Removes the factualitylayer layer (the old version), if it exists, from the object (in memory)
    """
    if self.factuality_layer is not None:
        this_node = self.factuality_layer.get_node()
        self.root.remove(this_node)
        self.factuality_layer = None # depends on [control=['if'], data=[]]
    if self.header is not None:
        self.header.remove_lp('factualitylayer') # depends on [control=['if'], data=[]]
def is_present_no_wait(self, locator):
    """
    Determines whether an element is present on the page with no wait

    @type locator: webdriverwrapper.support.locator.Locator
    @param locator: the locator or css string used to query the element
    """
    # first attempt to locate the element
    def execute():
        '''
        Generic function to execute wait
        '''
        # the element is present when the query returns at least one match
        return True if len(self.locator_handler.find_by_locator(self.driver, locator, True)) > 0 else False

    return self.execute_and_handle_webdriver_exceptions(
        execute, timeout=0, locator=locator,
        failure_message='Error running webdriver.find_all.')
def function[is_present_no_wait, parameter[self, locator]]: constant[ Determines whether an element is present on the page with no wait @type locator: webdriverwrapper.support.locator.Locator @param locator: the locator or css string used to query the element ] def function[execute, parameter[]]: constant[ Generic function to execute wait ] return[<ast.IfExp object at 0x7da1b10d5330>] return[call[name[self].execute_and_handle_webdriver_exceptions, parameter[name[execute]]]]
keyword[def] identifier[is_present_no_wait] ( identifier[self] , identifier[locator] ): literal[string] keyword[def] identifier[execute] (): literal[string] keyword[return] keyword[True] keyword[if] identifier[len] ( identifier[self] . identifier[locator_handler] . identifier[find_by_locator] ( identifier[self] . identifier[driver] , identifier[locator] , keyword[True] ))> literal[int] keyword[else] keyword[False] keyword[return] identifier[self] . identifier[execute_and_handle_webdriver_exceptions] ( identifier[execute] , identifier[timeout] = literal[int] , identifier[locator] = identifier[locator] , identifier[failure_message] = literal[string] )
def is_present_no_wait(self, locator):
    """
    Determines whether an element is present on the page with no wait

    @type locator: webdriverwrapper.support.locator.Locator
    @param locator: the locator or css string used to query the element
    """
    # first attempt to locate the element
    def execute():
        """
        Generic function to execute wait
        """
        # the element is present when the query returns at least one match
        return True if len(self.locator_handler.find_by_locator(self.driver, locator, True)) > 0 else False
    return self.execute_and_handle_webdriver_exceptions(execute, timeout=0, locator=locator, failure_message='Error running webdriver.find_all.')
def process_exception(self, request, exception): """Report exceptions from requests via Exreporter. """ gc = GithubCredentials( user=settings.EXREPORTER_GITHUB_USER, repo=settings.EXREPORTER_GITHUB_REPO, auth_token=settings.EXREPORTER_GITHUB_AUTH_TOKEN) gs = GithubStore(credentials=gc) reporter = ExReporter( store=gs, labels=settings.EXREPORTER_GITHUB_LABELS) reporter.report()
def function[process_exception, parameter[self, request, exception]]: constant[Report exceptions from requests via Exreporter. ] variable[gc] assign[=] call[name[GithubCredentials], parameter[]] variable[gs] assign[=] call[name[GithubStore], parameter[]] variable[reporter] assign[=] call[name[ExReporter], parameter[]] call[name[reporter].report, parameter[]]
keyword[def] identifier[process_exception] ( identifier[self] , identifier[request] , identifier[exception] ): literal[string] identifier[gc] = identifier[GithubCredentials] ( identifier[user] = identifier[settings] . identifier[EXREPORTER_GITHUB_USER] , identifier[repo] = identifier[settings] . identifier[EXREPORTER_GITHUB_REPO] , identifier[auth_token] = identifier[settings] . identifier[EXREPORTER_GITHUB_AUTH_TOKEN] ) identifier[gs] = identifier[GithubStore] ( identifier[credentials] = identifier[gc] ) identifier[reporter] = identifier[ExReporter] ( identifier[store] = identifier[gs] , identifier[labels] = identifier[settings] . identifier[EXREPORTER_GITHUB_LABELS] ) identifier[reporter] . identifier[report] ()
def process_exception(self, request, exception): """Report exceptions from requests via Exreporter. """ gc = GithubCredentials(user=settings.EXREPORTER_GITHUB_USER, repo=settings.EXREPORTER_GITHUB_REPO, auth_token=settings.EXREPORTER_GITHUB_AUTH_TOKEN) gs = GithubStore(credentials=gc) reporter = ExReporter(store=gs, labels=settings.EXREPORTER_GITHUB_LABELS) reporter.report()
def create_direct_channel(current): """ Create a One-To-One channel between current and selected user. .. code-block:: python # request: { 'view':'_zops_create_direct_channel', 'user_key': key, } # response: { 'description': string, 'no_of_members': int, 'member_list': [ {'name': string, 'is_online': bool, 'avatar_url': string, }], 'last_messages': [MSG_DICT] 'status': 'Created', 'code': 201, 'channel_key': key, # of just created channel 'name': string, # name of subscribed channel } """ channel, sub_name = Channel.get_or_create_direct_channel(current.user_id, current.input['user_key']) current.input['key'] = channel.key show_channel(current) current.output.update({ 'status': 'Created', 'code': 201 })
def function[create_direct_channel, parameter[current]]: constant[ Create a One-To-One channel between current and selected user. .. code-block:: python # request: { 'view':'_zops_create_direct_channel', 'user_key': key, } # response: { 'description': string, 'no_of_members': int, 'member_list': [ {'name': string, 'is_online': bool, 'avatar_url': string, }], 'last_messages': [MSG_DICT] 'status': 'Created', 'code': 201, 'channel_key': key, # of just created channel 'name': string, # name of subscribed channel } ] <ast.Tuple object at 0x7da18f00cd60> assign[=] call[name[Channel].get_or_create_direct_channel, parameter[name[current].user_id, call[name[current].input][constant[user_key]]]] call[name[current].input][constant[key]] assign[=] name[channel].key call[name[show_channel], parameter[name[current]]] call[name[current].output.update, parameter[dictionary[[<ast.Constant object at 0x7da18f00ccd0>, <ast.Constant object at 0x7da18f00dc60>], [<ast.Constant object at 0x7da18f00f340>, <ast.Constant object at 0x7da18f00d420>]]]]
keyword[def] identifier[create_direct_channel] ( identifier[current] ): literal[string] identifier[channel] , identifier[sub_name] = identifier[Channel] . identifier[get_or_create_direct_channel] ( identifier[current] . identifier[user_id] , identifier[current] . identifier[input] [ literal[string] ]) identifier[current] . identifier[input] [ literal[string] ]= identifier[channel] . identifier[key] identifier[show_channel] ( identifier[current] ) identifier[current] . identifier[output] . identifier[update] ({ literal[string] : literal[string] , literal[string] : literal[int] })
def create_direct_channel(current): """ Create a One-To-One channel between current and selected user. .. code-block:: python # request: { 'view':'_zops_create_direct_channel', 'user_key': key, } # response: { 'description': string, 'no_of_members': int, 'member_list': [ {'name': string, 'is_online': bool, 'avatar_url': string, }], 'last_messages': [MSG_DICT] 'status': 'Created', 'code': 201, 'channel_key': key, # of just created channel 'name': string, # name of subscribed channel } """ (channel, sub_name) = Channel.get_or_create_direct_channel(current.user_id, current.input['user_key']) current.input['key'] = channel.key show_channel(current) current.output.update({'status': 'Created', 'code': 201})
def stdout_encode(u, default='utf-8'):
    """ Encodes a given string with the proper standard out encoding
        If sys.stdout.encoding isn't specified, this defaults to @default

        @default: default encoding

        -> #str with standard out encoding
    """
    # from http://stackoverflow.com/questions/3627793/best-output-type-and-
    # encoding-practices-for-repr-functions
    encoding = sys.stdout.encoding or default
    return u.encode(encoding, "replace").decode(encoding, "replace")
def function[stdout_encode, parameter[u, default]]: constant[ Encodes a given string with the proper standard out encoding If sys.stdout.encoding isn't specified, this defaults to @default @default: default encoding -> #str with standard out encoding ] variable[encoding] assign[=] <ast.BoolOp object at 0x7da1b2716650> return[call[call[name[u].encode, parameter[name[encoding], constant[replace]]].decode, parameter[name[encoding], constant[replace]]]]
keyword[def] identifier[stdout_encode] ( identifier[u] , identifier[default] = literal[string] ): literal[string] identifier[encoding] = identifier[sys] . identifier[stdout] . identifier[encoding] keyword[or] identifier[default] keyword[return] identifier[u] . identifier[encode] ( identifier[encoding] , literal[string] ). identifier[decode] ( identifier[encoding] , literal[string] )
def stdout_encode(u, default='utf-8'):
    """ Encodes a given string with the proper standard out encoding
        If sys.stdout.encoding isn't specified, this defaults to @default

        @default: default encoding

        -> #str with standard out encoding
    """
    # from http://stackoverflow.com/questions/3627793/best-output-type-and-
    # encoding-practices-for-repr-functions
    encoding = sys.stdout.encoding or default
    return u.encode(encoding, 'replace').decode(encoding, 'replace')
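A small usage sketch of the helper above (illustrative input only); with an ASCII stdout the snowman comes back as '?', with a UTF-8 stdout it round-trips unchanged:

import sys  # stdout_encode above relies on sys being imported

print(stdout_encode(u'snowman: \u2603'))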
def set_property(self, prop, value): """Change value of a DAAP property, e.g. volume or media position.""" cmd_url = 'ctrl-int/1/setproperty?{}={}&[AUTH]'.format( prop, value) return self.daap.post(cmd_url)
def function[set_property, parameter[self, prop, value]]: constant[Change value of a DAAP property, e.g. volume or media position.] variable[cmd_url] assign[=] call[constant[ctrl-int/1/setproperty?{}={}&[AUTH]].format, parameter[name[prop], name[value]]] return[call[name[self].daap.post, parameter[name[cmd_url]]]]
keyword[def] identifier[set_property] ( identifier[self] , identifier[prop] , identifier[value] ): literal[string] identifier[cmd_url] = literal[string] . identifier[format] ( identifier[prop] , identifier[value] ) keyword[return] identifier[self] . identifier[daap] . identifier[post] ( identifier[cmd_url] )
def set_property(self, prop, value): """Change value of a DAAP property, e.g. volume or media position.""" cmd_url = 'ctrl-int/1/setproperty?{}={}&[AUTH]'.format(prop, value) return self.daap.post(cmd_url)
def get_random_path(graph) -> List[BaseEntity]: """Get a random path from the graph as a list of nodes. :param pybel.BELGraph graph: A BEL graph """ wg = graph.to_undirected() nodes = wg.nodes() def pick_random_pair() -> Tuple[BaseEntity, BaseEntity]: """Get a pair of random nodes.""" return random.sample(nodes, k=2) source, target = pick_random_pair() tries = 0 sentinel_tries = 5 while not nx.has_path(wg, source, target) and tries < sentinel_tries: tries += 1 source, target = pick_random_pair() if tries == sentinel_tries: return [source] return nx.shortest_path(wg, source=source, target=target)
def function[get_random_path, parameter[graph]]: constant[Get a random path from the graph as a list of nodes. :param pybel.BELGraph graph: A BEL graph ] variable[wg] assign[=] call[name[graph].to_undirected, parameter[]] variable[nodes] assign[=] call[name[wg].nodes, parameter[]] def function[pick_random_pair, parameter[]]: constant[Get a pair of random nodes.] return[call[name[random].sample, parameter[name[nodes]]]] <ast.Tuple object at 0x7da18c4cf430> assign[=] call[name[pick_random_pair], parameter[]] variable[tries] assign[=] constant[0] variable[sentinel_tries] assign[=] constant[5] while <ast.BoolOp object at 0x7da18c4cc460> begin[:] <ast.AugAssign object at 0x7da1b0ebc160> <ast.Tuple object at 0x7da1b0ebdde0> assign[=] call[name[pick_random_pair], parameter[]] if compare[name[tries] equal[==] name[sentinel_tries]] begin[:] return[list[[<ast.Name object at 0x7da1b0ebdf00>]]] return[call[name[nx].shortest_path, parameter[name[wg]]]]
keyword[def] identifier[get_random_path] ( identifier[graph] )-> identifier[List] [ identifier[BaseEntity] ]: literal[string] identifier[wg] = identifier[graph] . identifier[to_undirected] () identifier[nodes] = identifier[wg] . identifier[nodes] () keyword[def] identifier[pick_random_pair] ()-> identifier[Tuple] [ identifier[BaseEntity] , identifier[BaseEntity] ]: literal[string] keyword[return] identifier[random] . identifier[sample] ( identifier[nodes] , identifier[k] = literal[int] ) identifier[source] , identifier[target] = identifier[pick_random_pair] () identifier[tries] = literal[int] identifier[sentinel_tries] = literal[int] keyword[while] keyword[not] identifier[nx] . identifier[has_path] ( identifier[wg] , identifier[source] , identifier[target] ) keyword[and] identifier[tries] < identifier[sentinel_tries] : identifier[tries] += literal[int] identifier[source] , identifier[target] = identifier[pick_random_pair] () keyword[if] identifier[tries] == identifier[sentinel_tries] : keyword[return] [ identifier[source] ] keyword[return] identifier[nx] . identifier[shortest_path] ( identifier[wg] , identifier[source] = identifier[source] , identifier[target] = identifier[target] )
def get_random_path(graph) -> List[BaseEntity]: """Get a random path from the graph as a list of nodes. :param pybel.BELGraph graph: A BEL graph """ wg = graph.to_undirected() nodes = wg.nodes() def pick_random_pair() -> Tuple[BaseEntity, BaseEntity]: """Get a pair of random nodes.""" return random.sample(nodes, k=2) (source, target) = pick_random_pair() tries = 0 sentinel_tries = 5 while not nx.has_path(wg, source, target) and tries < sentinel_tries: tries += 1 (source, target) = pick_random_pair() # depends on [control=['while'], data=[]] if tries == sentinel_tries: return [source] # depends on [control=['if'], data=[]] return nx.shortest_path(wg, source=source, target=target)
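A standalone sketch of the same retry-until-connected idea, using a plain networkx graph in place of a BELGraph (an assumption made so the snippet runs without pybel):

import random
import networkx as nx

g = nx.Graph([(1, 2), (2, 3), (4, 5)])      # two components, so some pairs fail
nodes = list(g.nodes())
source, target = random.sample(nodes, 2)
for _ in range(5):                          # bounded retries, as in the function above
    if nx.has_path(g, source, target):
        break
    source, target = random.sample(nodes, 2)
print(nx.shortest_path(g, source=source, target=target)
      if nx.has_path(g, source, target) else [source])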
def get_closest_points(self, max_distance=None, origin_index=0, origin_raw=None):
    """
    Get closest points to a given origin.
    Returns a dict mapping each destination to its distance from the origin,
    optionally filtered to destinations within max_distance.
    """
    if not self.dict_response['distance']['value']:
        self.get_distance_values()

    if origin_raw:
        origin = copy.deepcopy(self.dict_response['distance']['value'][origin_raw])
    else:
        origin = copy.deepcopy(self.dict_response['distance']['value'][self.origins[origin_index]])

    tmp_origin = copy.deepcopy(origin)
    if max_distance is not None:
        for k, v in tmp_origin.iteritems():
            if v > max_distance or v == 'ZERO_RESULTS':
                del(origin[k])

    return origin
def function[get_closest_points, parameter[self, max_distance, origin_index, origin_raw]]: constant[ Get closest points to a given origin. Returns a dict mapping each destination to its distance from the origin, optionally filtered to destinations within max_distance. ] if <ast.UnaryOp object at 0x7da1b265f790> begin[:] call[name[self].get_distance_values, parameter[]] if name[origin_raw] begin[:] variable[origin] assign[=] call[name[copy].deepcopy, parameter[call[call[call[name[self].dict_response][constant[distance]]][constant[value]]][name[origin_raw]]]] variable[tmp_origin] assign[=] call[name[copy].deepcopy, parameter[name[origin]]] if compare[name[max_distance] is_not constant[None]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b268efb0>, <ast.Name object at 0x7da1b268c190>]]] in starred[call[name[tmp_origin].iteritems, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b268df60> begin[:] <ast.Delete object at 0x7da1b26490f0> return[name[origin]]
keyword[def] identifier[get_closest_points] ( identifier[self] , identifier[max_distance] = keyword[None] , identifier[origin_index] = literal[int] , identifier[origin_raw] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[dict_response] [ literal[string] ][ literal[string] ]: identifier[self] . identifier[get_distance_values] () keyword[if] identifier[origin_raw] : identifier[origin] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[dict_response] [ literal[string] ][ literal[string] ][ identifier[origin_raw] ]) keyword[else] : identifier[origin] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[dict_response] [ literal[string] ][ literal[string] ][ identifier[self] . identifier[origins] [ identifier[origin_index] ]]) identifier[tmp_origin] = identifier[copy] . identifier[deepcopy] ( identifier[origin] ) keyword[if] identifier[max_distance] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[k] , identifier[v] keyword[in] identifier[tmp_origin] . identifier[iteritems] (): keyword[if] identifier[v] > identifier[max_distance] keyword[or] identifier[v] == literal[string] : keyword[del] ( identifier[origin] [ identifier[k] ]) keyword[return] identifier[origin]
def get_closest_points(self, max_distance=None, origin_index=0, origin_raw=None):
    """
    Get closest points to a given origin.
    Returns a dict mapping each destination to its distance from the origin,
    optionally filtered to destinations within max_distance.
    """
    if not self.dict_response['distance']['value']:
        self.get_distance_values() # depends on [control=['if'], data=[]]
    if origin_raw:
        origin = copy.deepcopy(self.dict_response['distance']['value'][origin_raw]) # depends on [control=['if'], data=[]]
    else:
        origin = copy.deepcopy(self.dict_response['distance']['value'][self.origins[origin_index]])
    tmp_origin = copy.deepcopy(origin)
    if max_distance is not None:
        for (k, v) in tmp_origin.iteritems():
            if v > max_distance or v == 'ZERO_RESULTS':
                del origin[k] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['max_distance']]
    return origin
def include_revision(revision_num, skip_factor=1.1):
    """Decide whether to include a revision.

    If the number of revisions is large, we exclude some revisions to avoid
    a quadratic blowup in runtime, since the article is likely also large.

    We make the ratio between consecutive included revision numbers
    approximately equal to "skip_factor".

    Args:
      revision_num: an integer
      skip_factor: a floating point number >= 1.0

    Returns:
      a boolean
    """
    if skip_factor <= 1.0:
        return True
    return (int(math.log1p(revision_num) / math.log(skip_factor)) != int(
        math.log(revision_num + 2.0) / math.log(skip_factor)))
def function[include_revision, parameter[revision_num, skip_factor]]: constant[Decide whether to include a revision. If the number of revisions is large, we exclude some revisions to avoid a quadratic blowup in runtime, since the article is likely also large. We make the ratio between consecutive included revision numbers approximately equal to "skip_factor". Args: revision_num: an integer skip_factor: a floating point number >= 1.0 Returns: a boolean ] if compare[name[skip_factor] less_or_equal[<=] constant[1.0]] begin[:] return[constant[True]] return[compare[call[name[int], parameter[binary_operation[call[name[math].log1p, parameter[name[revision_num]]] / call[name[math].log, parameter[name[skip_factor]]]]]] not_equal[!=] call[name[int], parameter[binary_operation[call[name[math].log, parameter[binary_operation[name[revision_num] + constant[2.0]]]] / call[name[math].log, parameter[name[skip_factor]]]]]]]]
keyword[def] identifier[include_revision] ( identifier[revision_num] , identifier[skip_factor] = literal[int] ): literal[string] keyword[if] identifier[skip_factor] <= literal[int] : keyword[return] keyword[True] keyword[return] ( identifier[int] ( identifier[math] . identifier[log1p] ( identifier[revision_num] )/ identifier[math] . identifier[log] ( identifier[skip_factor] ))!= identifier[int] ( identifier[math] . identifier[log] ( identifier[revision_num] + literal[int] )/ identifier[math] . identifier[log] ( identifier[skip_factor] )))
def include_revision(revision_num, skip_factor=1.1):
    """Decide whether to include a revision.

    If the number of revisions is large, we exclude some revisions to avoid
    a quadratic blowup in runtime, since the article is likely also large.

    We make the ratio between consecutive included revision numbers
    approximately equal to "skip_factor".

    Args:
      revision_num: an integer
      skip_factor: a floating point number >= 1.0

    Returns:
      a boolean
    """
    if skip_factor <= 1.0:
        return True # depends on [control=['if'], data=[]]
    return int(math.log1p(revision_num) / math.log(skip_factor)) != int(math.log(revision_num + 2.0) / math.log(skip_factor))
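The test keeps revision r exactly when floor(log(r+1)/log(f)) differs from floor(log(r+2)/log(f)), i.e. when a power of the factor falls between r+1 and r+2, so the kept revision numbers grow roughly geometrically. A quick check against the function above:

import math  # include_revision relies on math being imported

kept = [r for r in range(60) if include_revision(r, skip_factor=1.5)]
print(kept)  # [0, 1, 2, 4, 6, 10, 16, 24, 37, 56] -- gaps widen by ~1.5x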
def get_managed_policies(group, **conn): """Get a list of the managed policy names that are attached to the group.""" managed_policies = list_attached_group_managed_policies(group['GroupName'], **conn) managed_policy_names = [] for policy in managed_policies: managed_policy_names.append(policy['PolicyName']) return managed_policy_names
def function[get_managed_policies, parameter[group]]: constant[Get a list of the managed policy names that are attached to the group.] variable[managed_policies] assign[=] call[name[list_attached_group_managed_policies], parameter[call[name[group]][constant[GroupName]]]] variable[managed_policy_names] assign[=] list[[]] for taget[name[policy]] in starred[name[managed_policies]] begin[:] call[name[managed_policy_names].append, parameter[call[name[policy]][constant[PolicyName]]]] return[name[managed_policy_names]]
keyword[def] identifier[get_managed_policies] ( identifier[group] ,** identifier[conn] ): literal[string] identifier[managed_policies] = identifier[list_attached_group_managed_policies] ( identifier[group] [ literal[string] ],** identifier[conn] ) identifier[managed_policy_names] =[] keyword[for] identifier[policy] keyword[in] identifier[managed_policies] : identifier[managed_policy_names] . identifier[append] ( identifier[policy] [ literal[string] ]) keyword[return] identifier[managed_policy_names]
def get_managed_policies(group, **conn): """Get a list of the managed policy names that are attached to the group.""" managed_policies = list_attached_group_managed_policies(group['GroupName'], **conn) managed_policy_names = [] for policy in managed_policies: managed_policy_names.append(policy['PolicyName']) # depends on [control=['for'], data=['policy']] return managed_policy_names
def __job_complete_dict(complete_status, manager, job_id): """ Build final dictionary describing completed job for consumption by Pulsar client. """ return_code = manager.return_code(job_id) if return_code == PULSAR_UNKNOWN_RETURN_CODE: return_code = None stdout_contents = manager.stdout_contents(job_id).decode("utf-8") stderr_contents = manager.stderr_contents(job_id).decode("utf-8") job_directory = manager.job_directory(job_id) as_dict = dict( job_id=job_id, complete="true", # Is this still used or is it legacy. status=complete_status, returncode=return_code, stdout=stdout_contents, stderr=stderr_contents, working_directory=job_directory.working_directory(), metadata_directory=job_directory.metadata_directory(), working_directory_contents=job_directory.working_directory_contents(), metadata_directory_contents=job_directory.metadata_directory_contents(), outputs_directory_contents=job_directory.outputs_directory_contents(), system_properties=manager.system_properties(), pulsar_version=pulsar_version, ) return as_dict
def function[__job_complete_dict, parameter[complete_status, manager, job_id]]: constant[ Build final dictionary describing completed job for consumption by Pulsar client. ] variable[return_code] assign[=] call[name[manager].return_code, parameter[name[job_id]]] if compare[name[return_code] equal[==] name[PULSAR_UNKNOWN_RETURN_CODE]] begin[:] variable[return_code] assign[=] constant[None] variable[stdout_contents] assign[=] call[call[name[manager].stdout_contents, parameter[name[job_id]]].decode, parameter[constant[utf-8]]] variable[stderr_contents] assign[=] call[call[name[manager].stderr_contents, parameter[name[job_id]]].decode, parameter[constant[utf-8]]] variable[job_directory] assign[=] call[name[manager].job_directory, parameter[name[job_id]]] variable[as_dict] assign[=] call[name[dict], parameter[]] return[name[as_dict]]
keyword[def] identifier[__job_complete_dict] ( identifier[complete_status] , identifier[manager] , identifier[job_id] ): literal[string] identifier[return_code] = identifier[manager] . identifier[return_code] ( identifier[job_id] ) keyword[if] identifier[return_code] == identifier[PULSAR_UNKNOWN_RETURN_CODE] : identifier[return_code] = keyword[None] identifier[stdout_contents] = identifier[manager] . identifier[stdout_contents] ( identifier[job_id] ). identifier[decode] ( literal[string] ) identifier[stderr_contents] = identifier[manager] . identifier[stderr_contents] ( identifier[job_id] ). identifier[decode] ( literal[string] ) identifier[job_directory] = identifier[manager] . identifier[job_directory] ( identifier[job_id] ) identifier[as_dict] = identifier[dict] ( identifier[job_id] = identifier[job_id] , identifier[complete] = literal[string] , identifier[status] = identifier[complete_status] , identifier[returncode] = identifier[return_code] , identifier[stdout] = identifier[stdout_contents] , identifier[stderr] = identifier[stderr_contents] , identifier[working_directory] = identifier[job_directory] . identifier[working_directory] (), identifier[metadata_directory] = identifier[job_directory] . identifier[metadata_directory] (), identifier[working_directory_contents] = identifier[job_directory] . identifier[working_directory_contents] (), identifier[metadata_directory_contents] = identifier[job_directory] . identifier[metadata_directory_contents] (), identifier[outputs_directory_contents] = identifier[job_directory] . identifier[outputs_directory_contents] (), identifier[system_properties] = identifier[manager] . identifier[system_properties] (), identifier[pulsar_version] = identifier[pulsar_version] , ) keyword[return] identifier[as_dict]
def __job_complete_dict(complete_status, manager, job_id): """ Build final dictionary describing completed job for consumption by Pulsar client. """ return_code = manager.return_code(job_id) if return_code == PULSAR_UNKNOWN_RETURN_CODE: return_code = None # depends on [control=['if'], data=['return_code']] stdout_contents = manager.stdout_contents(job_id).decode('utf-8') stderr_contents = manager.stderr_contents(job_id).decode('utf-8') job_directory = manager.job_directory(job_id) # Is this still used or is it legacy. as_dict = dict(job_id=job_id, complete='true', status=complete_status, returncode=return_code, stdout=stdout_contents, stderr=stderr_contents, working_directory=job_directory.working_directory(), metadata_directory=job_directory.metadata_directory(), working_directory_contents=job_directory.working_directory_contents(), metadata_directory_contents=job_directory.metadata_directory_contents(), outputs_directory_contents=job_directory.outputs_directory_contents(), system_properties=manager.system_properties(), pulsar_version=pulsar_version) return as_dict
def get_ajax(self, request, *args, **kwargs): """ Called when accessed via AJAX on the request method specified by the Datatable. """ response_data = self.get_json_response_object(self._datatable) response = HttpResponse(self.serialize_to_json(response_data), content_type="application/json") return response
def function[get_ajax, parameter[self, request]]: constant[ Called when accessed via AJAX on the request method specified by the Datatable. ] variable[response_data] assign[=] call[name[self].get_json_response_object, parameter[name[self]._datatable]] variable[response] assign[=] call[name[HttpResponse], parameter[call[name[self].serialize_to_json, parameter[name[response_data]]]]] return[name[response]]
keyword[def] identifier[get_ajax] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[response_data] = identifier[self] . identifier[get_json_response_object] ( identifier[self] . identifier[_datatable] ) identifier[response] = identifier[HttpResponse] ( identifier[self] . identifier[serialize_to_json] ( identifier[response_data] ), identifier[content_type] = literal[string] ) keyword[return] identifier[response]
def get_ajax(self, request, *args, **kwargs): """ Called when accessed via AJAX on the request method specified by the Datatable. """ response_data = self.get_json_response_object(self._datatable) response = HttpResponse(self.serialize_to_json(response_data), content_type='application/json') return response
def cublasZgemm(handle, transa, transb, m, n, k, alpha, A, lda, B, ldb, beta, C, ldc):
    """
    Matrix-matrix product for double-precision complex general matrices.
    """
    status = _libcublas.cublasZgemm_v2(handle,
                                       _CUBLAS_OP[transa],
                                       _CUBLAS_OP[transb], m, n, k,
                                       ctypes.byref(cuda.cuDoubleComplex(alpha.real,
                                                                         alpha.imag)),
                                       int(A), lda, int(B), ldb,
                                       ctypes.byref(cuda.cuDoubleComplex(beta.real,
                                                                         beta.imag)),
                                       int(C), ldc)
    cublasCheckStatus(status)
def function[cublasZgemm, parameter[handle, transa, transb, m, n, k, alpha, A, lda, B, ldb, beta, C, ldc]]: constant[ Matrix-matrix product for double-precision complex general matrices. ] variable[status] assign[=] call[name[_libcublas].cublasZgemm_v2, parameter[name[handle], call[name[_CUBLAS_OP]][name[transa]], call[name[_CUBLAS_OP]][name[transb]], name[m], name[n], name[k], call[name[ctypes].byref, parameter[call[name[cuda].cuDoubleComplex, parameter[name[alpha].real, name[alpha].imag]]]], call[name[int], parameter[name[A]]], name[lda], call[name[int], parameter[name[B]]], name[ldb], call[name[ctypes].byref, parameter[call[name[cuda].cuDoubleComplex, parameter[name[beta].real, name[beta].imag]]]], call[name[int], parameter[name[C]]], name[ldc]]] call[name[cublasCheckStatus], parameter[name[status]]]
keyword[def] identifier[cublasZgemm] ( identifier[handle] , identifier[transa] , identifier[transb] , identifier[m] , identifier[n] , identifier[k] , identifier[alpha] , identifier[A] , identifier[lda] , identifier[B] , identifier[ldb] , identifier[beta] , identifier[C] , identifier[ldc] ): literal[string] identifier[status] = identifier[_libcublas] . identifier[cublasZgemm_v2] ( identifier[handle] , identifier[_CUBLAS_OP] [ identifier[transa] ], identifier[_CUBLAS_OP] [ identifier[transb] ], identifier[m] , identifier[n] , identifier[k] , identifier[ctypes] . identifier[byref] ( identifier[cuda] . identifier[cuDoubleComplex] ( identifier[alpha] . identifier[real] , identifier[alpha] . identifier[imag] )), identifier[int] ( identifier[A] ), identifier[lda] , identifier[int] ( identifier[B] ), identifier[ldb] , identifier[ctypes] . identifier[byref] ( identifier[cuda] . identifier[cuDoubleComplex] ( identifier[beta] . identifier[real] , identifier[beta] . identifier[imag] )), identifier[int] ( identifier[C] ), identifier[ldc] ) identifier[cublasCheckStatus] ( identifier[status] )
def cublasZgemm(handle, transa, transb, m, n, k, alpha, A, lda, B, ldb, beta, C, ldc):
    """
    Matrix-matrix product for double-precision complex general matrices.
    """
    status = _libcublas.cublasZgemm_v2(handle, _CUBLAS_OP[transa], _CUBLAS_OP[transb], m, n, k, ctypes.byref(cuda.cuDoubleComplex(alpha.real, alpha.imag)), int(A), lda, int(B), ldb, ctypes.byref(cuda.cuDoubleComplex(beta.real, beta.imag)), int(C), ldc)
    cublasCheckStatus(status)
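Like every BLAS gemm, the wrapper realizes the contract

C \leftarrow \alpha\,\mathrm{op}(A)\,\mathrm{op}(B) + \beta\,C

for column-major double-complex matrices, where op is the identity, transpose, or conjugate transpose as selected by transa/transb, and lda, ldb, ldc are the leading dimensions of the device arrays A, B, C.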
def nameTuple(s: Influence) -> Tuple[str, str]: """ Returns a 2-tuple consisting of the top groundings of the subj and obj of an Influence statement. """ return top_grounding(s.subj), top_grounding(s.obj)
def function[nameTuple, parameter[s]]: constant[ Returns a 2-tuple consisting of the top groundings of the subj and obj of an Influence statement. ] return[tuple[[<ast.Call object at 0x7da20c6aba90>, <ast.Call object at 0x7da20c6ab460>]]]
keyword[def] identifier[nameTuple] ( identifier[s] : identifier[Influence] )-> identifier[Tuple] [ identifier[str] , identifier[str] ]: literal[string] keyword[return] identifier[top_grounding] ( identifier[s] . identifier[subj] ), identifier[top_grounding] ( identifier[s] . identifier[obj] )
def nameTuple(s: Influence) -> Tuple[str, str]: """ Returns a 2-tuple consisting of the top groundings of the subj and obj of an Influence statement. """ return (top_grounding(s.subj), top_grounding(s.obj))
def awaitTermination(self, timeout=None): """Wait for context to stop. :param float timeout: in seconds """ if timeout is not None: IOLoop.current().call_later(timeout, self.stop) IOLoop.current().start() IOLoop.clear_current()
def function[awaitTermination, parameter[self, timeout]]: constant[Wait for context to stop. :param float timeout: in seconds ] if compare[name[timeout] is_not constant[None]] begin[:] call[call[name[IOLoop].current, parameter[]].call_later, parameter[name[timeout], name[self].stop]] call[call[name[IOLoop].current, parameter[]].start, parameter[]] call[name[IOLoop].clear_current, parameter[]]
keyword[def] identifier[awaitTermination] ( identifier[self] , identifier[timeout] = keyword[None] ): literal[string] keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] : identifier[IOLoop] . identifier[current] (). identifier[call_later] ( identifier[timeout] , identifier[self] . identifier[stop] ) identifier[IOLoop] . identifier[current] (). identifier[start] () identifier[IOLoop] . identifier[clear_current] ()
def awaitTermination(self, timeout=None): """Wait for context to stop. :param float timeout: in seconds """ if timeout is not None: IOLoop.current().call_later(timeout, self.stop) # depends on [control=['if'], data=['timeout']] IOLoop.current().start() IOLoop.clear_current()
def parse(self): ''' The first method that should be called after creating an ExhaleRoot object. The Breathe graph is parsed first, followed by the Doxygen xml documents. By the end of this method, all of the ``self.<breathe_kind>``, ``self.all_compounds``, and ``self.all_nodes`` lists as well as the ``self.node_by_refid`` dictionary will be populated. Lastly, this method sorts all of the internal lists. The order of execution is exactly 1. :func:`~exhale.graph.ExhaleRoot.discoverAllNodes` 2. :func:`~exhale.graph.ExhaleRoot.reparentAll` 3. Populate ``self.node_by_refid`` using ``self.all_nodes``. 4. :func:`~exhale.graph.ExhaleRoot.fileRefDiscovery` 5. :func:`~exhale.graph.ExhaleRoot.filePostProcess` 6. :func:`~exhale.graph.ExhaleRoot.parseFunctionSignatures`. 7. :func:`~exhale.graph.ExhaleRoot.sortInternals` ''' self.discoverAllNodes() # now reparent everything we can # NOTE: it's very important that this happens before `fileRefDiscovery`, since # in that method we only want to consider direct descendants self.reparentAll() # now that we have all of the nodes, store them in a convenient manner for refid # lookup when parsing the Doxygen xml files for n in self.all_nodes: self.node_by_refid[n.refid] = n # find missing relationships using the Doxygen xml files self.fileRefDiscovery() self.filePostProcess() # gather the function signatures self.parseFunctionSignatures() # sort all of the lists we just built self.sortInternals()
def function[parse, parameter[self]]: constant[ The first method that should be called after creating an ExhaleRoot object. The Breathe graph is parsed first, followed by the Doxygen xml documents. By the end of this method, all of the ``self.<breathe_kind>``, ``self.all_compounds``, and ``self.all_nodes`` lists as well as the ``self.node_by_refid`` dictionary will be populated. Lastly, this method sorts all of the internal lists. The order of execution is exactly 1. :func:`~exhale.graph.ExhaleRoot.discoverAllNodes` 2. :func:`~exhale.graph.ExhaleRoot.reparentAll` 3. Populate ``self.node_by_refid`` using ``self.all_nodes``. 4. :func:`~exhale.graph.ExhaleRoot.fileRefDiscovery` 5. :func:`~exhale.graph.ExhaleRoot.filePostProcess` 6. :func:`~exhale.graph.ExhaleRoot.parseFunctionSignatures`. 7. :func:`~exhale.graph.ExhaleRoot.sortInternals` ] call[name[self].discoverAllNodes, parameter[]] call[name[self].reparentAll, parameter[]] for taget[name[n]] in starred[name[self].all_nodes] begin[:] call[name[self].node_by_refid][name[n].refid] assign[=] name[n] call[name[self].fileRefDiscovery, parameter[]] call[name[self].filePostProcess, parameter[]] call[name[self].parseFunctionSignatures, parameter[]] call[name[self].sortInternals, parameter[]]
keyword[def] identifier[parse] ( identifier[self] ): literal[string] identifier[self] . identifier[discoverAllNodes] () identifier[self] . identifier[reparentAll] () keyword[for] identifier[n] keyword[in] identifier[self] . identifier[all_nodes] : identifier[self] . identifier[node_by_refid] [ identifier[n] . identifier[refid] ]= identifier[n] identifier[self] . identifier[fileRefDiscovery] () identifier[self] . identifier[filePostProcess] () identifier[self] . identifier[parseFunctionSignatures] () identifier[self] . identifier[sortInternals] ()
def parse(self): """ The first method that should be called after creating an ExhaleRoot object. The Breathe graph is parsed first, followed by the Doxygen xml documents. By the end of this method, all of the ``self.<breathe_kind>``, ``self.all_compounds``, and ``self.all_nodes`` lists as well as the ``self.node_by_refid`` dictionary will be populated. Lastly, this method sorts all of the internal lists. The order of execution is exactly 1. :func:`~exhale.graph.ExhaleRoot.discoverAllNodes` 2. :func:`~exhale.graph.ExhaleRoot.reparentAll` 3. Populate ``self.node_by_refid`` using ``self.all_nodes``. 4. :func:`~exhale.graph.ExhaleRoot.fileRefDiscovery` 5. :func:`~exhale.graph.ExhaleRoot.filePostProcess` 6. :func:`~exhale.graph.ExhaleRoot.parseFunctionSignatures`. 7. :func:`~exhale.graph.ExhaleRoot.sortInternals` """ self.discoverAllNodes() # now reparent everything we can # NOTE: it's very important that this happens before `fileRefDiscovery`, since # in that method we only want to consider direct descendants self.reparentAll() # now that we have all of the nodes, store them in a convenient manner for refid # lookup when parsing the Doxygen xml files for n in self.all_nodes: self.node_by_refid[n.refid] = n # depends on [control=['for'], data=['n']] # find missing relationships using the Doxygen xml files self.fileRefDiscovery() self.filePostProcess() # gather the function signatures self.parseFunctionSignatures() # sort all of the lists we just built self.sortInternals()
def extract_aes(self, payload, master_pub=True): ''' Return the AES key received from the master after the minion has been successfully authenticated. :param dict payload: The incoming payload. This is a dictionary which may have the following keys: 'aes': The shared AES key 'enc': The format of the message. ('clear', 'pub', etc) 'publish_port': The TCP port which published the message 'token': The encrypted token used to verify the message. 'pub_key': The RSA public key of the sender. :rtype: str :return: The shared AES key received from the master. ''' if master_pub: try: aes, token = self.decrypt_aes(payload, master_pub) if token != self.token: log.error( 'The master failed to decrypt the random minion token' ) return '' except Exception: log.error( 'The master failed to decrypt the random minion token' ) return '' return aes else: aes, token = self.decrypt_aes(payload, master_pub) return aes
def function[extract_aes, parameter[self, payload, master_pub]]: constant[ Return the AES key received from the master after the minion has been successfully authenticated. :param dict payload: The incoming payload. This is a dictionary which may have the following keys: 'aes': The shared AES key 'enc': The format of the message. ('clear', 'pub', etc) 'publish_port': The TCP port which published the message 'token': The encrypted token used to verify the message. 'pub_key': The RSA public key of the sender. :rtype: str :return: The shared AES key received from the master. ] if name[master_pub] begin[:] <ast.Try object at 0x7da1b20947f0> return[name[aes]]
keyword[def] identifier[extract_aes] ( identifier[self] , identifier[payload] , identifier[master_pub] = keyword[True] ): literal[string] keyword[if] identifier[master_pub] : keyword[try] : identifier[aes] , identifier[token] = identifier[self] . identifier[decrypt_aes] ( identifier[payload] , identifier[master_pub] ) keyword[if] identifier[token] != identifier[self] . identifier[token] : identifier[log] . identifier[error] ( literal[string] ) keyword[return] literal[string] keyword[except] identifier[Exception] : identifier[log] . identifier[error] ( literal[string] ) keyword[return] literal[string] keyword[return] identifier[aes] keyword[else] : identifier[aes] , identifier[token] = identifier[self] . identifier[decrypt_aes] ( identifier[payload] , identifier[master_pub] ) keyword[return] identifier[aes]
def extract_aes(self, payload, master_pub=True): """ Return the AES key received from the master after the minion has been successfully authenticated. :param dict payload: The incoming payload. This is a dictionary which may have the following keys: 'aes': The shared AES key 'enc': The format of the message. ('clear', 'pub', etc) 'publish_port': The TCP port which published the message 'token': The encrypted token used to verify the message. 'pub_key': The RSA public key of the sender. :rtype: str :return: The shared AES key received from the master. """ if master_pub: try: (aes, token) = self.decrypt_aes(payload, master_pub) if token != self.token: log.error('The master failed to decrypt the random minion token') return '' # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception: log.error('The master failed to decrypt the random minion token') return '' # depends on [control=['except'], data=[]] return aes # depends on [control=['if'], data=[]] else: (aes, token) = self.decrypt_aes(payload, master_pub) return aes
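The token handshake is easier to read with the crypto stubbed out: when master_pub is set, the minion accepts the AES key only if the master echoed back the minion's random token. A toy sketch in which MiniAuth and its trivial decrypt_aes stand in for the real RSA decryption (error logging omitted):

class MiniAuth:
    def __init__(self, token):
        self.token = token

    def decrypt_aes(self, payload, master_pub=True):
        # stand-in: the real method RSA-decrypts these values
        return payload['aes'], payload['token']

    def extract_aes(self, payload, master_pub=True):
        if master_pub:
            aes, token = self.decrypt_aes(payload, master_pub)
            if token != self.token:
                return ''   # master failed the token check
            return aes
        aes, _ = self.decrypt_aes(payload, master_pub)
        return aes

auth = MiniAuth(token=b'minion-nonce')
print(auth.extract_aes({'aes': b'shared-key', 'token': b'minion-nonce'}))  # b'shared-key'
print(auth.extract_aes({'aes': b'shared-key', 'token': b'wrong'}))         # ''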
def arraylike_to_numpy(array_like): """Convert a 1d array-like (e.g., list, tensor, etc.) to an np.ndarray""" orig_type = type(array_like) # Convert to np.ndarray if isinstance(array_like, np.ndarray): pass elif isinstance(array_like, list): array_like = np.array(array_like) elif issparse(array_like): array_like = array_like.toarray() elif isinstance(array_like, torch.Tensor): array_like = array_like.numpy() elif not isinstance(array_like, np.ndarray): array_like = np.array(array_like) else: msg = f"Input of type {orig_type} could not be converted to 1d " "np.ndarray" raise ValueError(msg) # Correct shape if (array_like.ndim > 1) and (1 in array_like.shape): array_like = array_like.flatten() if array_like.ndim != 1: raise ValueError("Input could not be converted to 1d np.array") # Convert to ints if any(array_like % 1): raise ValueError("Input contains at least one non-integer value.") array_like = array_like.astype(np.dtype(int)) return array_like
def function[arraylike_to_numpy, parameter[array_like]]: constant[Convert a 1d array-like (e.g., list, tensor, etc.) to an np.ndarray] variable[orig_type] assign[=] call[name[type], parameter[name[array_like]]] if call[name[isinstance], parameter[name[array_like], name[np].ndarray]] begin[:] pass if <ast.BoolOp object at 0x7da1b1b11600> begin[:] variable[array_like] assign[=] call[name[array_like].flatten, parameter[]] if compare[name[array_like].ndim not_equal[!=] constant[1]] begin[:] <ast.Raise object at 0x7da1b1b10a30> if call[name[any], parameter[binary_operation[name[array_like] <ast.Mod object at 0x7da2590d6920> constant[1]]]] begin[:] <ast.Raise object at 0x7da1b1b13fd0> variable[array_like] assign[=] call[name[array_like].astype, parameter[call[name[np].dtype, parameter[name[int]]]]] return[name[array_like]]
keyword[def] identifier[arraylike_to_numpy] ( identifier[array_like] ): literal[string] identifier[orig_type] = identifier[type] ( identifier[array_like] ) keyword[if] identifier[isinstance] ( identifier[array_like] , identifier[np] . identifier[ndarray] ): keyword[pass] keyword[elif] identifier[isinstance] ( identifier[array_like] , identifier[list] ): identifier[array_like] = identifier[np] . identifier[array] ( identifier[array_like] ) keyword[elif] identifier[issparse] ( identifier[array_like] ): identifier[array_like] = identifier[array_like] . identifier[toarray] () keyword[elif] identifier[isinstance] ( identifier[array_like] , identifier[torch] . identifier[Tensor] ): identifier[array_like] = identifier[array_like] . identifier[numpy] () keyword[elif] keyword[not] identifier[isinstance] ( identifier[array_like] , identifier[np] . identifier[ndarray] ): identifier[array_like] = identifier[np] . identifier[array] ( identifier[array_like] ) keyword[else] : identifier[msg] = literal[string] literal[string] keyword[raise] identifier[ValueError] ( identifier[msg] ) keyword[if] ( identifier[array_like] . identifier[ndim] > literal[int] ) keyword[and] ( literal[int] keyword[in] identifier[array_like] . identifier[shape] ): identifier[array_like] = identifier[array_like] . identifier[flatten] () keyword[if] identifier[array_like] . identifier[ndim] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[any] ( identifier[array_like] % literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[array_like] = identifier[array_like] . identifier[astype] ( identifier[np] . identifier[dtype] ( identifier[int] )) keyword[return] identifier[array_like]
def arraylike_to_numpy(array_like): """Convert a 1d array-like (e.g., list, tensor, etc.) to an np.ndarray""" orig_type = type(array_like) # Convert to np.ndarray if isinstance(array_like, np.ndarray): pass # depends on [control=['if'], data=[]] elif isinstance(array_like, list): array_like = np.array(array_like) # depends on [control=['if'], data=[]] elif issparse(array_like): array_like = array_like.toarray() # depends on [control=['if'], data=[]] elif isinstance(array_like, torch.Tensor): array_like = array_like.numpy() # depends on [control=['if'], data=[]] elif not isinstance(array_like, np.ndarray): array_like = np.array(array_like) # depends on [control=['if'], data=[]] else: msg = f'Input of type {orig_type} could not be converted to 1d np.ndarray' raise ValueError(msg) # Correct shape if array_like.ndim > 1 and 1 in array_like.shape: array_like = array_like.flatten() # depends on [control=['if'], data=[]] if array_like.ndim != 1: raise ValueError('Input could not be converted to 1d np.array') # depends on [control=['if'], data=[]] # Convert to ints if any(array_like % 1): raise ValueError('Input contains at least one non-integer value.') # depends on [control=['if'], data=[]] array_like = array_like.astype(np.dtype(int)) return array_like
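Typical calls, assuming the function and its module imports are in scope. One quirk of the branching above: the final else is unreachable, since any input that gets past the first isinstance check also satisfies `not isinstance(array_like, np.ndarray)`, so the generic np.array fallback always runs instead of the type error:

import numpy as np

print(arraylike_to_numpy([0, 1, 1, 2]))              # array([0, 1, 1, 2])
print(arraylike_to_numpy(np.array([[3.0], [4.0]])))  # (n, 1) input is flattened

try:
    arraylike_to_numpy([0.5, 1])
except ValueError as exc:
    print(exc)   # Input contains at least one non-integer value.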
def dcdict2rdfpy(dc_dict): """Convert a DC dictionary into an RDF Python object.""" ark_prefix = 'ark: ark:' uri = URIRef('') # Create the RDF Python object. rdf_py = ConjunctiveGraph() # Set DC namespace definition. DC = Namespace('http://purl.org/dc/elements/1.1/') # Get the ark for the subject URI from the ark identifier. for element_value in dc_dict['identifier']: if element_value['content'].startswith(ark_prefix): uri = URIRef( element_value['content'].replace( ark_prefix, 'info:ark' ) ) # Bind the prefix/namespace pair. rdf_py.bind('dc', DC) # Get the values for each element in the ordered DC elements. for element_name in DC_ORDER: element_value_list = dc_dict.get(element_name, []) # Add the values to the RDF object. for element_value in element_value_list: # Handle URL values differently. if ('http' in element_value['content'] and ' ' not in element_value['content']): rdf_py.add(( uri, DC[element_name], URIRef(element_value['content']) )) else: rdf_py.add(( uri, DC[element_name], Literal(element_value['content']) )) return rdf_py
def function[dcdict2rdfpy, parameter[dc_dict]]: constant[Convert a DC dictionary into an RDF Python object.] variable[ark_prefix] assign[=] constant[ark: ark:] variable[uri] assign[=] call[name[URIRef], parameter[constant[]]] variable[rdf_py] assign[=] call[name[ConjunctiveGraph], parameter[]] variable[DC] assign[=] call[name[Namespace], parameter[constant[http://purl.org/dc/elements/1.1/]]] for taget[name[element_value]] in starred[call[name[dc_dict]][constant[identifier]]] begin[:] if call[call[name[element_value]][constant[content]].startswith, parameter[name[ark_prefix]]] begin[:] variable[uri] assign[=] call[name[URIRef], parameter[call[call[name[element_value]][constant[content]].replace, parameter[name[ark_prefix], constant[info:ark]]]]] call[name[rdf_py].bind, parameter[constant[dc], name[DC]]] for taget[name[element_name]] in starred[name[DC_ORDER]] begin[:] variable[element_value_list] assign[=] call[name[dc_dict].get, parameter[name[element_name], list[[]]]] for taget[name[element_value]] in starred[name[element_value_list]] begin[:] if <ast.BoolOp object at 0x7da1b220b340> begin[:] call[name[rdf_py].add, parameter[tuple[[<ast.Name object at 0x7da1b220b760>, <ast.Subscript object at 0x7da1b220b9d0>, <ast.Call object at 0x7da1b220b6d0>]]]] return[name[rdf_py]]
keyword[def] identifier[dcdict2rdfpy] ( identifier[dc_dict] ): literal[string] identifier[ark_prefix] = literal[string] identifier[uri] = identifier[URIRef] ( literal[string] ) identifier[rdf_py] = identifier[ConjunctiveGraph] () identifier[DC] = identifier[Namespace] ( literal[string] ) keyword[for] identifier[element_value] keyword[in] identifier[dc_dict] [ literal[string] ]: keyword[if] identifier[element_value] [ literal[string] ]. identifier[startswith] ( identifier[ark_prefix] ): identifier[uri] = identifier[URIRef] ( identifier[element_value] [ literal[string] ]. identifier[replace] ( identifier[ark_prefix] , literal[string] ) ) identifier[rdf_py] . identifier[bind] ( literal[string] , identifier[DC] ) keyword[for] identifier[element_name] keyword[in] identifier[DC_ORDER] : identifier[element_value_list] = identifier[dc_dict] . identifier[get] ( identifier[element_name] ,[]) keyword[for] identifier[element_value] keyword[in] identifier[element_value_list] : keyword[if] ( literal[string] keyword[in] identifier[element_value] [ literal[string] ] keyword[and] literal[string] keyword[not] keyword[in] identifier[element_value] [ literal[string] ]): identifier[rdf_py] . identifier[add] (( identifier[uri] , identifier[DC] [ identifier[element_name] ], identifier[URIRef] ( identifier[element_value] [ literal[string] ]) )) keyword[else] : identifier[rdf_py] . identifier[add] (( identifier[uri] , identifier[DC] [ identifier[element_name] ], identifier[Literal] ( identifier[element_value] [ literal[string] ]) )) keyword[return] identifier[rdf_py]
def dcdict2rdfpy(dc_dict): """Convert a DC dictionary into an RDF Python object.""" ark_prefix = 'ark: ark:' uri = URIRef('') # Create the RDF Python object. rdf_py = ConjunctiveGraph() # Set DC namespace definition. DC = Namespace('http://purl.org/dc/elements/1.1/') # Get the ark for the subject URI from the ark identifier. for element_value in dc_dict['identifier']: if element_value['content'].startswith(ark_prefix): uri = URIRef(element_value['content'].replace(ark_prefix, 'info:ark')) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['element_value']] # Bind the prefix/namespace pair. rdf_py.bind('dc', DC) # Get the values for each element in the ordered DC elements. for element_name in DC_ORDER: element_value_list = dc_dict.get(element_name, []) # Add the values to the RDF object. for element_value in element_value_list: # Handle URL values differently. if 'http' in element_value['content'] and ' ' not in element_value['content']: rdf_py.add((uri, DC[element_name], URIRef(element_value['content']))) # depends on [control=['if'], data=[]] else: rdf_py.add((uri, DC[element_name], Literal(element_value['content']))) # depends on [control=['for'], data=['element_value']] # depends on [control=['for'], data=['element_name']] return rdf_py
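A sketch of the expected input shape, assuming rdflib is installed and the module-level DC_ORDER list contains the elements used here. Each Dublin Core element maps to a list of {'content': ...} dicts, and an identifier starting with 'ark: ark:' becomes the info:ark subject URI (the ARK below is illustrative only):

dc = {
    'identifier': [{'content': 'ark: ark:/67531/metadc0001'}],
    'title': [{'content': 'Example record'}],
    'creator': [{'content': 'Doe, Jane'}],
}
graph = dcdict2rdfpy(dc)
print(graph.serialize(format='turtle'))
# subject becomes <info:ark/67531/metadc0001> with dc:title and dc:creator literals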
def section(self, regex, config='running_config'): """Returns a section of the config Args: regex (str): A valid regular expression used to select sections of configuration to return config (str): The configuration to return. Valid values for config are "running_config" or "startup_config". The default value is "running_config" Returns: The configuration section as a string object. """ if config in ['running_config', 'startup_config']: config = getattr(self, config) match = re.search(regex, config, re.M) if not match: raise TypeError('config section not found') block_start, line_end = match.regs[0] match = re.search(r'^[^\s]', config[line_end:], re.M) if not match: raise TypeError('could not find end block') _, block_end = match.regs[0] block_end = line_end + block_end return config[block_start:block_end]
def function[section, parameter[self, regex, config]]: constant[Returns a section of the config Args: regex (str): A valid regular expression used to select sections of configuration to return config (str): The configuration to return. Valid values for config are "running_config" or "startup_config". The default value is "running_config" Returns: The configuration section as a string object. ] if compare[name[config] in list[[<ast.Constant object at 0x7da1b24b0c10>, <ast.Constant object at 0x7da1b24b0880>]]] begin[:] variable[config] assign[=] call[name[getattr], parameter[name[self], name[config]]] variable[match] assign[=] call[name[re].search, parameter[name[regex], name[config], name[re].M]] if <ast.UnaryOp object at 0x7da1b24b12a0> begin[:] <ast.Raise object at 0x7da1b24b2290> <ast.Tuple object at 0x7da1b24b08b0> assign[=] call[name[match].regs][constant[0]] variable[match] assign[=] call[name[re].search, parameter[constant[^[^\s]], call[name[config]][<ast.Slice object at 0x7da1b24b3340>], name[re].M]] if <ast.UnaryOp object at 0x7da1b24b1540> begin[:] <ast.Raise object at 0x7da1b24b20b0> <ast.Tuple object at 0x7da1b24b3310> assign[=] call[name[match].regs][constant[0]] variable[block_end] assign[=] binary_operation[name[line_end] + name[block_end]] return[call[name[config]][<ast.Slice object at 0x7da1b24b2c20>]]
keyword[def] identifier[section] ( identifier[self] , identifier[regex] , identifier[config] = literal[string] ): literal[string] keyword[if] identifier[config] keyword[in] [ literal[string] , literal[string] ]: identifier[config] = identifier[getattr] ( identifier[self] , identifier[config] ) identifier[match] = identifier[re] . identifier[search] ( identifier[regex] , identifier[config] , identifier[re] . identifier[M] ) keyword[if] keyword[not] identifier[match] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[block_start] , identifier[line_end] = identifier[match] . identifier[regs] [ literal[int] ] identifier[match] = identifier[re] . identifier[search] ( literal[string] , identifier[config] [ identifier[line_end] :], identifier[re] . identifier[M] ) keyword[if] keyword[not] identifier[match] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[_] , identifier[block_end] = identifier[match] . identifier[regs] [ literal[int] ] identifier[block_end] = identifier[line_end] + identifier[block_end] keyword[return] identifier[config] [ identifier[block_start] : identifier[block_end] ]
def section(self, regex, config='running_config'): """Returns a section of the config Args: regex (str): A valid regular expression used to select sections of configuration to return config (str): The configuration to return. Valid values for config are "running_config" or "startup_config". The default value is "running_config" Returns: The configuration section as a string object. """ if config in ['running_config', 'startup_config']: config = getattr(self, config) # depends on [control=['if'], data=['config']] match = re.search(regex, config, re.M) if not match: raise TypeError('config section not found') # depends on [control=['if'], data=[]] (block_start, line_end) = match.regs[0] match = re.search('^[^\\s]', config[line_end:], re.M) if not match: raise TypeError('could not find end block') # depends on [control=['if'], data=[]] (_, block_end) = match.regs[0] block_end = line_end + block_end return config[block_start:block_end]
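The slicing logic can be traced stand-alone with a toy config. One quirk worth knowing: block_end is taken from the end of the one-character `^[^\s]` match, so the returned slice includes the first character of the line that starts the next section:

import re

running_config = (
    'interface Ethernet1\n'
    '   description uplink\n'
    '   no shutdown\n'
    'interface Ethernet2\n'
    '   shutdown\n'
)

match = re.search(r'^interface Ethernet1', running_config, re.M)
block_start, line_end = match.regs[0]
tail = re.search(r'^[^\s]', running_config[line_end:], re.M)
block_end = line_end + tail.regs[0][1]
print(repr(running_config[block_start:block_end]))
# 'interface Ethernet1\n   description uplink\n   no shutdown\ni'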
def ExtractEvents(self, parser_mediator, registry_key, **kwargs): """Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. """ for subkey in registry_key.GetSubkeys(): values_dict = {} values_dict['subkey_name'] = subkey.name name_values = subkey.name.split('&') number_of_name_values = len(name_values) # Normally we expect 4 fields here however that is not always the case. if number_of_name_values != 4: logger.warning( 'Expected 4 &-separated values in: {0:s}'.format(subkey.name)) if number_of_name_values >= 1: values_dict['device_type'] = name_values[0] if number_of_name_values >= 2: values_dict['vendor'] = name_values[1] if number_of_name_values >= 3: values_dict['product'] = name_values[2] if number_of_name_values >= 4: values_dict['revision'] = name_values[3] event_data = windows_events.WindowsRegistryEventData() event_data.key_path = registry_key.path event_data.offset = registry_key.offset event_data.regvalue = values_dict event_data.source_append = self._SOURCE_APPEND if subkey.number_of_subkeys == 0: # Time last USB device of this class was first inserted. event = time_events.DateTimeValuesEvent( subkey.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) continue for device_key in subkey.GetSubkeys(): values_dict['serial'] = device_key.name friendly_name_value = device_key.GetValueByName('FriendlyName') if friendly_name_value: values_dict['friendly_name'] = friendly_name_value.GetDataAsObject() else: values_dict.pop('friendly_name', None) # ParentIdPrefix applies to Windows XP Only. parent_id_prefix_value = device_key.GetValueByName('ParentIdPrefix') if parent_id_prefix_value: values_dict['parent_id_prefix'] = ( parent_id_prefix_value.GetDataAsObject()) else: values_dict.pop('parent_id_prefix', None) # Time last USB device of this class was first inserted. event = time_events.DateTimeValuesEvent( subkey.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) # Win7 - Last Connection. # Vista/XP - Time of an insert. event = time_events.DateTimeValuesEvent( device_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) device_parameter_key = device_key.GetSubkeyByName('Device Parameters') if device_parameter_key: event = time_events.DateTimeValuesEvent( device_parameter_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) log_configuration_key = device_key.GetSubkeyByName('LogConf') if log_configuration_key: event = time_events.DateTimeValuesEvent( log_configuration_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) properties_key = device_key.GetSubkeyByName('Properties') if properties_key: event = time_events.DateTimeValuesEvent( properties_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data)
def function[ExtractEvents, parameter[self, parser_mediator, registry_key]]: constant[Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. ] for taget[name[subkey]] in starred[call[name[registry_key].GetSubkeys, parameter[]]] begin[:] variable[values_dict] assign[=] dictionary[[], []] call[name[values_dict]][constant[subkey_name]] assign[=] name[subkey].name variable[name_values] assign[=] call[name[subkey].name.split, parameter[constant[&]]] variable[number_of_name_values] assign[=] call[name[len], parameter[name[name_values]]] if compare[name[number_of_name_values] not_equal[!=] constant[4]] begin[:] call[name[logger].warning, parameter[call[constant[Expected 4 &-separated values in: {0:s}].format, parameter[name[subkey].name]]]] if compare[name[number_of_name_values] greater_or_equal[>=] constant[1]] begin[:] call[name[values_dict]][constant[device_type]] assign[=] call[name[name_values]][constant[0]] if compare[name[number_of_name_values] greater_or_equal[>=] constant[2]] begin[:] call[name[values_dict]][constant[vendor]] assign[=] call[name[name_values]][constant[1]] if compare[name[number_of_name_values] greater_or_equal[>=] constant[3]] begin[:] call[name[values_dict]][constant[product]] assign[=] call[name[name_values]][constant[2]] if compare[name[number_of_name_values] greater_or_equal[>=] constant[4]] begin[:] call[name[values_dict]][constant[revision]] assign[=] call[name[name_values]][constant[3]] variable[event_data] assign[=] call[name[windows_events].WindowsRegistryEventData, parameter[]] name[event_data].key_path assign[=] name[registry_key].path name[event_data].offset assign[=] name[registry_key].offset name[event_data].regvalue assign[=] name[values_dict] name[event_data].source_append assign[=] name[self]._SOURCE_APPEND if compare[name[subkey].number_of_subkeys equal[==] constant[0]] begin[:] variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[subkey].last_written_time, name[definitions].TIME_DESCRIPTION_WRITTEN]] call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]] continue for taget[name[device_key]] in starred[call[name[subkey].GetSubkeys, parameter[]]] begin[:] call[name[values_dict]][constant[serial]] assign[=] name[device_key].name variable[friendly_name_value] assign[=] call[name[device_key].GetValueByName, parameter[constant[FriendlyName]]] if name[friendly_name_value] begin[:] call[name[values_dict]][constant[friendly_name]] assign[=] call[name[friendly_name_value].GetDataAsObject, parameter[]] variable[parent_id_prefix_value] assign[=] call[name[device_key].GetValueByName, parameter[constant[ParentIdPrefix]]] if name[parent_id_prefix_value] begin[:] call[name[values_dict]][constant[parent_id_prefix]] assign[=] call[name[parent_id_prefix_value].GetDataAsObject, parameter[]] variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[subkey].last_written_time, name[definitions].TIME_DESCRIPTION_WRITTEN]] call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]] variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[device_key].last_written_time, name[definitions].TIME_DESCRIPTION_WRITTEN]] call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]] variable[device_parameter_key] 
assign[=] call[name[device_key].GetSubkeyByName, parameter[constant[Device Parameters]]] if name[device_parameter_key] begin[:] variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[device_parameter_key].last_written_time, name[definitions].TIME_DESCRIPTION_WRITTEN]] call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]] variable[log_configuration_key] assign[=] call[name[device_key].GetSubkeyByName, parameter[constant[LogConf]]] if name[log_configuration_key] begin[:] variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[log_configuration_key].last_written_time, name[definitions].TIME_DESCRIPTION_WRITTEN]] call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]] variable[properties_key] assign[=] call[name[device_key].GetSubkeyByName, parameter[constant[Properties]]] if name[properties_key] begin[:] variable[event] assign[=] call[name[time_events].DateTimeValuesEvent, parameter[name[properties_key].last_written_time, name[definitions].TIME_DESCRIPTION_WRITTEN]] call[name[parser_mediator].ProduceEventWithEventData, parameter[name[event], name[event_data]]]
keyword[def] identifier[ExtractEvents] ( identifier[self] , identifier[parser_mediator] , identifier[registry_key] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[subkey] keyword[in] identifier[registry_key] . identifier[GetSubkeys] (): identifier[values_dict] ={} identifier[values_dict] [ literal[string] ]= identifier[subkey] . identifier[name] identifier[name_values] = identifier[subkey] . identifier[name] . identifier[split] ( literal[string] ) identifier[number_of_name_values] = identifier[len] ( identifier[name_values] ) keyword[if] identifier[number_of_name_values] != literal[int] : identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[subkey] . identifier[name] )) keyword[if] identifier[number_of_name_values] >= literal[int] : identifier[values_dict] [ literal[string] ]= identifier[name_values] [ literal[int] ] keyword[if] identifier[number_of_name_values] >= literal[int] : identifier[values_dict] [ literal[string] ]= identifier[name_values] [ literal[int] ] keyword[if] identifier[number_of_name_values] >= literal[int] : identifier[values_dict] [ literal[string] ]= identifier[name_values] [ literal[int] ] keyword[if] identifier[number_of_name_values] >= literal[int] : identifier[values_dict] [ literal[string] ]= identifier[name_values] [ literal[int] ] identifier[event_data] = identifier[windows_events] . identifier[WindowsRegistryEventData] () identifier[event_data] . identifier[key_path] = identifier[registry_key] . identifier[path] identifier[event_data] . identifier[offset] = identifier[registry_key] . identifier[offset] identifier[event_data] . identifier[regvalue] = identifier[values_dict] identifier[event_data] . identifier[source_append] = identifier[self] . identifier[_SOURCE_APPEND] keyword[if] identifier[subkey] . identifier[number_of_subkeys] == literal[int] : identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] ( identifier[subkey] . identifier[last_written_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_WRITTEN] ) identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] ) keyword[continue] keyword[for] identifier[device_key] keyword[in] identifier[subkey] . identifier[GetSubkeys] (): identifier[values_dict] [ literal[string] ]= identifier[device_key] . identifier[name] identifier[friendly_name_value] = identifier[device_key] . identifier[GetValueByName] ( literal[string] ) keyword[if] identifier[friendly_name_value] : identifier[values_dict] [ literal[string] ]= identifier[friendly_name_value] . identifier[GetDataAsObject] () keyword[else] : identifier[values_dict] . identifier[pop] ( literal[string] , keyword[None] ) identifier[parent_id_prefix_value] = identifier[device_key] . identifier[GetValueByName] ( literal[string] ) keyword[if] identifier[parent_id_prefix_value] : identifier[values_dict] [ literal[string] ]=( identifier[parent_id_prefix_value] . identifier[GetDataAsObject] ()) keyword[else] : identifier[values_dict] . identifier[pop] ( literal[string] , keyword[None] ) identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] ( identifier[subkey] . identifier[last_written_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_WRITTEN] ) identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] ) identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] ( identifier[device_key] . 
identifier[last_written_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_WRITTEN] ) identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] ) identifier[device_parameter_key] = identifier[device_key] . identifier[GetSubkeyByName] ( literal[string] ) keyword[if] identifier[device_parameter_key] : identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] ( identifier[device_parameter_key] . identifier[last_written_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_WRITTEN] ) identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] ) identifier[log_configuration_key] = identifier[device_key] . identifier[GetSubkeyByName] ( literal[string] ) keyword[if] identifier[log_configuration_key] : identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] ( identifier[log_configuration_key] . identifier[last_written_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_WRITTEN] ) identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] ) identifier[properties_key] = identifier[device_key] . identifier[GetSubkeyByName] ( literal[string] ) keyword[if] identifier[properties_key] : identifier[event] = identifier[time_events] . identifier[DateTimeValuesEvent] ( identifier[properties_key] . identifier[last_written_time] , identifier[definitions] . identifier[TIME_DESCRIPTION_WRITTEN] ) identifier[parser_mediator] . identifier[ProduceEventWithEventData] ( identifier[event] , identifier[event_data] )
def ExtractEvents(self, parser_mediator, registry_key, **kwargs): """Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. """ for subkey in registry_key.GetSubkeys(): values_dict = {} values_dict['subkey_name'] = subkey.name name_values = subkey.name.split('&') number_of_name_values = len(name_values) # Normally we expect 4 fields here however that is not always the case. if number_of_name_values != 4: logger.warning('Expected 4 &-separated values in: {0:s}'.format(subkey.name)) # depends on [control=['if'], data=[]] if number_of_name_values >= 1: values_dict['device_type'] = name_values[0] # depends on [control=['if'], data=[]] if number_of_name_values >= 2: values_dict['vendor'] = name_values[1] # depends on [control=['if'], data=[]] if number_of_name_values >= 3: values_dict['product'] = name_values[2] # depends on [control=['if'], data=[]] if number_of_name_values >= 4: values_dict['revision'] = name_values[3] # depends on [control=['if'], data=[]] event_data = windows_events.WindowsRegistryEventData() event_data.key_path = registry_key.path event_data.offset = registry_key.offset event_data.regvalue = values_dict event_data.source_append = self._SOURCE_APPEND if subkey.number_of_subkeys == 0: # Time last USB device of this class was first inserted. event = time_events.DateTimeValuesEvent(subkey.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) continue # depends on [control=['if'], data=[]] for device_key in subkey.GetSubkeys(): values_dict['serial'] = device_key.name friendly_name_value = device_key.GetValueByName('FriendlyName') if friendly_name_value: values_dict['friendly_name'] = friendly_name_value.GetDataAsObject() # depends on [control=['if'], data=[]] else: values_dict.pop('friendly_name', None) # ParentIdPrefix applies to Windows XP Only. parent_id_prefix_value = device_key.GetValueByName('ParentIdPrefix') if parent_id_prefix_value: values_dict['parent_id_prefix'] = parent_id_prefix_value.GetDataAsObject() # depends on [control=['if'], data=[]] else: values_dict.pop('parent_id_prefix', None) # Time last USB device of this class was first inserted. event = time_events.DateTimeValuesEvent(subkey.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) # Win7 - Last Connection. # Vista/XP - Time of an insert. 
event = time_events.DateTimeValuesEvent(device_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) device_parameter_key = device_key.GetSubkeyByName('Device Parameters') if device_parameter_key: event = time_events.DateTimeValuesEvent(device_parameter_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) # depends on [control=['if'], data=[]] log_configuration_key = device_key.GetSubkeyByName('LogConf') if log_configuration_key: event = time_events.DateTimeValuesEvent(log_configuration_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) # depends on [control=['if'], data=[]] properties_key = device_key.GetSubkeyByName('Properties') if properties_key: event = time_events.DateTimeValuesEvent(properties_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['device_key']] # depends on [control=['for'], data=['subkey']]
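Subkey names under USBSTOR follow the convention <device_type>&Ven_<vendor>&Prod_<product>&Rev_<revision>, which is what the split('&') above unpacks. A stand-alone illustration of just that parsing step, with a made-up key name; zip stops early for malformed names, mirroring the cascaded >= checks:

subkey_name = 'Disk&Ven_SanDisk&Prod_Cruzer&Rev_1.26'   # illustrative only
values_dict = {'subkey_name': subkey_name}
for field, value in zip(('device_type', 'vendor', 'product', 'revision'),
                        subkey_name.split('&')):
    values_dict[field] = value
print(values_dict)
# {'subkey_name': ..., 'device_type': 'Disk', 'vendor': 'Ven_SanDisk',
#  'product': 'Prod_Cruzer', 'revision': 'Rev_1.26'}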
def _F_indegree(H, F): """Returns the result of a function F applied to the list of indegrees in the hypergraph. :param H: the hypergraph whose indegrees will be operated on. :param F: function to execute on the list of indegrees in the hypergraph. :returns: result of the given function F. :raises: TypeError -- Algorithm only applicable to directed hypergraphs """ if not isinstance(H, DirectedHypergraph): raise TypeError("Algorithm only applicable to directed hypergraphs") return F([len(H.get_backward_star(node)) for node in H.get_node_set()])
def function[_F_indegree, parameter[H, F]]: constant[Returns the result of a function F applied to the list of indegrees in the hypergraph. :param H: the hypergraph whose indegrees will be operated on. :param F: function to execute on the list of indegrees in the hypergraph. :returns: result of the given function F. :raises: TypeError -- Algorithm only applicable to directed hypergraphs ] if <ast.UnaryOp object at 0x7da1b11d4730> begin[:] <ast.Raise object at 0x7da1b11d6bf0> return[call[name[F], parameter[<ast.ListComp object at 0x7da1b11d5390>]]]
keyword[def] identifier[_F_indegree] ( identifier[H] , identifier[F] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[H] , identifier[DirectedHypergraph] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[F] ([ identifier[len] ( identifier[H] . identifier[get_backward_star] ( identifier[node] )) keyword[for] identifier[node] keyword[in] identifier[H] . identifier[get_node_set] ()])
def _F_indegree(H, F): """Returns the result of a function F applied to the list of indegrees in the hypergraph. :param H: the hypergraph whose indegrees will be operated on. :param F: function to execute on the list of indegrees in the hypergraph. :returns: result of the given function F. :raises: TypeError -- Algorithm only applicable to directed hypergraphs """ if not isinstance(H, DirectedHypergraph): raise TypeError('Algorithm only applicable to directed hypergraphs') # depends on [control=['if'], data=[]] return F([len(H.get_backward_star(node)) for node in H.get_node_set()])
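With the halp package this helper comes from (an assumption about the environment), a two-hyperedge example makes the reduction concrete: the backward star of a node is the set of hyperedges whose head contains it, so F sees the list of those set sizes:

from halp.directed_hypergraph import DirectedHypergraph

H = DirectedHypergraph()
H.add_hyperedge({'a'}, {'b', 'c'})   # one hyperedge into b and into c
H.add_hyperedge({'a', 'b'}, {'c'})   # a second hyperedge into c

print(_F_indegree(H, max))   # 2, node c has the largest indegree
print(_F_indegree(H, sum))   # 3, total indegree across a, b, c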
def bearing_to_nearest_place(feature, parent): """If the impact layer has a bearing field, it will return the bearing to the nearest place in degrees. e.g. bearing_to_nearest_place() -> 280 """ _ = feature, parent # NOQA layer = exposure_summary_layer() if not layer: return None index = layer.fields().lookupField( bearing_field['field_name']) if index < 0: return None feature = next(layer.getFeatures()) return feature[index]
def function[bearing_to_nearest_place, parameter[feature, parent]]: constant[If the impact layer has a bearing field, it will return the bearing to the nearest place in degrees. e.g. bearing_to_nearest_place() -> 280 ] variable[_] assign[=] tuple[[<ast.Name object at 0x7da1b0c453f0>, <ast.Name object at 0x7da1b0c453c0>]] variable[layer] assign[=] call[name[exposure_summary_layer], parameter[]] if <ast.UnaryOp object at 0x7da1b0c44670> begin[:] return[constant[None]] variable[index] assign[=] call[call[name[layer].fields, parameter[]].lookupField, parameter[call[name[bearing_field]][constant[field_name]]]] if compare[name[index] less[<] constant[0]] begin[:] return[constant[None]] variable[feature] assign[=] call[name[next], parameter[call[name[layer].getFeatures, parameter[]]]] return[call[name[feature]][name[index]]]
keyword[def] identifier[bearing_to_nearest_place] ( identifier[feature] , identifier[parent] ): literal[string] identifier[_] = identifier[feature] , identifier[parent] identifier[layer] = identifier[exposure_summary_layer] () keyword[if] keyword[not] identifier[layer] : keyword[return] keyword[None] identifier[index] = identifier[layer] . identifier[fields] (). identifier[lookupField] ( identifier[bearing_field] [ literal[string] ]) keyword[if] identifier[index] < literal[int] : keyword[return] keyword[None] identifier[feature] = identifier[next] ( identifier[layer] . identifier[getFeatures] ()) keyword[return] identifier[feature] [ identifier[index] ]
def bearing_to_nearest_place(feature, parent): """If the impact layer has a bearing field, it will return the bearing to the nearest place in degrees. e.g. bearing_to_nearest_place() -> 280 """ _ = (feature, parent) # NOQA layer = exposure_summary_layer() if not layer: return None # depends on [control=['if'], data=[]] index = layer.fields().lookupField(bearing_field['field_name']) if index < 0: return None # depends on [control=['if'], data=[]] feature = next(layer.getFeatures()) return feature[index]
def calculate_wer(reference, hypothesis): """ Calculation of WER with Levenshtein distance. Works only for iterables up to 254 elements (uint8). O(nm) time and space complexity. >>> calculate_wer("who is there".split(), "is there".split()) 0.3333333333333333 >>> calculate_wer("who is there".split(), "".split()) 1.0 The reference must be non-empty, since the distance is normalized by len(reference). """ # initialisation import numpy d = numpy.zeros((len(reference)+1)*(len(hypothesis)+1), dtype=numpy.uint8) d = d.reshape((len(reference)+1, len(hypothesis)+1)) for i in range(len(reference)+1): for j in range(len(hypothesis)+1): if i == 0: d[0][j] = j elif j == 0: d[i][0] = i # computation for i in range(1, len(reference)+1): for j in range(1, len(hypothesis)+1): if reference[i-1] == hypothesis[j-1]: d[i][j] = d[i-1][j-1] else: substitution = d[i-1][j-1] + 1 insertion = d[i][j-1] + 1 deletion = d[i-1][j] + 1 d[i][j] = min(substitution, insertion, deletion) return d[len(reference)][len(hypothesis)]/float(len(reference))
def function[calculate_wer, parameter[reference, hypothesis]]: constant[ Calculation of WER with Levenshtein distance. Works only for iterables up to 254 elements (uint8). O(nm) time and space complexity. >>> calculate_wer("who is there".split(), "is there".split()) 0.3333333333333333 >>> calculate_wer("who is there".split(), "".split()) 1.0 The reference must be non-empty, since the distance is normalized by len(reference). ] import module[numpy] variable[d] assign[=] call[name[numpy].zeros, parameter[binary_operation[binary_operation[call[name[len], parameter[name[reference]]] + constant[1]] * binary_operation[call[name[len], parameter[name[hypothesis]]] + constant[1]]]]] variable[d] assign[=] call[name[d].reshape, parameter[tuple[[<ast.BinOp object at 0x7da2054a6e90>, <ast.BinOp object at 0x7da2054a5ae0>]]]] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[reference]]] + constant[1]]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[hypothesis]]] + constant[1]]]]] begin[:] if compare[name[i] equal[==] constant[0]] begin[:] call[call[name[d]][constant[0]]][name[j]] assign[=] name[j] for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[reference]]] + constant[1]]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[constant[1], binary_operation[call[name[len], parameter[name[hypothesis]]] + constant[1]]]]] begin[:] if compare[call[name[reference]][binary_operation[name[i] - constant[1]]] equal[==] call[name[hypothesis]][binary_operation[name[j] - constant[1]]]] begin[:] call[call[name[d]][name[i]]][name[j]] assign[=] call[call[name[d]][binary_operation[name[i] - constant[1]]]][binary_operation[name[j] - constant[1]]] return[binary_operation[call[call[name[d]][call[name[len], parameter[name[reference]]]]][call[name[len], parameter[name[hypothesis]]]] / call[name[float], parameter[call[name[len], parameter[name[reference]]]]]]]
keyword[def] identifier[calculate_wer] ( identifier[reference] , identifier[hypothesis] ): literal[string] keyword[import] identifier[numpy] identifier[d] = identifier[numpy] . identifier[zeros] (( identifier[len] ( identifier[reference] )+ literal[int] )*( identifier[len] ( identifier[hypothesis] )+ literal[int] ), identifier[dtype] = identifier[numpy] . identifier[uint8] ) identifier[d] = identifier[d] . identifier[reshape] (( identifier[len] ( identifier[reference] )+ literal[int] , identifier[len] ( identifier[hypothesis] )+ literal[int] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[reference] )+ literal[int] ): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[hypothesis] )+ literal[int] ): keyword[if] identifier[i] == literal[int] : identifier[d] [ literal[int] ][ identifier[j] ]= identifier[j] keyword[elif] identifier[j] == literal[int] : identifier[d] [ identifier[i] ][ literal[int] ]= identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[reference] )+ literal[int] ): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[hypothesis] )+ literal[int] ): keyword[if] identifier[reference] [ identifier[i] - literal[int] ]== identifier[hypothesis] [ identifier[j] - literal[int] ]: identifier[d] [ identifier[i] ][ identifier[j] ]= identifier[d] [ identifier[i] - literal[int] ][ identifier[j] - literal[int] ] keyword[else] : identifier[substitution] = identifier[d] [ identifier[i] - literal[int] ][ identifier[j] - literal[int] ]+ literal[int] identifier[insertion] = identifier[d] [ identifier[i] ][ identifier[j] - literal[int] ]+ literal[int] identifier[deletion] = identifier[d] [ identifier[i] - literal[int] ][ identifier[j] ]+ literal[int] identifier[d] [ identifier[i] ][ identifier[j] ]= identifier[min] ( identifier[substitution] , identifier[insertion] , identifier[deletion] ) keyword[return] identifier[d] [ identifier[len] ( identifier[reference] )][ identifier[len] ( identifier[hypothesis] )]/ identifier[float] ( identifier[len] ( identifier[reference] ))
def calculate_wer(reference, hypothesis): """ Calculation of WER with Levenshtein distance. Works only for iterables up to 254 elements (uint8). O(nm) time and space complexity. >>> calculate_wer("who is there".split(), "is there".split()) 0.3333333333333333 >>> calculate_wer("who is there".split(), "".split()) 1.0 The reference must be non-empty, since the distance is normalized by len(reference). """ # initialisation import numpy d = numpy.zeros((len(reference) + 1) * (len(hypothesis) + 1), dtype=numpy.uint8) d = d.reshape((len(reference) + 1, len(hypothesis) + 1)) for i in range(len(reference) + 1): for j in range(len(hypothesis) + 1): if i == 0: d[0][j] = j # depends on [control=['if'], data=[]] elif j == 0: d[i][0] = i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # computation for i in range(1, len(reference) + 1): for j in range(1, len(hypothesis) + 1): if reference[i - 1] == hypothesis[j - 1]: d[i][j] = d[i - 1][j - 1] # depends on [control=['if'], data=[]] else: substitution = d[i - 1][j - 1] + 1 insertion = d[i][j - 1] + 1 deletion = d[i - 1][j] + 1 d[i][j] = min(substitution, insertion, deletion) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] return d[len(reference)][len(hypothesis)] / float(len(reference))
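Example calls, assuming the function is in scope. The return value is the edit distance normalized by the reference length, and because the table is uint8, references longer than about 254 tokens will silently overflow:

ref = 'the quick brown fox'.split()
print(calculate_wer(ref, 'the quick brown dog'.split()))  # 0.25, one substitution
print(calculate_wer(ref, ref))                            # 0.0, identical sequences
print(calculate_wer(ref, 'the fox'.split()))              # 0.5, two deletions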
def date_from_duration(base_date, number_as_string, unit, duration, base_time=None): """ Find dates from duration Eg: 20 days from now Currently does not support strings like "20 days from last monday". """ # Check if query is `2 days before yesterday` or `day before yesterday` if base_time is not None: base_date = date_from_adverb(base_date, base_time) num = convert_string_to_number(number_as_string) if unit in day_variations: args = {'days': num} elif unit in minute_variations: args = {'minutes': num} elif unit in week_variations: args = {'weeks': num} elif unit in month_variations: args = {'days': 365 * num / 12} elif unit in year_variations: args = {'years': num} if duration == 'ago' or duration == 'before' or duration == 'earlier': if 'years' in args: return datetime(base_date.year - args['years'], base_date.month, base_date.day) return base_date - timedelta(**args) elif duration == 'after' or duration == 'later' or duration == 'from now': if 'years' in args: return datetime(base_date.year + args['years'], base_date.month, base_date.day) return base_date + timedelta(**args)
def function[date_from_duration, parameter[base_date, number_as_string, unit, duration, base_time]]: constant[ Find dates from duration Eg: 20 days from now Currently does not support strings like "20 days from last monday". ] if compare[name[base_time] is_not constant[None]] begin[:] variable[base_date] assign[=] call[name[date_from_adverb], parameter[name[base_date], name[base_time]]] variable[num] assign[=] call[name[convert_string_to_number], parameter[name[number_as_string]]] if compare[name[unit] in name[day_variations]] begin[:] variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b20ca650>], [<ast.Name object at 0x7da1b20c9f60>]] if <ast.BoolOp object at 0x7da1b20c87f0> begin[:] if compare[constant[years] in name[args]] begin[:] return[call[name[datetime], parameter[binary_operation[name[base_date].year - call[name[args]][constant[years]]], name[base_date].month, name[base_date].day]]] return[binary_operation[name[base_date] - call[name[timedelta], parameter[]]]]
keyword[def] identifier[date_from_duration] ( identifier[base_date] , identifier[number_as_string] , identifier[unit] , identifier[duration] , identifier[base_time] = keyword[None] ): literal[string] keyword[if] identifier[base_time] keyword[is] keyword[not] keyword[None] : identifier[base_date] = identifier[date_from_adverb] ( identifier[base_date] , identifier[base_time] ) identifier[num] = identifier[convert_string_to_number] ( identifier[number_as_string] ) keyword[if] identifier[unit] keyword[in] identifier[day_variations] : identifier[args] ={ literal[string] : identifier[num] } keyword[elif] identifier[unit] keyword[in] identifier[minute_variations] : identifier[args] ={ literal[string] : identifier[num] } keyword[elif] identifier[unit] keyword[in] identifier[week_variations] : identifier[args] ={ literal[string] : identifier[num] } keyword[elif] identifier[unit] keyword[in] identifier[month_variations] : identifier[args] ={ literal[string] : literal[int] * identifier[num] / literal[int] } keyword[elif] identifier[unit] keyword[in] identifier[year_variations] : identifier[args] ={ literal[string] : identifier[num] } keyword[if] identifier[duration] == literal[string] keyword[or] identifier[duration] == literal[string] keyword[or] identifier[duration] == literal[string] : keyword[if] literal[string] keyword[in] identifier[args] : keyword[return] identifier[datetime] ( identifier[base_date] . identifier[year] - identifier[args] [ literal[string] ], identifier[base_date] . identifier[month] , identifier[base_date] . identifier[day] ) keyword[return] identifier[base_date] - identifier[timedelta] (** identifier[args] ) keyword[elif] identifier[duration] == literal[string] keyword[or] identifier[duration] == literal[string] keyword[or] identifier[duration] == literal[string] : keyword[if] literal[string] keyword[in] identifier[args] : keyword[return] identifier[datetime] ( identifier[base_date] . identifier[year] + identifier[args] [ literal[string] ], identifier[base_date] . identifier[month] , identifier[base_date] . identifier[day] ) keyword[return] identifier[base_date] + identifier[timedelta] (** identifier[args] )
def date_from_duration(base_date, number_as_string, unit, duration, base_time=None): """ Find dates from duration Eg: 20 days from now Currently does not support strings like "20 days from last monday". """ # Check if query is `2 days before yesterday` or `day before yesterday` if base_time is not None: base_date = date_from_adverb(base_date, base_time) # depends on [control=['if'], data=['base_time']] num = convert_string_to_number(number_as_string) if unit in day_variations: args = {'days': num} # depends on [control=['if'], data=[]] elif unit in minute_variations: args = {'minutes': num} # depends on [control=['if'], data=[]] elif unit in week_variations: args = {'weeks': num} # depends on [control=['if'], data=[]] elif unit in month_variations: args = {'days': 365 * num / 12} # depends on [control=['if'], data=[]] elif unit in year_variations: args = {'years': num} # depends on [control=['if'], data=[]] if duration == 'ago' or duration == 'before' or duration == 'earlier': if 'years' in args: return datetime(base_date.year - args['years'], base_date.month, base_date.day) # depends on [control=['if'], data=['args']] return base_date - timedelta(**args) # depends on [control=['if'], data=[]] elif duration == 'after' or duration == 'later' or duration == 'from now': if 'years' in args: return datetime(base_date.year + args['years'], base_date.month, base_date.day) # depends on [control=['if'], data=['args']] return base_date + timedelta(**args) # depends on [control=['if'], data=[]]
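Illustrative calls. These assume the module context: convert_string_to_number handles the numeric string, and plain 'weeks' / 'years' are assumed to appear in week_variations and year_variations:

from datetime import datetime

base = datetime(2024, 5, 1)
print(date_from_duration(base, '2', 'weeks', 'ago'))       # 2024-04-17 00:00:00
print(date_from_duration(base, '3', 'years', 'from now'))  # 2027-05-01 00:00:00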
def _rectForce(x,pot,t=0.): """ NAME: _rectForce PURPOSE: returns the force in the rectangular frame INPUT: x - current position t - current time pot - (list of) Potential instance(s) OUTPUT: force HISTORY: 2011-02-02 - Written - Bovy (NYU) """ #x is rectangular so calculate R and phi R= nu.sqrt(x[0]**2.+x[1]**2.) phi= nu.arccos(x[0]/R) sinphi= x[1]/R cosphi= x[0]/R if x[1] < 0.: phi= 2.*nu.pi-phi #calculate forces Rforce= _evaluateRforces(pot,R,x[2],phi=phi,t=t) phiforce= _evaluatephiforces(pot,R,x[2],phi=phi,t=t) return nu.array([cosphi*Rforce-1./R*sinphi*phiforce, sinphi*Rforce+1./R*cosphi*phiforce, _evaluatezforces(pot,R,x[2],phi=phi,t=t)])
def function[_rectForce, parameter[x, pot, t]]: constant[ NAME: _rectForce PURPOSE: returns the force in the rectangular frame INPUT: x - current position t - current time pot - (list of) Potential instance(s) OUTPUT: force HISTORY: 2011-02-02 - Written - Bovy (NYU) ] variable[R] assign[=] call[name[nu].sqrt, parameter[binary_operation[binary_operation[call[name[x]][constant[0]] ** constant[2.0]] + binary_operation[call[name[x]][constant[1]] ** constant[2.0]]]]] variable[phi] assign[=] call[name[nu].arccos, parameter[binary_operation[call[name[x]][constant[0]] / name[R]]]] variable[sinphi] assign[=] binary_operation[call[name[x]][constant[1]] / name[R]] variable[cosphi] assign[=] binary_operation[call[name[x]][constant[0]] / name[R]] if compare[call[name[x]][constant[1]] less[<] constant[0.0]] begin[:] variable[phi] assign[=] binary_operation[binary_operation[constant[2.0] * name[nu].pi] - name[phi]] variable[Rforce] assign[=] call[name[_evaluateRforces], parameter[name[pot], name[R], call[name[x]][constant[2]]]] variable[phiforce] assign[=] call[name[_evaluatephiforces], parameter[name[pot], name[R], call[name[x]][constant[2]]]] return[call[name[nu].array, parameter[list[[<ast.BinOp object at 0x7da1b0da30d0>, <ast.BinOp object at 0x7da1b0da3190>, <ast.Call object at 0x7da1b0da0370>]]]]]
keyword[def] identifier[_rectForce] ( identifier[x] , identifier[pot] , identifier[t] = literal[int] ): literal[string] identifier[R] = identifier[nu] . identifier[sqrt] ( identifier[x] [ literal[int] ]** literal[int] + identifier[x] [ literal[int] ]** literal[int] ) identifier[phi] = identifier[nu] . identifier[arccos] ( identifier[x] [ literal[int] ]/ identifier[R] ) identifier[sinphi] = identifier[x] [ literal[int] ]/ identifier[R] identifier[cosphi] = identifier[x] [ literal[int] ]/ identifier[R] keyword[if] identifier[x] [ literal[int] ]< literal[int] : identifier[phi] = literal[int] * identifier[nu] . identifier[pi] - identifier[phi] identifier[Rforce] = identifier[_evaluateRforces] ( identifier[pot] , identifier[R] , identifier[x] [ literal[int] ], identifier[phi] = identifier[phi] , identifier[t] = identifier[t] ) identifier[phiforce] = identifier[_evaluatephiforces] ( identifier[pot] , identifier[R] , identifier[x] [ literal[int] ], identifier[phi] = identifier[phi] , identifier[t] = identifier[t] ) keyword[return] identifier[nu] . identifier[array] ([ identifier[cosphi] * identifier[Rforce] - literal[int] / identifier[R] * identifier[sinphi] * identifier[phiforce] , identifier[sinphi] * identifier[Rforce] + literal[int] / identifier[R] * identifier[cosphi] * identifier[phiforce] , identifier[_evaluatezforces] ( identifier[pot] , identifier[R] , identifier[x] [ literal[int] ], identifier[phi] = identifier[phi] , identifier[t] = identifier[t] )])
def _rectForce(x, pot, t=0.0): """ NAME: _rectForce PURPOSE: returns the force in the rectangular frame INPUT: x - current position t - current time pot - (list of) Potential instance(s) OUTPUT: force HISTORY: 2011-02-02 - Written - Bovy (NYU) """ #x is rectangular so calculate R and phi R = nu.sqrt(x[0] ** 2.0 + x[1] ** 2.0) phi = nu.arccos(x[0] / R) sinphi = x[1] / R cosphi = x[0] / R if x[1] < 0.0: phi = 2.0 * nu.pi - phi # depends on [control=['if'], data=[]] #calculate forces Rforce = _evaluateRforces(pot, R, x[2], phi=phi, t=t) phiforce = _evaluatephiforces(pot, R, x[2], phi=phi, t=t) return nu.array([cosphi * Rforce - 1.0 / R * sinphi * phiforce, sinphi * Rforce + 1.0 / R * cosphi * phiforce, _evaluatezforces(pot, R, x[2], phi=phi, t=t)])
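A hedged, numpy-only check of the rectangular-to-cylindrical step used above; the force evaluation itself needs the galpy potential helpers (_evaluateRforces and friends), so only the geometry is reproduced here.

import numpy as nu

x = nu.array([1.0, -1.0, 0.5])   # rectangular position (x, y, z)
R = nu.sqrt(x[0]**2. + x[1]**2.)
phi = nu.arccos(x[0] / R)
if x[1] < 0.:                    # arccos only covers [0, pi]; reflect below the x-axis
    phi = 2. * nu.pi - phi
print(R, phi)                    # 1.4142... 5.4977... (-45 deg wrapped to 7*pi/4)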
def bisect(seq, func=bool): """ Split a sequence into two sequences: the first is elements that return False for func(element) and the second for True for func(element). By default, func is ``bool``, so uses the truth value of the object. >>> is_odd = lambda n: n%2 >>> even, odd = bisect(range(5), is_odd) >>> list(odd) [1, 3] >>> list(even) [0, 2, 4] >>> other, zeros = bisect(reversed(range(5))) >>> list(zeros) [0] >>> list(other) [4, 3, 2, 1] """ queues = GroupbySaved(seq, func) return queues.get_first_n_queues(2)
def function[bisect, parameter[seq, func]]: constant[ Split a sequence into two sequences: the first is elements that return False for func(element) and the second for True for func(element). By default, func is ``bool``, so uses the truth value of the object. >>> is_odd = lambda n: n%2 >>> even, odd = bisect(range(5), is_odd) >>> list(odd) [1, 3] >>> list(even) [0, 2, 4] >>> other, zeros = bisect(reversed(range(5))) >>> list(zeros) [0] >>> list(other) [4, 3, 2, 1] ] variable[queues] assign[=] call[name[GroupbySaved], parameter[name[seq], name[func]]] return[call[name[queues].get_first_n_queues, parameter[constant[2]]]]
keyword[def] identifier[bisect] ( identifier[seq] , identifier[func] = identifier[bool] ): literal[string] identifier[queues] = identifier[GroupbySaved] ( identifier[seq] , identifier[func] ) keyword[return] identifier[queues] . identifier[get_first_n_queues] ( literal[int] )
def bisect(seq, func=bool): """ Split a sequence into two sequences: the first is elements that return False for func(element) and the second for True for func(element). By default, func is ``bool``, so uses the truth value of the object. >>> is_odd = lambda n: n%2 >>> even, odd = bisect(range(5), is_odd) >>> list(odd) [1, 3] >>> list(even) [0, 2, 4] >>> other, zeros = bisect(reversed(range(5))) >>> list(zeros) [0] >>> list(other) [4, 3, 2, 1] """ queues = GroupbySaved(seq, func) return queues.get_first_n_queues(2)
def instance_path(cls, project, instance): """Return a fully-qualified instance string.""" return google.api_core.path_template.expand( "projects/{project}/instances/{instance}", project=project, instance=instance, )
def function[instance_path, parameter[cls, project, instance]]: constant[Return a fully-qualified instance string.] return[call[name[google].api_core.path_template.expand, parameter[constant[projects/{project}/instances/{instance}]]]]
keyword[def] identifier[instance_path] ( identifier[cls] , identifier[project] , identifier[instance] ): literal[string] keyword[return] identifier[google] . identifier[api_core] . identifier[path_template] . identifier[expand] ( literal[string] , identifier[project] = identifier[project] , identifier[instance] = identifier[instance] , )
def instance_path(cls, project, instance): """Return a fully-qualified instance string.""" return google.api_core.path_template.expand('projects/{project}/instances/{instance}', project=project, instance=instance)
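The path-template expansion is equivalent to simple str.format substitution; a hypothetical call with placeholder names:

template = "projects/{project}/instances/{instance}"
print(template.format(project="my-project", instance="my-instance"))
# projects/my-project/instances/my-instance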
def get_all_usb_devices(idVendor, idProduct): """ Returns a list of all the usb devices matching the provided vendor ID and product ID.""" all_dev = list(usb.core.find(find_all = True, idVendor = idVendor, idProduct = idProduct)) for dev in all_dev: try: dev.detach_kernel_driver(0) except usb.USBError: pass return all_dev
def function[get_all_usb_devices, parameter[idVendor, idProduct]]: constant[ Returns a list of all the usb devices matching the provided vendor ID and product ID.] variable[all_dev] assign[=] call[name[list], parameter[call[name[usb].core.find, parameter[]]]] for taget[name[dev]] in starred[name[all_dev]] begin[:] <ast.Try object at 0x7da18eb57790> return[name[all_dev]]
keyword[def] identifier[get_all_usb_devices] ( identifier[idVendor] , identifier[idProduct] ): literal[string] identifier[all_dev] = identifier[list] ( identifier[usb] . identifier[core] . identifier[find] ( identifier[find_all] = keyword[True] , identifier[idVendor] = identifier[idVendor] , identifier[idProduct] = identifier[idProduct] )) keyword[for] identifier[dev] keyword[in] identifier[all_dev] : keyword[try] : identifier[dev] . identifier[detach_kernel_driver] ( literal[int] ) keyword[except] identifier[usb] . identifier[USBError] : keyword[pass] keyword[return] identifier[all_dev]
def get_all_usb_devices(idVendor, idProduct): """ Returns a list of all the usb devices matching the provided vendor ID and product ID.""" all_dev = list(usb.core.find(find_all=True, idVendor=idVendor, idProduct=idProduct)) for dev in all_dev: try: dev.detach_kernel_driver(0) # depends on [control=['try'], data=[]] except usb.USBError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['dev']] return all_dev
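A hedged usage sketch with pyusb; the vendor and product IDs below are placeholders, not values from the source.

import usb.core  # pyusb, which the function above relies on

# 0x1234/0x5678 are hypothetical IDs; substitute your device's values
for dev in get_all_usb_devices(idVendor=0x1234, idProduct=0x5678):
    print(dev.bus, dev.address)  # the bus/address pair identifies each match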
def summary(model, input_size): """ Print summary of the model """ def register_hook(module): def hook(module, input, output): class_name = str(module.__class__).split('.')[-1].split("'")[0] module_idx = len(summary) m_key = '%s-%i' % (class_name, module_idx + 1) summary[m_key] = OrderedDict() summary[m_key]['input_shape'] = list(input[0].size()) summary[m_key]['input_shape'][0] = -1 if isinstance(output, (list, tuple)): summary[m_key]['output_shape'] = [[-1] + list(o.size())[1:] for o in output] else: summary[m_key]['output_shape'] = list(output.size()) summary[m_key]['output_shape'][0] = -1 params = 0 if hasattr(module, 'weight') and hasattr(module.weight, 'size'): params += torch.prod(torch.LongTensor(list(module.weight.size()))) summary[m_key]['trainable'] = module.weight.requires_grad if hasattr(module, 'bias') and hasattr(module.bias, 'size'): params += torch.prod(torch.LongTensor(list(module.bias.size()))) summary[m_key]['nb_params'] = params if (not isinstance(module, nn.Sequential) and not isinstance(module, nn.ModuleList) and not (module == model)): hooks.append(module.register_forward_hook(hook)) if torch.cuda.is_available(): dtype = torch.cuda.FloatTensor model = model.cuda() else: dtype = torch.FloatTensor model = model.cpu() # check if there are multiple inputs to the network if isinstance(input_size[0], (list, tuple)): x = [Variable(torch.rand(2, *in_size)).type(dtype) for in_size in input_size] else: x = Variable(torch.rand(2, *input_size)).type(dtype) # print(type(x[0])) # create properties summary = OrderedDict() hooks = [] # register hook model.apply(register_hook) # make a forward pass # print(x.shape) model(x) # remove these hooks for h in hooks: h.remove() print('----------------------------------------------------------------') line_new = '{:>20} {:>25} {:>15}'.format('Layer (type)', 'Output Shape', 'Param #') print(line_new) print('================================================================') total_params = 0 trainable_params = 0 for layer in summary: # input_shape, output_shape, trainable, nb_params line_new = '{:>20} {:>25} {:>15}'.format(layer, str(summary[layer]['output_shape']), '{0:,}'.format(summary[layer]['nb_params'])) total_params += summary[layer]['nb_params'] if 'trainable' in summary[layer]: if summary[layer]['trainable'] == True: trainable_params += summary[layer]['nb_params'] print(line_new) print('================================================================') print('Total params: {0:,}'.format(total_params)) print('Trainable params: {0:,}'.format(trainable_params)) print('Non-trainable params: {0:,}'.format(total_params - trainable_params)) print('----------------------------------------------------------------')
def function[summary, parameter[model, input_size]]: constant[ Print summary of the model ] def function[register_hook, parameter[module]]: def function[hook, parameter[module, input, output]]: variable[class_name] assign[=] call[call[call[call[call[name[str], parameter[name[module].__class__]].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da1b16be050>].split, parameter[constant[']]]][constant[0]] variable[module_idx] assign[=] call[name[len], parameter[name[summary]]] variable[m_key] assign[=] binary_operation[constant[%s-%i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b16bd210>, <ast.BinOp object at 0x7da1b16bd810>]]] call[name[summary]][name[m_key]] assign[=] call[name[OrderedDict], parameter[]] call[call[name[summary]][name[m_key]]][constant[input_shape]] assign[=] call[name[list], parameter[call[call[name[input]][constant[0]].size, parameter[]]]] call[call[call[name[summary]][name[m_key]]][constant[input_shape]]][constant[0]] assign[=] <ast.UnaryOp object at 0x7da1b16bc520> if call[name[isinstance], parameter[name[output], tuple[[<ast.Name object at 0x7da1b16bc190>, <ast.Name object at 0x7da1b16bd1b0>]]]] begin[:] call[call[name[summary]][name[m_key]]][constant[output_shape]] assign[=] <ast.ListComp object at 0x7da1b16bc070> variable[params] assign[=] constant[0] if <ast.BoolOp object at 0x7da1b16bc0d0> begin[:] <ast.AugAssign object at 0x7da1b16bfe20> call[call[name[summary]][name[m_key]]][constant[trainable]] assign[=] name[module].weight.requires_grad if <ast.BoolOp object at 0x7da1b16bddb0> begin[:] <ast.AugAssign object at 0x7da1b16be440> call[call[name[summary]][name[m_key]]][constant[nb_params]] assign[=] name[params] if <ast.BoolOp object at 0x7da1b16bd990> begin[:] call[name[hooks].append, parameter[call[name[module].register_forward_hook, parameter[name[hook]]]]] if call[name[torch].cuda.is_available, parameter[]] begin[:] variable[dtype] assign[=] name[torch].cuda.FloatTensor variable[model] assign[=] call[name[model].cuda, parameter[]] if call[name[isinstance], parameter[call[name[input_size]][constant[0]], tuple[[<ast.Name object at 0x7da1b16bda20>, <ast.Name object at 0x7da1b16bd750>]]]] begin[:] variable[x] assign[=] <ast.ListComp object at 0x7da1b16bd9f0> variable[summary] assign[=] call[name[OrderedDict], parameter[]] variable[hooks] assign[=] list[[]] call[name[model].apply, parameter[name[register_hook]]] call[name[model], parameter[name[x]]] for taget[name[h]] in starred[name[hooks]] begin[:] call[name[h].remove, parameter[]] call[name[print], parameter[constant[----------------------------------------------------------------]]] variable[line_new] assign[=] call[constant[{:>20} {:>25} {:>15}].format, parameter[constant[Layer (type)], constant[Output Shape], constant[Param #]]] call[name[print], parameter[name[line_new]]] call[name[print], parameter[constant[================================================================]]] variable[total_params] assign[=] constant[0] variable[trainable_params] assign[=] constant[0] for taget[name[layer]] in starred[name[summary]] begin[:] variable[line_new] assign[=] call[constant[{:>20} {:>25} {:>15}].format, parameter[name[layer], call[name[str], parameter[call[call[name[summary]][name[layer]]][constant[output_shape]]]], call[constant[{0:,}].format, parameter[call[call[name[summary]][name[layer]]][constant[nb_params]]]]]] <ast.AugAssign object at 0x7da204347eb0> if compare[constant[trainable] in call[name[summary]][name[layer]]] begin[:] if 
compare[call[call[name[summary]][name[layer]]][constant[trainable]] equal[==] constant[True]] begin[:] <ast.AugAssign object at 0x7da204347ee0> call[name[print], parameter[name[line_new]]] call[name[print], parameter[constant[================================================================]]] call[name[print], parameter[call[constant[Total params: {0:,}].format, parameter[name[total_params]]]]] call[name[print], parameter[call[constant[Trainable params: {0:,}].format, parameter[name[trainable_params]]]]] call[name[print], parameter[call[constant[Non-trainable params: {0:,}].format, parameter[binary_operation[name[total_params] - name[trainable_params]]]]]] call[name[print], parameter[constant[----------------------------------------------------------------]]]
keyword[def] identifier[summary] ( identifier[model] , identifier[input_size] ): literal[string] keyword[def] identifier[register_hook] ( identifier[module] ): keyword[def] identifier[hook] ( identifier[module] , identifier[input] , identifier[output] ): identifier[class_name] = identifier[str] ( identifier[module] . identifier[__class__] ). identifier[split] ( literal[string] )[- literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ] identifier[module_idx] = identifier[len] ( identifier[summary] ) identifier[m_key] = literal[string] %( identifier[class_name] , identifier[module_idx] + literal[int] ) identifier[summary] [ identifier[m_key] ]= identifier[OrderedDict] () identifier[summary] [ identifier[m_key] ][ literal[string] ]= identifier[list] ( identifier[input] [ literal[int] ]. identifier[size] ()) identifier[summary] [ identifier[m_key] ][ literal[string] ][ literal[int] ]=- literal[int] keyword[if] identifier[isinstance] ( identifier[output] ,( identifier[list] , identifier[tuple] )): identifier[summary] [ identifier[m_key] ][ literal[string] ]=[[- literal[int] ]+ identifier[list] ( identifier[o] . identifier[size] ())[ literal[int] :] keyword[for] identifier[o] keyword[in] identifier[output] ] keyword[else] : identifier[summary] [ identifier[m_key] ][ literal[string] ]= identifier[list] ( identifier[output] . identifier[size] ()) identifier[summary] [ identifier[m_key] ][ literal[string] ][ literal[int] ]=- literal[int] identifier[params] = literal[int] keyword[if] identifier[hasattr] ( identifier[module] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[module] . identifier[weight] , literal[string] ): identifier[params] += identifier[torch] . identifier[prod] ( identifier[torch] . identifier[LongTensor] ( identifier[list] ( identifier[module] . identifier[weight] . identifier[size] ()))) identifier[summary] [ identifier[m_key] ][ literal[string] ]= identifier[module] . identifier[weight] . identifier[requires_grad] keyword[if] identifier[hasattr] ( identifier[module] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[module] . identifier[bias] , literal[string] ): identifier[params] += identifier[torch] . identifier[prod] ( identifier[torch] . identifier[LongTensor] ( identifier[list] ( identifier[module] . identifier[bias] . identifier[size] ()))) identifier[summary] [ identifier[m_key] ][ literal[string] ]= identifier[params] keyword[if] ( keyword[not] identifier[isinstance] ( identifier[module] , identifier[nn] . identifier[Sequential] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[module] , identifier[nn] . identifier[ModuleList] ) keyword[and] keyword[not] ( identifier[module] == identifier[model] )): identifier[hooks] . identifier[append] ( identifier[module] . identifier[register_forward_hook] ( identifier[hook] )) keyword[if] identifier[torch] . identifier[cuda] . identifier[is_available] (): identifier[dtype] = identifier[torch] . identifier[cuda] . identifier[FloatTensor] identifier[model] = identifier[model] . identifier[cuda] () keyword[else] : identifier[dtype] = identifier[torch] . identifier[FloatTensor] identifier[model] = identifier[model] . identifier[cpu] () keyword[if] identifier[isinstance] ( identifier[input_size] [ literal[int] ],( identifier[list] , identifier[tuple] )): identifier[x] =[ identifier[Variable] ( identifier[torch] . identifier[rand] ( literal[int] ,* identifier[in_size] )). 
identifier[type] ( identifier[dtype] ) keyword[for] identifier[in_size] keyword[in] identifier[input_size] ] keyword[else] : identifier[x] = identifier[Variable] ( identifier[torch] . identifier[rand] ( literal[int] ,* identifier[input_size] )). identifier[type] ( identifier[dtype] ) identifier[summary] = identifier[OrderedDict] () identifier[hooks] =[] identifier[model] . identifier[apply] ( identifier[register_hook] ) identifier[model] ( identifier[x] ) keyword[for] identifier[h] keyword[in] identifier[hooks] : identifier[h] . identifier[remove] () identifier[print] ( literal[string] ) identifier[line_new] = literal[string] . identifier[format] ( literal[string] , literal[string] , literal[string] ) identifier[print] ( identifier[line_new] ) identifier[print] ( literal[string] ) identifier[total_params] = literal[int] identifier[trainable_params] = literal[int] keyword[for] identifier[layer] keyword[in] identifier[summary] : identifier[line_new] = literal[string] . identifier[format] ( identifier[layer] , identifier[str] ( identifier[summary] [ identifier[layer] ][ literal[string] ]), literal[string] . identifier[format] ( identifier[summary] [ identifier[layer] ][ literal[string] ])) identifier[total_params] += identifier[summary] [ identifier[layer] ][ literal[string] ] keyword[if] literal[string] keyword[in] identifier[summary] [ identifier[layer] ]: keyword[if] identifier[summary] [ identifier[layer] ][ literal[string] ]== keyword[True] : identifier[trainable_params] += identifier[summary] [ identifier[layer] ][ literal[string] ] identifier[print] ( identifier[line_new] ) identifier[print] ( literal[string] ) identifier[print] ( literal[string] . identifier[format] ( identifier[total_params] )) identifier[print] ( literal[string] . identifier[format] ( identifier[trainable_params] )) identifier[print] ( literal[string] . identifier[format] ( identifier[total_params] - identifier[trainable_params] )) identifier[print] ( literal[string] )
def summary(model, input_size): """ Print summary of the model """ def register_hook(module): def hook(module, input, output): class_name = str(module.__class__).split('.')[-1].split("'")[0] module_idx = len(summary) m_key = '%s-%i' % (class_name, module_idx + 1) summary[m_key] = OrderedDict() summary[m_key]['input_shape'] = list(input[0].size()) summary[m_key]['input_shape'][0] = -1 if isinstance(output, (list, tuple)): summary[m_key]['output_shape'] = [[-1] + list(o.size())[1:] for o in output] # depends on [control=['if'], data=[]] else: summary[m_key]['output_shape'] = list(output.size()) summary[m_key]['output_shape'][0] = -1 params = 0 if hasattr(module, 'weight') and hasattr(module.weight, 'size'): params += torch.prod(torch.LongTensor(list(module.weight.size()))) summary[m_key]['trainable'] = module.weight.requires_grad # depends on [control=['if'], data=[]] if hasattr(module, 'bias') and hasattr(module.bias, 'size'): params += torch.prod(torch.LongTensor(list(module.bias.size()))) # depends on [control=['if'], data=[]] summary[m_key]['nb_params'] = params if not isinstance(module, nn.Sequential) and (not isinstance(module, nn.ModuleList)) and (not module == model): hooks.append(module.register_forward_hook(hook)) # depends on [control=['if'], data=[]] if torch.cuda.is_available(): dtype = torch.cuda.FloatTensor model = model.cuda() # depends on [control=['if'], data=[]] else: dtype = torch.FloatTensor model = model.cpu() # check if there are multiple inputs to the network if isinstance(input_size[0], (list, tuple)): x = [Variable(torch.rand(2, *in_size)).type(dtype) for in_size in input_size] # depends on [control=['if'], data=[]] else: x = Variable(torch.rand(2, *input_size)).type(dtype) # print(type(x[0])) # create properties summary = OrderedDict() hooks = [] # register hook model.apply(register_hook) # make a forward pass # print(x.shape) model(x) # remove these hooks for h in hooks: h.remove() # depends on [control=['for'], data=['h']] print('----------------------------------------------------------------') line_new = '{:>20} {:>25} {:>15}'.format('Layer (type)', 'Output Shape', 'Param #') print(line_new) print('================================================================') total_params = 0 trainable_params = 0 for layer in summary: # input_shape, output_shape, trainable, nb_params line_new = '{:>20} {:>25} {:>15}'.format(layer, str(summary[layer]['output_shape']), '{0:,}'.format(summary[layer]['nb_params'])) total_params += summary[layer]['nb_params'] if 'trainable' in summary[layer]: if summary[layer]['trainable'] == True: trainable_params += summary[layer]['nb_params'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] print(line_new) # depends on [control=['for'], data=['layer']] print('================================================================') print('Total params: {0:,}'.format(total_params)) print('Trainable params: {0:,}'.format(trainable_params)) print('Non-trainable params: {0:,}'.format(total_params - trainable_params)) print('----------------------------------------------------------------')
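A hedged usage sketch; it assumes torch, torch.nn as nn, Variable, and OrderedDict are imported at module level, as the function above requires.

import torch.nn as nn

net = nn.Sequential(nn.Conv2d(3, 8, kernel_size=3), nn.ReLU())
summary(net, (3, 32, 32))  # prints a Keras-style table of layers, output shapes and params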
def _set_fcoe_config(self, v, load=False): """ Setter method for fcoe_config, mapped from YANG variable /rbridge_id/fcoe_config (container) If this variable is read-only (config: false) in the source YANG file, then _set_fcoe_config is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_fcoe_config() directly. YANG Description: This provides the grouping of all FCoE map configuration elements. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=fcoe_config.fcoe_config, is_container='container', presence=False, yang_name="fcoe-config", rest_name="fcoe", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'FCoE configuration commands', u'display-when': u'(/vcsmode/vcs-mode = "true")', u'sort-priority': u'RUNNCFG_LEVEL_RBRIDGE', u'cli-suppress-no': None, u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'fcoe', u'cli-mode-name': u'config-rbridge-fcoe'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """fcoe_config must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=fcoe_config.fcoe_config, is_container='container', presence=False, yang_name="fcoe-config", rest_name="fcoe", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'FCoE configuration commands', u'display-when': u'(/vcsmode/vcs-mode = "true")', u'sort-priority': u'RUNNCFG_LEVEL_RBRIDGE', u'cli-suppress-no': None, u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'fcoe', u'cli-mode-name': u'config-rbridge-fcoe'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True)""", }) self.__fcoe_config = t if hasattr(self, '_set'): self._set()
def function[_set_fcoe_config, parameter[self, v, load]]: constant[ Setter method for fcoe_config, mapped from YANG variable /rbridge_id/fcoe_config (container) If this variable is read-only (config: false) in the source YANG file, then _set_fcoe_config is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_fcoe_config() directly. YANG Description: This provides the grouping of all FCoE map configuration elements. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da18f812b90> name[self].__fcoe_config assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_fcoe_config] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[fcoe_config] . identifier[fcoe_config] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__fcoe_config] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_fcoe_config(self, v, load=False): """ Setter method for fcoe_config, mapped from YANG variable /rbridge_id/fcoe_config (container) If this variable is read-only (config: false) in the source YANG file, then _set_fcoe_config is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_fcoe_config() directly. YANG Description: This provides the grouping of all FCoE map configuration elements. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=fcoe_config.fcoe_config, is_container='container', presence=False, yang_name='fcoe-config', rest_name='fcoe', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'FCoE configuration commands', u'display-when': u'(/vcsmode/vcs-mode = "true")', u'sort-priority': u'RUNNCFG_LEVEL_RBRIDGE', u'cli-suppress-no': None, u'cli-full-command': None, u'cli-add-mode': None, u'alt-name': u'fcoe', u'cli-mode-name': u'config-rbridge-fcoe'}}, namespace='urn:brocade.com:mgmt:brocade-fcoe', defining_module='brocade-fcoe', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'fcoe_config must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=fcoe_config.fcoe_config, is_container=\'container\', presence=False, yang_name="fcoe-config", rest_name="fcoe", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'FCoE configuration commands\', u\'display-when\': u\'(/vcsmode/vcs-mode = "true")\', u\'sort-priority\': u\'RUNNCFG_LEVEL_RBRIDGE\', u\'cli-suppress-no\': None, u\'cli-full-command\': None, u\'cli-add-mode\': None, u\'alt-name\': u\'fcoe\', u\'cli-mode-name\': u\'config-rbridge-fcoe\'}}, namespace=\'urn:brocade.com:mgmt:brocade-fcoe\', defining_module=\'brocade-fcoe\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__fcoe_config = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def archive(context, clear_log, items, path, name): """ Archive the history log and all results/log files. After archive is created optionally clear the history log. """ history_log = context.obj['history_log'] no_color = context.obj['no_color'] with open(history_log, 'r') as f: # Get history items history_items = f.readlines() if items: # Split comma separated list and cast indices to integer. items = [int(item) for item in items.split(',')] lines = [] for index in items: lines.append(history_items[len(history_items) - index]) history_items = lines with tempfile.TemporaryDirectory() as temp_dir: for item in history_items: # Copy log and results file, # update results file with relative path. archive_history_item(item, temp_dir, no_color) file_name = ''.join([name, '.tar.gz']) archive_path = os.path.join(path, file_name) with tarfile.open(archive_path, "w:gz") as tar: # Create tar archive tar.add(temp_dir, arcname='results') if clear_log: if items: # Remove duplicates to prevent unwanted deletion. items = list(set(items)) # Must delete items from bottom to top of history file # to preserve indices. (Index 0 is last item in file) items.sort() for index in items: context.invoke(delete, item=index) else: context.invoke(clear) click.echo( 'Exported results history to archive: {0}'.format(archive_path) )
def function[archive, parameter[context, clear_log, items, path, name]]: constant[ Archive the history log and all results/log files. After archive is created optionally clear the history log. ] variable[history_log] assign[=] call[name[context].obj][constant[history_log]] variable[no_color] assign[=] call[name[context].obj][constant[no_color]] with call[name[open], parameter[name[history_log], constant[r]]] begin[:] variable[history_items] assign[=] call[name[f].readlines, parameter[]] if name[items] begin[:] variable[items] assign[=] <ast.ListComp object at 0x7da1b1a204f0> variable[lines] assign[=] list[[]] for taget[name[index]] in starred[name[items]] begin[:] call[name[lines].append, parameter[call[name[history_items]][binary_operation[call[name[len], parameter[name[history_items]]] - name[index]]]]] variable[history_items] assign[=] name[lines] with call[name[tempfile].TemporaryDirectory, parameter[]] begin[:] for taget[name[item]] in starred[name[history_items]] begin[:] call[name[archive_history_item], parameter[name[item], name[temp_dir], name[no_color]]] variable[file_name] assign[=] call[constant[].join, parameter[list[[<ast.Name object at 0x7da1b1a21d50>, <ast.Constant object at 0x7da1b1a20dc0>]]]] variable[archive_path] assign[=] call[name[os].path.join, parameter[name[path], name[file_name]]] with call[name[tarfile].open, parameter[name[archive_path], constant[w:gz]]] begin[:] call[name[tar].add, parameter[name[temp_dir]]] if name[clear_log] begin[:] if name[items] begin[:] variable[items] assign[=] call[name[list], parameter[call[name[set], parameter[name[items]]]]] call[name[items].sort, parameter[]] for taget[name[index]] in starred[name[items]] begin[:] call[name[context].invoke, parameter[name[delete]]] call[name[click].echo, parameter[call[constant[Exported results history to archive: {0}].format, parameter[name[archive_path]]]]]
keyword[def] identifier[archive] ( identifier[context] , identifier[clear_log] , identifier[items] , identifier[path] , identifier[name] ): literal[string] identifier[history_log] = identifier[context] . identifier[obj] [ literal[string] ] identifier[no_color] = identifier[context] . identifier[obj] [ literal[string] ] keyword[with] identifier[open] ( identifier[history_log] , literal[string] ) keyword[as] identifier[f] : identifier[history_items] = identifier[f] . identifier[readlines] () keyword[if] identifier[items] : identifier[items] =[ identifier[int] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[items] . identifier[split] ( literal[string] )] identifier[lines] =[] keyword[for] identifier[index] keyword[in] identifier[items] : identifier[lines] . identifier[append] ( identifier[history_items] [ identifier[len] ( identifier[history_items] )- identifier[index] ]) identifier[history_items] = identifier[lines] keyword[with] identifier[tempfile] . identifier[TemporaryDirectory] () keyword[as] identifier[temp_dir] : keyword[for] identifier[item] keyword[in] identifier[history_items] : identifier[archive_history_item] ( identifier[item] , identifier[temp_dir] , identifier[no_color] ) identifier[file_name] = literal[string] . identifier[join] ([ identifier[name] , literal[string] ]) identifier[archive_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[file_name] ) keyword[with] identifier[tarfile] . identifier[open] ( identifier[archive_path] , literal[string] ) keyword[as] identifier[tar] : identifier[tar] . identifier[add] ( identifier[temp_dir] , identifier[arcname] = literal[string] ) keyword[if] identifier[clear_log] : keyword[if] identifier[items] : identifier[items] = identifier[list] ( identifier[set] ( identifier[items] )) identifier[items] . identifier[sort] () keyword[for] identifier[index] keyword[in] identifier[items] : identifier[context] . identifier[invoke] ( identifier[delete] , identifier[item] = identifier[index] ) keyword[else] : identifier[context] . identifier[invoke] ( identifier[clear] ) identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[archive_path] ) )
def archive(context, clear_log, items, path, name): """ Archive the history log and all results/log files. After archive is created optionally clear the history log. """ history_log = context.obj['history_log'] no_color = context.obj['no_color'] with open(history_log, 'r') as f: # Get history items history_items = f.readlines() # depends on [control=['with'], data=['f']] if items: # Split comma separated list and cast indices to integer. items = [int(item) for item in items.split(',')] lines = [] for index in items: lines.append(history_items[len(history_items) - index]) # depends on [control=['for'], data=['index']] history_items = lines # depends on [control=['if'], data=[]] with tempfile.TemporaryDirectory() as temp_dir: for item in history_items: # Copy log and results file, # update results file with relative path. archive_history_item(item, temp_dir, no_color) # depends on [control=['for'], data=['item']] file_name = ''.join([name, '.tar.gz']) archive_path = os.path.join(path, file_name) with tarfile.open(archive_path, 'w:gz') as tar: # Create tar archive tar.add(temp_dir, arcname='results') # depends on [control=['with'], data=['tar']] # depends on [control=['with'], data=['temp_dir']] if clear_log: if items: # Remove duplicates to prevent unwanted deletion. items = list(set(items)) # Must delete items from bottom to top of history file # to preserve indices. (Index 0 is last item in file) items.sort() for index in items: context.invoke(delete, item=index) # depends on [control=['for'], data=['index']] # depends on [control=['if'], data=[]] else: context.invoke(clear) # depends on [control=['if'], data=[]] click.echo('Exported results history to archive: {0}'.format(archive_path))
def slice(string, separator="-", start=None, end=None): """Slice out a segment of a string, which is splitted on both the wildcards and the separator passed in, if any """ # split by wildcards/keywords first # AR-{sampleType}-{parentId}{alpha:3a2d} segments = filter(None, re.split('(\{.+?\})', string)) # ['AR-', '{sampleType}', '-', '{parentId}', '{alpha:3a2d}'] if separator: # Keep track of singleton separators as empties # We need to do this to prevent duplicates later, when splitting segments = map(lambda seg: seg!=separator and seg or "", segments) # ['AR-', '{sampleType}', '', '{parentId}', '{alpha:3a2d}'] # Split each segment at the given separator segments = map(lambda seg: split(seg, separator), segments) # [['AR', ''], ['{sampleType}'], [''], ['{parentId}'], ['{alpha:3a2d}']] # Flatten the list segments = list(itertools.chain.from_iterable(segments)) # ['AR', '', '{sampleType}', '', '{parentId}', '{alpha:3a2d}'] # And replace empties with separator segments = map(lambda seg: seg!="" and seg or separator, segments) # ['AR', '-', '{sampleType}', '-', '{parentId}', '{alpha:3a2d}'] # Get the start and end positions from the segments without separator cleaned_segments = filter(lambda seg: seg!=separator, segments) start_pos = to_int(start, 0) # Note "end" is not a position, but the number of elements to join! end_pos = to_int(end, len(cleaned_segments) - start_pos) + start_pos - 1 # Map the positions against the segments with separator start = segments.index(cleaned_segments[start_pos]) end = segments.index(cleaned_segments[end_pos]) + 1 # Return all segments joined sliced_parts = segments[start:end] return "".join(sliced_parts)
def function[slice, parameter[string, separator, start, end]]:
    constant[Slice out a segment of a string, which is split
    on both the wildcards and the separator passed in, if any
    ]
    variable[segments] assign[=] call[name[filter], parameter[constant[None], call[name[re].split, parameter[constant[(\{.+?\})], name[string]]]]]
    if name[separator] begin[:]
        variable[segments] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da204344b80>, name[segments]]]
        variable[segments] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da18bc71180>, name[segments]]]
        variable[segments] assign[=] call[name[list], parameter[call[name[itertools].chain.from_iterable, parameter[name[segments]]]]]
        variable[segments] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da18dc04610>, name[segments]]]
    variable[cleaned_segments] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da207f00130>, name[segments]]]
    variable[start_pos] assign[=] call[name[to_int], parameter[name[start], constant[0]]]
    variable[end_pos] assign[=] binary_operation[binary_operation[call[name[to_int], parameter[name[end], binary_operation[call[name[len], parameter[name[cleaned_segments]]] - name[start_pos]]]] + name[start_pos]] - constant[1]]
    variable[start] assign[=] call[name[segments].index, parameter[call[name[cleaned_segments]][name[start_pos]]]]
    variable[end] assign[=] binary_operation[call[name[segments].index, parameter[call[name[cleaned_segments]][name[end_pos]]]] + constant[1]]
    variable[sliced_parts] assign[=] call[name[segments]][<ast.Slice object at 0x7da2047ebbe0>]
    return[call[constant[].join, parameter[name[sliced_parts]]]]
keyword[def] identifier[slice] ( identifier[string] , identifier[separator] = literal[string] , identifier[start] = keyword[None] , identifier[end] = keyword[None] ): literal[string] identifier[segments] = identifier[filter] ( keyword[None] , identifier[re] . identifier[split] ( literal[string] , identifier[string] )) keyword[if] identifier[separator] : identifier[segments] = identifier[map] ( keyword[lambda] identifier[seg] : identifier[seg] != identifier[separator] keyword[and] identifier[seg] keyword[or] literal[string] , identifier[segments] ) identifier[segments] = identifier[map] ( keyword[lambda] identifier[seg] : identifier[split] ( identifier[seg] , identifier[separator] ), identifier[segments] ) identifier[segments] = identifier[list] ( identifier[itertools] . identifier[chain] . identifier[from_iterable] ( identifier[segments] )) identifier[segments] = identifier[map] ( keyword[lambda] identifier[seg] : identifier[seg] != literal[string] keyword[and] identifier[seg] keyword[or] identifier[separator] , identifier[segments] ) identifier[cleaned_segments] = identifier[filter] ( keyword[lambda] identifier[seg] : identifier[seg] != identifier[separator] , identifier[segments] ) identifier[start_pos] = identifier[to_int] ( identifier[start] , literal[int] ) identifier[end_pos] = identifier[to_int] ( identifier[end] , identifier[len] ( identifier[cleaned_segments] )- identifier[start_pos] )+ identifier[start_pos] - literal[int] identifier[start] = identifier[segments] . identifier[index] ( identifier[cleaned_segments] [ identifier[start_pos] ]) identifier[end] = identifier[segments] . identifier[index] ( identifier[cleaned_segments] [ identifier[end_pos] ])+ literal[int] identifier[sliced_parts] = identifier[segments] [ identifier[start] : identifier[end] ] keyword[return] literal[string] . identifier[join] ( identifier[sliced_parts] )
def slice(string, separator='-', start=None, end=None):
    """Slice out a segment of a string, which is split
    on both the wildcards and the separator passed in, if any
    """
    # split by wildcards/keywords first
    # AR-{sampleType}-{parentId}{alpha:3a2d}
    segments = filter(None, re.split('(\\{.+?\\})', string))
    # ['AR-', '{sampleType}', '-', '{parentId}', '{alpha:3a2d}']
    if separator:
        # Keep track of singleton separators as empties
        # We need to do this to prevent duplicates later, when splitting
        segments = map(lambda seg: seg != separator and seg or '', segments)
        # ['AR-', '{sampleType}', '', '{parentId}', '{alpha:3a2d}']
        # Split each segment at the given separator
        segments = map(lambda seg: split(seg, separator), segments)
        # [['AR', ''], ['{sampleType}'], [''], ['{parentId}'], ['{alpha:3a2d}']]
        # Flatten the list
        segments = list(itertools.chain.from_iterable(segments))
        # ['AR', '', '{sampleType}', '', '{parentId}', '{alpha:3a2d}']
        # And replace empties with separator
        segments = map(lambda seg: seg != '' and seg or separator, segments) # depends on [control=['if'], data=[]]
    # ['AR', '-', '{sampleType}', '-', '{parentId}', '{alpha:3a2d}']
    # Get the start and end positions from the segments without separator
    cleaned_segments = filter(lambda seg: seg != separator, segments)
    start_pos = to_int(start, 0)
    # Note "end" is not a position, but the number of elements to join!
    end_pos = to_int(end, len(cleaned_segments) - start_pos) + start_pos - 1
    # Map the positions against the segments with separator
    start = segments.index(cleaned_segments[start_pos])
    end = segments.index(cleaned_segments[end_pos]) + 1
    # Return all segments joined
    sliced_parts = segments[start:end]
    return ''.join(sliced_parts)
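The first splitting stage can be checked in isolation; this reproduces the inline walk-through from the comments above.

import re

s = "AR-{sampleType}-{parentId}{alpha:3a2d}"
print([seg for seg in re.split(r'(\{.+?\})', s) if seg])
# ['AR-', '{sampleType}', '-', '{parentId}', '{alpha:3a2d}']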
def creator(request, slug):
    """
    :param request: Django request object.
    :param slug: The slug identifying the creator to display.
    :return: TemplateResponse
    """
    # Resolve the creator from the publicly visible queryset; missing or
    # hidden creators raise Http404.
    item = get_object_or_404(models.CreatorBase.objects.visible(), slug=slug)
    if not item:
        raise Http404

    context = RequestContext(request, {
        'page': item,
        'creator': item,
    })
    template = 'gk_collections/creator.html'

    return TemplateResponse(request, template, context)
def function[creator, parameter[request, slug]]:
    constant[
    :param request: Django request object.
    :param slug: The slug identifying the creator to display.
    :return: TemplateResponse
    ]
    variable[item] assign[=] call[name[get_object_or_404], parameter[call[name[models].CreatorBase.objects.visible, parameter[]]]]
    if <ast.UnaryOp object at 0x7da20c9936d0> begin[:]
        <ast.Raise object at 0x7da20c9911b0>
    variable[context] assign[=] call[name[RequestContext], parameter[name[request], dictionary[[<ast.Constant object at 0x7da20c993220>, <ast.Constant object at 0x7da20c993cd0>], [<ast.Name object at 0x7da20c993df0>, <ast.Name object at 0x7da20c992140>]]]]
    variable[template] assign[=] constant[gk_collections/creator.html]
    return[call[name[TemplateResponse], parameter[name[request], name[template], name[context]]]]
keyword[def] identifier[creator] ( identifier[request] , identifier[slug] ): literal[string] identifier[item] = identifier[get_object_or_404] ( identifier[models] . identifier[CreatorBase] . identifier[objects] . identifier[visible] (), identifier[slug] = identifier[slug] ) keyword[if] keyword[not] identifier[item] : keyword[raise] identifier[Http404] identifier[context] = identifier[RequestContext] ( identifier[request] ,{ literal[string] : identifier[item] , literal[string] : identifier[item] , }) identifier[template] = literal[string] keyword[return] identifier[TemplateResponse] ( identifier[request] , identifier[template] , identifier[context] )
def creator(request, slug): """ :param request: Django request object. :param slug: The slug identifying the creator to display. :return: TemplateResponse """ # Resolve the creator from the publicly visible queryset; missing or # hidden creators raise Http404. item = get_object_or_404(models.CreatorBase.objects.visible(), slug=slug) if not item: raise Http404 # depends on [control=['if'], data=[]] context = RequestContext(request, {'page': item, 'creator': item}) template = 'gk_collections/creator.html' return TemplateResponse(request, template, context)
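Hypothetical URL wiring for the view above; the route pattern and names are assumptions, not taken from the source.

from django.urls import path

urlpatterns = [
    path('creators/<slug:slug>/', creator, name='creator'),
]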
def _GetNextLogCountPerToken(token): """Wrapper for _log_counter_per_token. Args: token: The token for which to look up the count. Returns: The number of times this function has been called with *token* as an argument (starting at 0) """ global _log_counter_per_token # pylint: disable=global-variable-not-assigned _log_counter_per_token[token] = 1 + _log_counter_per_token.get(token, -1) return _log_counter_per_token[token]
def function[_GetNextLogCountPerToken, parameter[token]]: constant[Wrapper for _log_counter_per_token. Args: token: The token for which to look up the count. Returns: The number of times this function has been called with *token* as an argument (starting at 0) ] <ast.Global object at 0x7da1b01dabc0> call[name[_log_counter_per_token]][name[token]] assign[=] binary_operation[constant[1] + call[name[_log_counter_per_token].get, parameter[name[token], <ast.UnaryOp object at 0x7da1b007a5c0>]]] return[call[name[_log_counter_per_token]][name[token]]]
keyword[def] identifier[_GetNextLogCountPerToken] ( identifier[token] ): literal[string] keyword[global] identifier[_log_counter_per_token] identifier[_log_counter_per_token] [ identifier[token] ]= literal[int] + identifier[_log_counter_per_token] . identifier[get] ( identifier[token] ,- literal[int] ) keyword[return] identifier[_log_counter_per_token] [ identifier[token] ]
def _GetNextLogCountPerToken(token): """Wrapper for _log_counter_per_token. Args: token: The token for which to look up the count. Returns: The number of times this function has been called with *token* as an argument (starting at 0) """ global _log_counter_per_token # pylint: disable=global-variable-not-assigned _log_counter_per_token[token] = 1 + _log_counter_per_token.get(token, -1) return _log_counter_per_token[token]
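A minimal sketch of the per-token counting, assuming the module-level dict the function mutates.

_log_counter_per_token = {}  # module-level store assumed by the function

print(_GetNextLogCountPerToken('spam'))  # 0 -- first call for this token
print(_GetNextLogCountPerToken('spam'))  # 1
print(_GetNextLogCountPerToken('eggs'))  # 0 -- counts are kept per token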
def parse(self, string, parent): """Parses all the value code elements from the specified string.""" result = {} for member in self.RE_MEMBERS.finditer(string): mems = self._process_member(member, parent, string) #The regex match could contain multiple members that were defined #on the same line in the code file. for onemem in mems: result[onemem.name.lower()] = onemem return result
def function[parse, parameter[self, string, parent]]: constant[Parses all the value code elements from the specified string.] variable[result] assign[=] dictionary[[], []] for taget[name[member]] in starred[call[name[self].RE_MEMBERS.finditer, parameter[name[string]]]] begin[:] variable[mems] assign[=] call[name[self]._process_member, parameter[name[member], name[parent], name[string]]] for taget[name[onemem]] in starred[name[mems]] begin[:] call[name[result]][call[name[onemem].name.lower, parameter[]]] assign[=] name[onemem] return[name[result]]
keyword[def] identifier[parse] ( identifier[self] , identifier[string] , identifier[parent] ): literal[string] identifier[result] ={} keyword[for] identifier[member] keyword[in] identifier[self] . identifier[RE_MEMBERS] . identifier[finditer] ( identifier[string] ): identifier[mems] = identifier[self] . identifier[_process_member] ( identifier[member] , identifier[parent] , identifier[string] ) keyword[for] identifier[onemem] keyword[in] identifier[mems] : identifier[result] [ identifier[onemem] . identifier[name] . identifier[lower] ()]= identifier[onemem] keyword[return] identifier[result]
def parse(self, string, parent): """Parses all the value code elements from the specified string.""" result = {} for member in self.RE_MEMBERS.finditer(string): mems = self._process_member(member, parent, string) #The regex match could contain multiple members that were defined #on the same line in the code file. for onemem in mems: result[onemem.name.lower()] = onemem # depends on [control=['for'], data=['onemem']] # depends on [control=['for'], data=['member']] return result
def render(self, **kwargs): """Renders the HTML representation of the element.""" if isinstance(self._parent, GeoJson): keys = tuple(self._parent.data['features'][0]['properties'].keys()) self.warn_for_geometry_collections() elif isinstance(self._parent, TopoJson): obj_name = self._parent.object_path.split('.')[-1] keys = tuple(self._parent.data['objects'][obj_name][ 'geometries'][0]['properties'].keys()) else: raise TypeError('You cannot add a GeoJsonTooltip to anything else ' 'than a GeoJson or TopoJson object.') keys = tuple(x for x in keys if x not in ('style', 'highlight')) for value in self.fields: assert value in keys, ('The field {} is not available in the data. ' 'Choose from: {}.'.format(value, keys)) super(GeoJsonTooltip, self).render(**kwargs)
def function[render, parameter[self]]: constant[Renders the HTML representation of the element.] if call[name[isinstance], parameter[name[self]._parent, name[GeoJson]]] begin[:] variable[keys] assign[=] call[name[tuple], parameter[call[call[call[call[name[self]._parent.data][constant[features]]][constant[0]]][constant[properties]].keys, parameter[]]]] call[name[self].warn_for_geometry_collections, parameter[]] variable[keys] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da18c4ccca0>]] for taget[name[value]] in starred[name[self].fields] begin[:] assert[compare[name[value] in name[keys]]] call[call[name[super], parameter[name[GeoJsonTooltip], name[self]]].render, parameter[]]
keyword[def] identifier[render] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[isinstance] ( identifier[self] . identifier[_parent] , identifier[GeoJson] ): identifier[keys] = identifier[tuple] ( identifier[self] . identifier[_parent] . identifier[data] [ literal[string] ][ literal[int] ][ literal[string] ]. identifier[keys] ()) identifier[self] . identifier[warn_for_geometry_collections] () keyword[elif] identifier[isinstance] ( identifier[self] . identifier[_parent] , identifier[TopoJson] ): identifier[obj_name] = identifier[self] . identifier[_parent] . identifier[object_path] . identifier[split] ( literal[string] )[- literal[int] ] identifier[keys] = identifier[tuple] ( identifier[self] . identifier[_parent] . identifier[data] [ literal[string] ][ identifier[obj_name] ][ literal[string] ][ literal[int] ][ literal[string] ]. identifier[keys] ()) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] literal[string] ) identifier[keys] = identifier[tuple] ( identifier[x] keyword[for] identifier[x] keyword[in] identifier[keys] keyword[if] identifier[x] keyword[not] keyword[in] ( literal[string] , literal[string] )) keyword[for] identifier[value] keyword[in] identifier[self] . identifier[fields] : keyword[assert] identifier[value] keyword[in] identifier[keys] ,( literal[string] literal[string] . identifier[format] ( identifier[value] , identifier[keys] )) identifier[super] ( identifier[GeoJsonTooltip] , identifier[self] ). identifier[render] (** identifier[kwargs] )
def render(self, **kwargs): """Renders the HTML representation of the element.""" if isinstance(self._parent, GeoJson): keys = tuple(self._parent.data['features'][0]['properties'].keys()) self.warn_for_geometry_collections() # depends on [control=['if'], data=[]] elif isinstance(self._parent, TopoJson): obj_name = self._parent.object_path.split('.')[-1] keys = tuple(self._parent.data['objects'][obj_name]['geometries'][0]['properties'].keys()) # depends on [control=['if'], data=[]] else: raise TypeError('You cannot add a GeoJsonTooltip to anything else than a GeoJson or TopoJson object.') keys = tuple((x for x in keys if x not in ('style', 'highlight'))) for value in self.fields: assert value in keys, 'The field {} is not available in the data. Choose from: {}.'.format(value, keys) # depends on [control=['for'], data=['value']] super(GeoJsonTooltip, self).render(**kwargs)
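A hedged folium usage sketch; 'regions.geojson' and the 'name' property are placeholders for real data.

import folium

m = folium.Map(location=[48.0, 5.0], zoom_start=4)
folium.GeoJson(
    'regions.geojson',  # hypothetical file whose features carry a 'name' property
    tooltip=folium.GeoJsonTooltip(fields=['name']),
).add_to(m)
# render() above then asserts that 'name' exists in the feature properties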
def glimpse(self, *tags, compact = False): """Creates a printable table with the most frequently occurring values of each of the requested _tags_, or if none are provided the top authors, journals and citations. The table will be as wide and as tall as the terminal (or 80x24 if there is no terminal) so `print(RC.glimpse())`should always create a nice looking table. Below is a table created from some of the testing files: ``` >>> print(RC.glimpse()) +RecordCollection glimpse made at: 2016-01-01 12:00:00++++++++++++++++++++++++++ |33 Records from testFile++++++++++++++++++++++++++++++++++++++++++++++++++++++| |Columns are ranked by num. of occurrences and are independent of one another++| |-------Top Authors--------+------Top Journals-------+--------Top Cited--------| |1 Girard, S|1 CANADIAN JOURNAL OF PH.|1 LEVY Y, 1975, OPT COMM.| |1 Gilles, H|1 JOURNAL OF THE OPTICAL.|2 GOOS F, 1947, ANN PHYS.| |2 IMBERT, C|2 APPLIED OPTICS|3 LOTSCH HKV, 1970, OPTI.| |2 Pillon, F|2 OPTICS COMMUNICATIONS|4 RENARD RH, 1964, J OPT.| |3 BEAUREGARD, OCD|2 NUOVO CIMENTO DELLA SO.|5 IMBERT C, 1972, PHYS R.| |3 Laroche, M|2 JOURNAL OF THE OPTICAL.|6 ARTMANN K, 1948, ANN P.| |3 HUARD, S|2 JOURNAL OF THE OPTICAL.|6 COSTADEB.O, 1973, PHYS.| |4 PURI, A|2 NOUVELLE REVUE D OPTIQ.|6 ROOSEN G, 1973, CR ACA.| |4 COSTADEB.O|3 PHYSICS REPORTS-REVIEW.|7 Imbert C., 1972, Nouve.| |4 PATTANAYAK, DN|3 PHYSICAL REVIEW LETTERS|8 HOROWITZ BR, 1971, J O.| |4 Gazibegovic, A|3 USPEKHI FIZICHESKIKH N.|8 BRETENAKER F, 1992, PH.| |4 ROOSEN, G|3 APPLIED PHYSICS B-LASE.|8 SCHILLIN.H, 1965, ANN .| |4 BIRMAN, JL|3 AEU-INTERNATIONAL JOUR.|8 FEDOROV FI, 1955, DOKL.| |4 Kaiser, R|3 COMPTES RENDUS HEBDOMA.|8 MAZET A, 1971, CR ACAD.| |5 LEVY, Y|3 CHINESE PHYSICS LETTERS|9 IMBERT C, 1972, CR ACA.| |5 BEAUREGA.OC|3 PHYSICAL REVIEW B|9 LOTSCH HKV, 1971, OPTI.| |5 PAVLOV, VI|3 LETTERE AL NUOVO CIMEN.|9 ASHBY N, 1973, PHYS RE.| |5 BREVIK, I|3 PROGRESS IN QUANTUM EL.|9 BOULWARE DG, 1973, PHY.| >>> ``` # Parameters _tags_ : `str, str, ...` > Any number of tag strings to be made into columns in the output table # Returns `str` > A string containing the table """ return _glimpse(self, *tags, compact = compact)
def function[glimpse, parameter[self]]: constant[Creates a printable table with the most frequently occurring values of each of the requested _tags_, or if none are provided the top authors, journals and citations. The table will be as wide and as tall as the terminal (or 80x24 if there is no terminal) so `print(RC.glimpse())`should always create a nice looking table. Below is a table created from some of the testing files: ``` >>> print(RC.glimpse()) +RecordCollection glimpse made at: 2016-01-01 12:00:00++++++++++++++++++++++++++ |33 Records from testFile++++++++++++++++++++++++++++++++++++++++++++++++++++++| |Columns are ranked by num. of occurrences and are independent of one another++| |-------Top Authors--------+------Top Journals-------+--------Top Cited--------| |1 Girard, S|1 CANADIAN JOURNAL OF PH.|1 LEVY Y, 1975, OPT COMM.| |1 Gilles, H|1 JOURNAL OF THE OPTICAL.|2 GOOS F, 1947, ANN PHYS.| |2 IMBERT, C|2 APPLIED OPTICS|3 LOTSCH HKV, 1970, OPTI.| |2 Pillon, F|2 OPTICS COMMUNICATIONS|4 RENARD RH, 1964, J OPT.| |3 BEAUREGARD, OCD|2 NUOVO CIMENTO DELLA SO.|5 IMBERT C, 1972, PHYS R.| |3 Laroche, M|2 JOURNAL OF THE OPTICAL.|6 ARTMANN K, 1948, ANN P.| |3 HUARD, S|2 JOURNAL OF THE OPTICAL.|6 COSTADEB.O, 1973, PHYS.| |4 PURI, A|2 NOUVELLE REVUE D OPTIQ.|6 ROOSEN G, 1973, CR ACA.| |4 COSTADEB.O|3 PHYSICS REPORTS-REVIEW.|7 Imbert C., 1972, Nouve.| |4 PATTANAYAK, DN|3 PHYSICAL REVIEW LETTERS|8 HOROWITZ BR, 1971, J O.| |4 Gazibegovic, A|3 USPEKHI FIZICHESKIKH N.|8 BRETENAKER F, 1992, PH.| |4 ROOSEN, G|3 APPLIED PHYSICS B-LASE.|8 SCHILLIN.H, 1965, ANN .| |4 BIRMAN, JL|3 AEU-INTERNATIONAL JOUR.|8 FEDOROV FI, 1955, DOKL.| |4 Kaiser, R|3 COMPTES RENDUS HEBDOMA.|8 MAZET A, 1971, CR ACAD.| |5 LEVY, Y|3 CHINESE PHYSICS LETTERS|9 IMBERT C, 1972, CR ACA.| |5 BEAUREGA.OC|3 PHYSICAL REVIEW B|9 LOTSCH HKV, 1971, OPTI.| |5 PAVLOV, VI|3 LETTERE AL NUOVO CIMEN.|9 ASHBY N, 1973, PHYS RE.| |5 BREVIK, I|3 PROGRESS IN QUANTUM EL.|9 BOULWARE DG, 1973, PHY.| >>> ``` # Parameters _tags_ : `str, str, ...` > Any number of tag strings to be made into columns in the output table # Returns `str` > A string containing the table ] return[call[name[_glimpse], parameter[name[self], <ast.Starred object at 0x7da18bccb760>]]]
keyword[def] identifier[glimpse] ( identifier[self] ,* identifier[tags] , identifier[compact] = keyword[False] ): literal[string] keyword[return] identifier[_glimpse] ( identifier[self] ,* identifier[tags] , identifier[compact] = identifier[compact] )
def glimpse(self, *tags, compact=False): """Creates a printable table with the most frequently occurring values of each of the requested _tags_, or if none are provided the top authors, journals and citations. The table will be as wide and as tall as the terminal (or 80x24 if there is no terminal) so `print(RC.glimpse())`should always create a nice looking table. Below is a table created from some of the testing files: ``` >>> print(RC.glimpse()) +RecordCollection glimpse made at: 2016-01-01 12:00:00++++++++++++++++++++++++++ |33 Records from testFile++++++++++++++++++++++++++++++++++++++++++++++++++++++| |Columns are ranked by num. of occurrences and are independent of one another++| |-------Top Authors--------+------Top Journals-------+--------Top Cited--------| |1 Girard, S|1 CANADIAN JOURNAL OF PH.|1 LEVY Y, 1975, OPT COMM.| |1 Gilles, H|1 JOURNAL OF THE OPTICAL.|2 GOOS F, 1947, ANN PHYS.| |2 IMBERT, C|2 APPLIED OPTICS|3 LOTSCH HKV, 1970, OPTI.| |2 Pillon, F|2 OPTICS COMMUNICATIONS|4 RENARD RH, 1964, J OPT.| |3 BEAUREGARD, OCD|2 NUOVO CIMENTO DELLA SO.|5 IMBERT C, 1972, PHYS R.| |3 Laroche, M|2 JOURNAL OF THE OPTICAL.|6 ARTMANN K, 1948, ANN P.| |3 HUARD, S|2 JOURNAL OF THE OPTICAL.|6 COSTADEB.O, 1973, PHYS.| |4 PURI, A|2 NOUVELLE REVUE D OPTIQ.|6 ROOSEN G, 1973, CR ACA.| |4 COSTADEB.O|3 PHYSICS REPORTS-REVIEW.|7 Imbert C., 1972, Nouve.| |4 PATTANAYAK, DN|3 PHYSICAL REVIEW LETTERS|8 HOROWITZ BR, 1971, J O.| |4 Gazibegovic, A|3 USPEKHI FIZICHESKIKH N.|8 BRETENAKER F, 1992, PH.| |4 ROOSEN, G|3 APPLIED PHYSICS B-LASE.|8 SCHILLIN.H, 1965, ANN .| |4 BIRMAN, JL|3 AEU-INTERNATIONAL JOUR.|8 FEDOROV FI, 1955, DOKL.| |4 Kaiser, R|3 COMPTES RENDUS HEBDOMA.|8 MAZET A, 1971, CR ACAD.| |5 LEVY, Y|3 CHINESE PHYSICS LETTERS|9 IMBERT C, 1972, CR ACA.| |5 BEAUREGA.OC|3 PHYSICAL REVIEW B|9 LOTSCH HKV, 1971, OPTI.| |5 PAVLOV, VI|3 LETTERE AL NUOVO CIMEN.|9 ASHBY N, 1973, PHYS RE.| |5 BREVIK, I|3 PROGRESS IN QUANTUM EL.|9 BOULWARE DG, 1973, PHY.| >>> ``` # Parameters _tags_ : `str, str, ...` > Any number of tag strings to be made into columns in the output table # Returns `str` > A string containing the table """ return _glimpse(self, *tags, compact=compact)
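A hedged usage sketch for `glimpse`: it assumes a metaknowledge-style `RecordCollection` (`mk.RecordCollection`), and the Web of Science export file name `savedrecs.txt` is a placeholder.

```
import metaknowledge as mk  # assumed available given the RecordCollection context

RC = mk.RecordCollection("savedrecs.txt")  # placeholder WOS export file
print(RC.glimpse())                        # default columns: authors, journals, cited
print(RC.glimpse("year", "title", compact=True))  # one column per requested tag
```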
def from_file(cls, image_descriptor): """ Return a new |Image| subclass instance loaded from the image file identified by *image_descriptor*, a path or file-like object. """ if is_string(image_descriptor): path = image_descriptor with open(path, 'rb') as f: blob = f.read() stream = BytesIO(blob) filename = os.path.basename(path) else: stream = image_descriptor stream.seek(0) blob = stream.read() filename = None return cls._from_stream(stream, blob, filename)
def function[from_file, parameter[cls, image_descriptor]]: constant[ Return a new |Image| subclass instance loaded from the image file identified by *image_descriptor*, a path or file-like object. ] if call[name[is_string], parameter[name[image_descriptor]]] begin[:] variable[path] assign[=] name[image_descriptor] with call[name[open], parameter[name[path], constant[rb]]] begin[:] variable[blob] assign[=] call[name[f].read, parameter[]] variable[stream] assign[=] call[name[BytesIO], parameter[name[blob]]] variable[filename] assign[=] call[name[os].path.basename, parameter[name[path]]] return[call[name[cls]._from_stream, parameter[name[stream], name[blob], name[filename]]]]
keyword[def] identifier[from_file] ( identifier[cls] , identifier[image_descriptor] ): literal[string] keyword[if] identifier[is_string] ( identifier[image_descriptor] ): identifier[path] = identifier[image_descriptor] keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] : identifier[blob] = identifier[f] . identifier[read] () identifier[stream] = identifier[BytesIO] ( identifier[blob] ) identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[path] ) keyword[else] : identifier[stream] = identifier[image_descriptor] identifier[stream] . identifier[seek] ( literal[int] ) identifier[blob] = identifier[stream] . identifier[read] () identifier[filename] = keyword[None] keyword[return] identifier[cls] . identifier[_from_stream] ( identifier[stream] , identifier[blob] , identifier[filename] )
def from_file(cls, image_descriptor): """ Return a new |Image| subclass instance loaded from the image file identified by *image_descriptor*, a path or file-like object. """ if is_string(image_descriptor): path = image_descriptor with open(path, 'rb') as f: blob = f.read() stream = BytesIO(blob) # depends on [control=['with'], data=['f']] filename = os.path.basename(path) # depends on [control=['if'], data=[]] else: stream = image_descriptor stream.seek(0) blob = stream.read() filename = None return cls._from_stream(stream, blob, filename)
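A minimal sketch of the two accepted input kinds; `Image` stands in for the concrete subclass this classmethod lives on, and `logo.png` is a placeholder path.

```
from io import BytesIO

img = Image.from_file("logo.png")   # path string: blob is read, filename preserved

with open("logo.png", "rb") as f:   # file-like object: filename ends up None
    img2 = Image.from_file(BytesIO(f.read()))
```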
def _group_by_area(self, datasets): """Group datasets by their area.""" def _area_id(area_def): return area_def.name + str(area_def.area_extent) + str(area_def.shape) # get all of the datasets stored by area area_datasets = {} for x in datasets: area_id = _area_id(x.attrs['area']) area, ds_list = area_datasets.setdefault(area_id, (x.attrs['area'], [])) ds_list.append(x) return area_datasets
def function[_group_by_area, parameter[self, datasets]]: constant[Group datasets by their area.] def function[_area_id, parameter[area_def]]: return[binary_operation[binary_operation[name[area_def].name + call[name[str], parameter[name[area_def].area_extent]]] + call[name[str], parameter[name[area_def].shape]]]] variable[area_datasets] assign[=] dictionary[[], []] for taget[name[x]] in starred[name[datasets]] begin[:] variable[area_id] assign[=] call[name[_area_id], parameter[call[name[x].attrs][constant[area]]]] <ast.Tuple object at 0x7da1b1d6fa00> assign[=] call[name[area_datasets].setdefault, parameter[name[area_id], tuple[[<ast.Subscript object at 0x7da1b1d6ea70>, <ast.List object at 0x7da1b1d6fbe0>]]]] call[name[ds_list].append, parameter[name[x]]] return[name[area_datasets]]
keyword[def] identifier[_group_by_area] ( identifier[self] , identifier[datasets] ): literal[string] keyword[def] identifier[_area_id] ( identifier[area_def] ): keyword[return] identifier[area_def] . identifier[name] + identifier[str] ( identifier[area_def] . identifier[area_extent] )+ identifier[str] ( identifier[area_def] . identifier[shape] ) identifier[area_datasets] ={} keyword[for] identifier[x] keyword[in] identifier[datasets] : identifier[area_id] = identifier[_area_id] ( identifier[x] . identifier[attrs] [ literal[string] ]) identifier[area] , identifier[ds_list] = identifier[area_datasets] . identifier[setdefault] ( identifier[area_id] ,( identifier[x] . identifier[attrs] [ literal[string] ],[])) identifier[ds_list] . identifier[append] ( identifier[x] ) keyword[return] identifier[area_datasets]
def _group_by_area(self, datasets): """Group datasets by their area.""" def _area_id(area_def): return area_def.name + str(area_def.area_extent) + str(area_def.shape) # get all of the datasets stored by area area_datasets = {} for x in datasets: area_id = _area_id(x.attrs['area']) (area, ds_list) = area_datasets.setdefault(area_id, (x.attrs['area'], [])) ds_list.append(x) # depends on [control=['for'], data=['x']] return area_datasets
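The `dict.setdefault` idiom the method relies on, shown standalone: the first-seen key object is kept alongside a growing list, mirroring the `(area, ds_list)` pairing above.

```
items = [("euro", 1), ("asia", 2), ("euro", 3)]
groups = {}
for key, val in items:
    _, vals = groups.setdefault(key, (key, []))  # (key, []) is created once per key
    vals.append(val)

assert groups["euro"] == ("euro", [1, 3])
assert groups["asia"] == ("asia", [2])
```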
def restart_local(drain=False): ''' Restart the traffic_manager and traffic_server processes on the local node. drain This option modifies the restart behavior such that ``traffic_server`` is not shut down until the number of active client connections drops to the number given by the ``proxy.config.restart.active_client_threshold`` configuration variable. .. code-block:: bash salt '*' trafficserver.restart_local salt '*' trafficserver.restart_local drain=True ''' if _TRAFFICCTL: cmd = _traffic_ctl('server', 'restart', '--manager') else: cmd = _traffic_line('-L') if drain: cmd = cmd + ['--drain'] return _subprocess(cmd)
def function[restart_local, parameter[drain]]: constant[ Restart the traffic_manager and traffic_server processes on the local node. drain This option modifies the restart behavior such that ``traffic_server`` is not shut down until the number of active client connections drops to the number given by the ``proxy.config.restart.active_client_threshold`` configuration variable. .. code-block:: bash salt '*' trafficserver.restart_local salt '*' trafficserver.restart_local drain=True ] if name[_TRAFFICCTL] begin[:] variable[cmd] assign[=] call[name[_traffic_ctl], parameter[constant[server], constant[restart], constant[--manager]]] if name[drain] begin[:] variable[cmd] assign[=] binary_operation[name[cmd] + list[[<ast.Constant object at 0x7da1b26ae560>]]] return[call[name[_subprocess], parameter[name[cmd]]]]
keyword[def] identifier[restart_local] ( identifier[drain] = keyword[False] ): literal[string] keyword[if] identifier[_TRAFFICCTL] : identifier[cmd] = identifier[_traffic_ctl] ( literal[string] , literal[string] , literal[string] ) keyword[else] : identifier[cmd] = identifier[_traffic_line] ( literal[string] ) keyword[if] identifier[drain] : identifier[cmd] = identifier[cmd] +[ literal[string] ] keyword[return] identifier[_subprocess] ( identifier[cmd] )
def restart_local(drain=False): """ Restart the traffic_manager and traffic_server processes on the local node. drain This option modifies the restart behavior such that ``traffic_server`` is not shut down until the number of active client connections drops to the number given by the ``proxy.config.restart.active_client_threshold`` configuration variable. .. code-block:: bash salt '*' trafficserver.restart_local salt '*' trafficserver.restart_local drain=True """ if _TRAFFICCTL: cmd = _traffic_ctl('server', 'restart', '--manager') # depends on [control=['if'], data=[]] else: cmd = _traffic_line('-L') if drain: cmd = cmd + ['--drain'] # depends on [control=['if'], data=[]] return _subprocess(cmd)
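A sketch of how the argv list is assembled; the `_traffic_ctl` stand-in only models the helper's apparent contract (returning a command list) and is an assumption.

```
def _traffic_ctl(*args):  # stand-in for the real helper elsewhere in the module
    return ["traffic_ctl"] + list(args)

cmd = _traffic_ctl("server", "restart", "--manager")
cmd = cmd + ["--drain"]  # appended only when drain=True
assert cmd == ["traffic_ctl", "server", "restart", "--manager", "--drain"]
```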
def walk_files(mgr):
    """
    Iterate over all files visible to ``mgr``.
    """
    # assumption: mgr exposes an os.walk-style walk(); a self-recursive
    # walk_files(mgr) call here would never terminate
    for dir_, subdirs, files in mgr.walk():
        for file_ in files:
            yield file_
def function[walk_files, parameter[mgr]]: constant[ Iterate over all files visible to ``mgr``. ] for taget[tuple[[<ast.Name object at 0x7da20c6e4a90>, <ast.Name object at 0x7da18ede4f10>, <ast.Name object at 0x7da18ede4a60>]]] in starred[call[name[walk_files], parameter[name[mgr]]]] begin[:] for taget[name[file_]] in starred[name[files]] begin[:] <ast.Yield object at 0x7da18ede4100>
keyword[def] identifier[walk_files] ( identifier[mgr] ): literal[string] keyword[for] identifier[dir_] , identifier[subdirs] , identifier[files] keyword[in] identifier[walk_files] ( identifier[mgr] ): keyword[for] identifier[file_] keyword[in] identifier[files] : keyword[yield] identifier[file_]
def walk_files(mgr):
    """
    Iterate over all files visible to ``mgr``.
    """
    # assumption: mgr exposes an os.walk-style walk(); a self-recursive
    # walk_files(mgr) call here would never terminate
    for (dir_, subdirs, files) in mgr.walk():
        for file_ in files:
            yield file_ # depends on [control=['for'], data=['file_']] # depends on [control=['for'], data=[]]
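Consuming the generator with a stand-in manager; `FakeMgr.walk()` models the os.walk-style API assumed in the fix above.

```
class FakeMgr:
    def walk(self):  # os.walk-style triples: (dirpath, dirnames, filenames)
        yield ("/root", ["sub"], ["a.txt", "b.txt"])
        yield ("/root/sub", [], ["c.txt"])

assert list(walk_files(FakeMgr())) == ["a.txt", "b.txt", "c.txt"]
```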
def setup_model(self, model, org_model_ws, new_model_ws):
    """ Set up the flopy.mbase instance for use with multiplier parameters.
    Changes model_ws, sets external_path and writes new MODFLOW input
    files

    Parameters
    ----------
    model : flopy.mbase
        flopy model instance

    org_model_ws : str
        the original model working space

    new_model_ws : str
        the new model working space

    """
    split_new_mws = [i for i in os.path.split(new_model_ws) if len(i) > 0]
    if len(split_new_mws) != 1:
        self.logger.lraise("new_model_ws can only be 1 folder-level deep:{0}".
                           format(str(split_new_mws)))

    if isinstance(model, str):
        self.log("loading flopy model")
        try:
            import flopy
        except ImportError:
            raise Exception("from_flopy_model() requires flopy")
        # prepare the flopy model
        self.org_model_ws = org_model_ws
        self.new_model_ws = new_model_ws
        self.m = flopy.modflow.Modflow.load(model, model_ws=org_model_ws,
                                            check=False, verbose=True, forgive=False)
        self.log("loading flopy model")
    else:
        self.m = model
        self.org_model_ws = str(self.m.model_ws)
        self.new_model_ws = new_model_ws

    self.log("updating model attributes")
    self.m.array_free_format = True
    self.m.free_format_input = True
    self.m.external_path = '.'
    self.log("updating model attributes")

    if os.path.exists(new_model_ws):
        if not self.remove_existing:
            self.logger.lraise("'new_model_ws' already exists")
        else:
            self.logger.warn("removing existing 'new_model_ws'")
            shutil.rmtree(new_model_ws, onerror=pyemu.os_utils.remove_readonly)
            time.sleep(1)
    self.m.change_model_ws(new_model_ws, reset_external=True)
    self.m.exe_name = self.m.exe_name.replace(".exe", '')
    self.m.exe = self.m.version
    self.log("writing new modflow input files")
    self.m.write_input()
    self.log("writing new modflow input files")
def function[setup_model, parameter[self, model, org_model_ws, new_model_ws]]: constant[ setup the flopy.mbase instance for use with multipler parameters. Changes model_ws, sets external_path and writes new MODFLOW input files Parameters ---------- model : flopy.mbase flopy model instance org_model_ws : str the orginal model working space new_model_ws : str the new model working space ] variable[split_new_mws] assign[=] <ast.ListComp object at 0x7da1b1d96fe0> if compare[call[name[len], parameter[name[split_new_mws]]] not_equal[!=] constant[1]] begin[:] call[name[self].logger.lraise, parameter[call[constant[new_model_ws can only be 1 folder-level deep:{0}].format, parameter[call[name[str], parameter[name[split_new_mws]]]]]]] if call[name[isinstance], parameter[name[model], name[str]]] begin[:] call[name[self].log, parameter[constant[loading flopy model]]] <ast.Try object at 0x7da1b1d95000> name[self].org_model_ws assign[=] name[org_model_ws] name[self].new_model_ws assign[=] name[new_model_ws] name[self].m assign[=] call[name[flopy].modflow.Modflow.load, parameter[name[model]]] call[name[self].log, parameter[constant[loading flopy model]]] call[name[self].log, parameter[constant[updating model attributes]]] name[self].m.array_free_format assign[=] constant[True] name[self].m.free_format_input assign[=] constant[True] name[self].m.external_path assign[=] constant[.] call[name[self].log, parameter[constant[updating model attributes]]] if call[name[os].path.exists, parameter[name[new_model_ws]]] begin[:] if <ast.UnaryOp object at 0x7da1b1d6e6b0> begin[:] call[name[self].logger.lraise, parameter[constant['new_model_ws' already exists]]] call[name[self].m.change_model_ws, parameter[name[new_model_ws]]] name[self].m.exe_name assign[=] call[name[self].m.exe_name.replace, parameter[constant[.exe], constant[]]] name[self].m.exe assign[=] name[self].m.version call[name[self].log, parameter[constant[writing new modflow input files]]] call[name[self].m.write_input, parameter[]] call[name[self].log, parameter[constant[writing new modflow input files]]]
keyword[def] identifier[setup_model] ( identifier[self] , identifier[model] , identifier[org_model_ws] , identifier[new_model_ws] ): literal[string] identifier[split_new_mws] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[os] . identifier[path] . identifier[split] ( identifier[new_model_ws] ) keyword[if] identifier[len] ( identifier[i] )> literal[int] ] keyword[if] identifier[len] ( identifier[split_new_mws] )!= literal[int] : identifier[self] . identifier[logger] . identifier[lraise] ( literal[string] . identifier[format] ( identifier[str] ( identifier[split_new_mws] ))) keyword[if] identifier[isinstance] ( identifier[model] , identifier[str] ): identifier[self] . identifier[log] ( literal[string] ) keyword[try] : keyword[import] identifier[flopy] keyword[except] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[self] . identifier[org_model_ws] = identifier[org_model_ws] identifier[self] . identifier[new_model_ws] = identifier[new_model_ws] identifier[self] . identifier[m] = identifier[flopy] . identifier[modflow] . identifier[Modflow] . identifier[load] ( identifier[model] , identifier[model_ws] = identifier[org_model_ws] , identifier[check] = keyword[False] , identifier[verbose] = keyword[True] , identifier[forgive] = keyword[False] ) identifier[self] . identifier[log] ( literal[string] ) keyword[else] : identifier[self] . identifier[m] = identifier[model] identifier[self] . identifier[org_model_ws] = identifier[str] ( identifier[self] . identifier[m] . identifier[model_ws] ) identifier[self] . identifier[new_model_ws] = identifier[new_model_ws] identifier[self] . identifier[log] ( literal[string] ) identifier[self] . identifier[m] . identifier[array_free_format] = keyword[True] identifier[self] . identifier[m] . identifier[free_format_input] = keyword[True] identifier[self] . identifier[m] . identifier[external_path] = literal[string] identifier[self] . identifier[log] ( literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[new_model_ws] ): keyword[if] keyword[not] identifier[self] . identifier[remove_existing] : identifier[self] . identifier[logger] . identifier[lraise] ( literal[string] ) keyword[else] : identifier[self] . identifier[logger] . identifier[warn] ( literal[string] ) identifier[shutil] . identifier[rmtree] ( identifier[new_model_ws] , identifier[onerror] = identifier[pyemu] . identifier[os_utils] . identifier[remove_readonly] ) identifier[time] . identifier[sleep] ( literal[int] ) identifier[self] . identifier[m] . identifier[change_model_ws] ( identifier[new_model_ws] , identifier[reset_external] = keyword[True] ) identifier[self] . identifier[m] . identifier[exe_name] = identifier[self] . identifier[m] . identifier[exe_name] . identifier[replace] ( literal[string] , literal[string] ) identifier[self] . identifier[m] . identifier[exe] = identifier[self] . identifier[m] . identifier[version] identifier[self] . identifier[log] ( literal[string] ) identifier[self] . identifier[m] . identifier[write_input] () identifier[self] . identifier[log] ( literal[string] )
def setup_model(self, model, org_model_ws, new_model_ws): """ setup the flopy.mbase instance for use with multipler parameters. Changes model_ws, sets external_path and writes new MODFLOW input files Parameters ---------- model : flopy.mbase flopy model instance org_model_ws : str the orginal model working space new_model_ws : str the new model working space """ split_new_mws = [i for i in os.path.split(new_model_ws) if len(i) > 0] if len(split_new_mws) != 1: self.logger.lraise('new_model_ws can only be 1 folder-level deep:{0}'.format(str(split_new_mws))) # depends on [control=['if'], data=[]] if isinstance(model, str): self.log('loading flopy model') try: import flopy # depends on [control=['try'], data=[]] except: raise Exception('from_flopy_model() requires flopy') # depends on [control=['except'], data=[]] # prepare the flopy model self.org_model_ws = org_model_ws self.new_model_ws = new_model_ws self.m = flopy.modflow.Modflow.load(model, model_ws=org_model_ws, check=False, verbose=True, forgive=False) self.log('loading flopy model') # depends on [control=['if'], data=[]] else: self.m = model self.org_model_ws = str(self.m.model_ws) self.new_model_ws = new_model_ws self.log('updating model attributes') self.m.array_free_format = True self.m.free_format_input = True self.m.external_path = '.' self.log('updating model attributes') if os.path.exists(new_model_ws): if not self.remove_existing: self.logger.lraise("'new_model_ws' already exists") # depends on [control=['if'], data=[]] else: self.logger.warn("removing existing 'new_model_ws") shutil.rmtree(new_model_ws, onerror=pyemu.os_utils.remove_readonly) time.sleep(1) # depends on [control=['if'], data=[]] self.m.change_model_ws(new_model_ws, reset_external=True) self.m.exe_name = self.m.exe_name.replace('.exe', '') self.m.exe = self.m.version self.log('writing new modflow input files') self.m.write_input() self.log('writing new modflow input files')
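The one-folder-deep guard from the top of `setup_model`, extracted as a standalone check:

```
import os

def is_one_level_deep(path):
    return len([p for p in os.path.split(path) if len(p) > 0]) == 1

assert is_one_level_deep("template")
assert not is_one_level_deep(os.path.join("runs", "template"))
```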
def create_dataset(group, name, data, units='', datatype=DataTypes.UNDEFINED, chunks=True, maxshape=None, compression=None, **attributes): """Create an ARF dataset under group, setting required attributes Required arguments: name -- the name of dataset in which to store the data data -- the data to store Data can be of the following types: * sampled data: an N-D numerical array of measurements * "simple" event data: a 1-D array of times * "complex" event data: a 1-D array of records, with field 'start' required Optional arguments: datatype -- a code defining the nature of the data in the channel units -- channel units (optional for sampled data, otherwise required) sampling_rate -- required for sampled data and event data with units=='samples' Arguments passed to h5py: maxshape -- make the node resizable up to this shape. Use None for axes that need to be unlimited. chunks -- specify the chunk size. The optimal chunk size depends on the intended use of the data. For single-channel sampled data the auto-chunking (True) is probably best. compression -- compression strategy. Can be 'gzip', 'szip', 'lzf' or an integer in range(10) specifying gzip(N). Only gzip is really portable. Additional arguments are set as attributes on the created dataset Returns the created dataset """ from numpy import asarray srate = attributes.get('sampling_rate', None) # check data validity before doing anything if not hasattr(data, 'dtype'): data = asarray(data) if data.dtype.kind in ('S', 'O', 'U'): raise ValueError( "data must be in array with numeric or compound type") if data.dtype.kind == 'V': if 'start' not in data.dtype.names: raise ValueError("complex event data requires 'start' field") if not isinstance(units, (list, tuple)): raise ValueError("complex event data requires sequence of units") if not len(units) == len(data.dtype.names): raise ValueError("number of units doesn't match number of fields") if units == '': if srate is None or not srate > 0: raise ValueError( "unitless data assumed time series and requires sampling_rate attribute") elif units == 'samples': if srate is None or not srate > 0: raise ValueError( "data with units of 'samples' requires sampling_rate attribute") # NB: can't really catch case where sampled data has units but doesn't # have sampling_rate attribute dset = group.create_dataset( name, data=data, maxshape=maxshape, chunks=chunks, compression=compression) set_attributes(dset, units=units, datatype=datatype, **attributes) return dset
def function[create_dataset, parameter[group, name, data, units, datatype, chunks, maxshape, compression]]: constant[Create an ARF dataset under group, setting required attributes Required arguments: name -- the name of dataset in which to store the data data -- the data to store Data can be of the following types: * sampled data: an N-D numerical array of measurements * "simple" event data: a 1-D array of times * "complex" event data: a 1-D array of records, with field 'start' required Optional arguments: datatype -- a code defining the nature of the data in the channel units -- channel units (optional for sampled data, otherwise required) sampling_rate -- required for sampled data and event data with units=='samples' Arguments passed to h5py: maxshape -- make the node resizable up to this shape. Use None for axes that need to be unlimited. chunks -- specify the chunk size. The optimal chunk size depends on the intended use of the data. For single-channel sampled data the auto-chunking (True) is probably best. compression -- compression strategy. Can be 'gzip', 'szip', 'lzf' or an integer in range(10) specifying gzip(N). Only gzip is really portable. Additional arguments are set as attributes on the created dataset Returns the created dataset ] from relative_module[numpy] import module[asarray] variable[srate] assign[=] call[name[attributes].get, parameter[constant[sampling_rate], constant[None]]] if <ast.UnaryOp object at 0x7da1b117ae90> begin[:] variable[data] assign[=] call[name[asarray], parameter[name[data]]] if compare[name[data].dtype.kind in tuple[[<ast.Constant object at 0x7da1b1179de0>, <ast.Constant object at 0x7da1b1179cc0>, <ast.Constant object at 0x7da1b117add0>]]] begin[:] <ast.Raise object at 0x7da1b117b520> if compare[name[data].dtype.kind equal[==] constant[V]] begin[:] if compare[constant[start] <ast.NotIn object at 0x7da2590d7190> name[data].dtype.names] begin[:] <ast.Raise object at 0x7da1b117b640> if <ast.UnaryOp object at 0x7da1b117abc0> begin[:] <ast.Raise object at 0x7da1b117aa10> if <ast.UnaryOp object at 0x7da1b117ae60> begin[:] <ast.Raise object at 0x7da1b2346320> if compare[name[units] equal[==] constant[]] begin[:] if <ast.BoolOp object at 0x7da1b2346860> begin[:] <ast.Raise object at 0x7da1b2344f10> variable[dset] assign[=] call[name[group].create_dataset, parameter[name[name]]] call[name[set_attributes], parameter[name[dset]]] return[name[dset]]
keyword[def] identifier[create_dataset] ( identifier[group] , identifier[name] , identifier[data] , identifier[units] = literal[string] , identifier[datatype] = identifier[DataTypes] . identifier[UNDEFINED] , identifier[chunks] = keyword[True] , identifier[maxshape] = keyword[None] , identifier[compression] = keyword[None] , ** identifier[attributes] ): literal[string] keyword[from] identifier[numpy] keyword[import] identifier[asarray] identifier[srate] = identifier[attributes] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] keyword[not] identifier[hasattr] ( identifier[data] , literal[string] ): identifier[data] = identifier[asarray] ( identifier[data] ) keyword[if] identifier[data] . identifier[dtype] . identifier[kind] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[data] . identifier[dtype] . identifier[kind] == literal[string] : keyword[if] literal[string] keyword[not] keyword[in] identifier[data] . identifier[dtype] . identifier[names] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[units] ,( identifier[list] , identifier[tuple] )): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[len] ( identifier[units] )== identifier[len] ( identifier[data] . identifier[dtype] . identifier[names] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[units] == literal[string] : keyword[if] identifier[srate] keyword[is] keyword[None] keyword[or] keyword[not] identifier[srate] > literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[elif] identifier[units] == literal[string] : keyword[if] identifier[srate] keyword[is] keyword[None] keyword[or] keyword[not] identifier[srate] > literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[dset] = identifier[group] . identifier[create_dataset] ( identifier[name] , identifier[data] = identifier[data] , identifier[maxshape] = identifier[maxshape] , identifier[chunks] = identifier[chunks] , identifier[compression] = identifier[compression] ) identifier[set_attributes] ( identifier[dset] , identifier[units] = identifier[units] , identifier[datatype] = identifier[datatype] ,** identifier[attributes] ) keyword[return] identifier[dset]
def create_dataset(group, name, data, units='', datatype=DataTypes.UNDEFINED, chunks=True, maxshape=None, compression=None, **attributes): """Create an ARF dataset under group, setting required attributes Required arguments: name -- the name of dataset in which to store the data data -- the data to store Data can be of the following types: * sampled data: an N-D numerical array of measurements * "simple" event data: a 1-D array of times * "complex" event data: a 1-D array of records, with field 'start' required Optional arguments: datatype -- a code defining the nature of the data in the channel units -- channel units (optional for sampled data, otherwise required) sampling_rate -- required for sampled data and event data with units=='samples' Arguments passed to h5py: maxshape -- make the node resizable up to this shape. Use None for axes that need to be unlimited. chunks -- specify the chunk size. The optimal chunk size depends on the intended use of the data. For single-channel sampled data the auto-chunking (True) is probably best. compression -- compression strategy. Can be 'gzip', 'szip', 'lzf' or an integer in range(10) specifying gzip(N). Only gzip is really portable. Additional arguments are set as attributes on the created dataset Returns the created dataset """ from numpy import asarray srate = attributes.get('sampling_rate', None) # check data validity before doing anything if not hasattr(data, 'dtype'): data = asarray(data) if data.dtype.kind in ('S', 'O', 'U'): raise ValueError('data must be in array with numeric or compound type') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if data.dtype.kind == 'V': if 'start' not in data.dtype.names: raise ValueError("complex event data requires 'start' field") # depends on [control=['if'], data=[]] if not isinstance(units, (list, tuple)): raise ValueError('complex event data requires sequence of units') # depends on [control=['if'], data=[]] if not len(units) == len(data.dtype.names): raise ValueError("number of units doesn't match number of fields") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if units == '': if srate is None or not srate > 0: raise ValueError('unitless data assumed time series and requires sampling_rate attribute') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif units == 'samples': if srate is None or not srate > 0: raise ValueError("data with units of 'samples' requires sampling_rate attribute") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # NB: can't really catch case where sampled data has units but doesn't # have sampling_rate attribute dset = group.create_dataset(name, data=data, maxshape=maxshape, chunks=chunks, compression=compression) set_attributes(dset, units=units, datatype=datatype, **attributes) return dset
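A hedged usage sketch: a unitless sampled dataset, which therefore must carry `sampling_rate`. It assumes h5py is installed and the `create_dataset` above is importable; the file, group, and dataset names are placeholders.

```
import h5py
import numpy as np

with h5py.File("example.arf", "w") as fp:
    entry = fp.create_group("entry_0")
    dset = create_dataset(entry, "pcm_000",
                          np.zeros(1000, dtype="int16"),
                          units="",             # unitless -> sampling_rate required
                          sampling_rate=20000)
    print(dset.shape)
```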
def categorical_partition_data(data):
    """Convenience method for creating weights from categorical data.

    Args:
        data (list-like): The data from which to construct the estimate.

    Returns:
        A new partition object::

            {
                "values": (list) The categorical values present in the data,
                "weights": (list) The weights of the values in the partition.
            }
    """
    # Make dropna explicit (even though it defaults to True)
    series = pd.Series(data)
    value_counts = series.value_counts(dropna=True)

    # Compute weights using only the count of non-null values as the denominator
    null_indexes = series.isnull()
    nonnull_count = (~null_indexes).sum()
    weights = value_counts.values / nonnull_count

    return {
        "values": value_counts.index.tolist(),
        "weights": weights
    }
def function[categorical_partition_data, parameter[data]]: constant[Convenience method for creating weights from categorical data. Args: data (list-like): The data from which to construct the estimate. Returns: A new partition object:: { "partition": (list) The categorical values present in the data "weights": (list) The weights of the values in the partition. } ] variable[series] assign[=] call[name[pd].Series, parameter[name[data]]] variable[value_counts] assign[=] call[name[series].value_counts, parameter[]] variable[null_indexes] assign[=] call[name[series].isnull, parameter[]] variable[nonnull_count] assign[=] call[compare[name[null_indexes] equal[==] constant[False]].sum, parameter[]] variable[weights] assign[=] binary_operation[name[value_counts].values / name[nonnull_count]] return[dictionary[[<ast.Constant object at 0x7da1b19b9450>, <ast.Constant object at 0x7da1b19b8e50>], [<ast.Call object at 0x7da1b19ba950>, <ast.Name object at 0x7da1b19bacb0>]]]
keyword[def] identifier[categorical_partition_data] ( identifier[data] ): literal[string] identifier[series] = identifier[pd] . identifier[Series] ( identifier[data] ) identifier[value_counts] = identifier[series] . identifier[value_counts] ( identifier[dropna] = keyword[True] ) identifier[null_indexes] = identifier[series] . identifier[isnull] () identifier[nonnull_count] =( identifier[null_indexes] == keyword[False] ). identifier[sum] () identifier[weights] = identifier[value_counts] . identifier[values] / identifier[nonnull_count] keyword[return] { literal[string] : identifier[value_counts] . identifier[index] . identifier[tolist] (), literal[string] : identifier[weights] }
def categorical_partition_data(data): """Convenience method for creating weights from categorical data. Args: data (list-like): The data from which to construct the estimate. Returns: A new partition object:: { "partition": (list) The categorical values present in the data "weights": (list) The weights of the values in the partition. } """ # Make dropna explicit (even though it defaults to true) series = pd.Series(data) value_counts = series.value_counts(dropna=True) # Compute weights using denominator only of nonnull values null_indexes = series.isnull() nonnull_count = (null_indexes == False).sum() weights = value_counts.values / nonnull_count return {'values': value_counts.index.tolist(), 'weights': weights}
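A worked example: with one null among four values, the weights are computed over the three non-null observations.

```
partition = categorical_partition_data(["a", "b", "a", None])
print(partition["values"])   # ['a', 'b']
print(partition["weights"])  # [0.666..., 0.333...] -- i.e. 2/3 and 1/3
```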
def write(self, keyArgs): """ Write specified key arguments into data structure. """ # bytearray doesn't work with fcntl args = array.array('B', (0,) * self.size) self._struct.pack_into(args, 0, *list(self.yieldArgs(keyArgs))) return args
def function[write, parameter[self, keyArgs]]: constant[ Write specified key arguments into data structure. ] variable[args] assign[=] call[name[array].array, parameter[constant[B], binary_operation[tuple[[<ast.Constant object at 0x7da1b28f5540>]] * name[self].size]]] call[name[self]._struct.pack_into, parameter[name[args], constant[0], <ast.Starred object at 0x7da1b28f5ed0>]] return[name[args]]
keyword[def] identifier[write] ( identifier[self] , identifier[keyArgs] ): literal[string] identifier[args] = identifier[array] . identifier[array] ( literal[string] ,( literal[int] ,)* identifier[self] . identifier[size] ) identifier[self] . identifier[_struct] . identifier[pack_into] ( identifier[args] , literal[int] ,* identifier[list] ( identifier[self] . identifier[yieldArgs] ( identifier[keyArgs] ))) keyword[return] identifier[args]
def write(self, keyArgs): """ Write specified key arguments into data structure. """ # bytearray doesn't work with fcntl args = array.array('B', (0,) * self.size) self._struct.pack_into(args, 0, *list(self.yieldArgs(keyArgs))) return args
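The pack-into-`array.array('B')` trick shown standalone, with a hypothetical two-field layout standing in for `self._struct`:

```
import array
import struct

layout = struct.Struct("<HI")               # hypothetical two-field layout
buf = array.array("B", (0,) * layout.size)  # zeroed, writable buffer
layout.pack_into(buf, 0, 7, 42)
print(buf.tobytes().hex())                  # 07002a000000 (little-endian)
```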
def apply_effect(layer, image):
    """Apply effect to the image.

    .. note:: Correct effect order is the following. All the effects are
        first applied to the original image then blended together.

        * dropshadow
        * outerglow
        * (original)
        * patternoverlay
        * gradientoverlay
        * coloroverlay
        * innershadow
        * innerglow
        * bevelemboss
        * satin
        * stroke
    """
    for effect in layer.effects:
        if effect.__class__.__name__ == 'PatternOverlay':
            draw_pattern_fill(image, layer._psd, effect.value)

    for effect in layer.effects:
        if effect.__class__.__name__ == 'GradientOverlay':
            draw_gradient_fill(image, effect.value)

    for effect in layer.effects:
        if effect.__class__.__name__ == 'ColorOverlay':
            draw_solid_color_fill(image, effect.value)
def function[apply_effect, parameter[layer, image]]: constant[Apply effect to the image. ..note: Correct effect order is the following. All the effects are first applied to the original image then blended together. * dropshadow * outerglow * (original) * patternoverlay * gradientoverlay * coloroverlay * innershadow * innerglow * bevelemboss * satin * stroke ] for taget[name[effect]] in starred[name[layer].effects] begin[:] if compare[name[effect].__class__.__name__ equal[==] constant[PatternOverlay]] begin[:] call[name[draw_pattern_fill], parameter[name[image], name[layer]._psd, name[effect].value]] for taget[name[effect]] in starred[name[layer].effects] begin[:] if compare[name[effect].__class__.__name__ equal[==] constant[GradientOverlay]] begin[:] call[name[draw_gradient_fill], parameter[name[image], name[effect].value]] for taget[name[effect]] in starred[name[layer].effects] begin[:] if compare[name[effect].__class__.__name__ equal[==] constant[ColorOverlay]] begin[:] call[name[draw_solid_color_fill], parameter[name[image], name[effect].value]]
keyword[def] identifier[apply_effect] ( identifier[layer] , identifier[image] ): literal[string] keyword[for] identifier[effect] keyword[in] identifier[layer] . identifier[effects] : keyword[if] identifier[effect] . identifier[__class__] . identifier[__name__] == literal[string] : identifier[draw_pattern_fill] ( identifier[image] , identifier[layer] . identifier[_psd] , identifier[effect] . identifier[value] ) keyword[for] identifier[effect] keyword[in] identifier[layer] . identifier[effects] : keyword[if] identifier[effect] . identifier[__class__] . identifier[__name__] == literal[string] : identifier[draw_gradient_fill] ( identifier[image] , identifier[effect] . identifier[value] ) keyword[for] identifier[effect] keyword[in] identifier[layer] . identifier[effects] : keyword[if] identifier[effect] . identifier[__class__] . identifier[__name__] == literal[string] : identifier[draw_solid_color_fill] ( identifier[image] , identifier[effect] . identifier[value] )
def apply_effect(layer, image): """Apply effect to the image. ..note: Correct effect order is the following. All the effects are first applied to the original image then blended together. * dropshadow * outerglow * (original) * patternoverlay * gradientoverlay * coloroverlay * innershadow * innerglow * bevelemboss * satin * stroke """ for effect in layer.effects: if effect.__class__.__name__ == 'PatternOverlay': draw_pattern_fill(image, layer._psd, effect.value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['effect']] for effect in layer.effects: if effect.__class__.__name__ == 'GradientOverlay': draw_gradient_fill(image, effect.value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['effect']] for effect in layer.effects: if effect.__class__.__name__ == 'ColorOverlay': draw_solid_color_fill(image, effect.value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['effect']]
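A toy model of the ordered multi-pass dispatch used above: effects apply in a fixed class-name order regardless of their order in the effects list.

```
class GradientOverlay:
    value = "fade"

class ColorOverlay:
    value = "red"

effects = [ColorOverlay(), GradientOverlay()]          # arbitrary input order
for cls_name in ("GradientOverlay", "ColorOverlay"):   # enforced pass order
    for effect in effects:
        if effect.__class__.__name__ == cls_name:
            print("applying", cls_name, effect.value)
# applying GradientOverlay fade
# applying ColorOverlay red
```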
def get_source_var_declaration(self, var): """ Return the source mapping where the variable is declared Args: var (str): variable name Returns: (dict): sourceMapping """ return next((x.source_mapping for x in self.variables if x.name == var))
def function[get_source_var_declaration, parameter[self, var]]: constant[ Return the source mapping where the variable is declared Args: var (str): variable name Returns: (dict): sourceMapping ] return[call[name[next], parameter[<ast.GeneratorExp object at 0x7da20c7c8c10>]]]
keyword[def] identifier[get_source_var_declaration] ( identifier[self] , identifier[var] ): literal[string] keyword[return] identifier[next] (( identifier[x] . identifier[source_mapping] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[variables] keyword[if] identifier[x] . identifier[name] == identifier[var] ))
def get_source_var_declaration(self, var): """ Return the source mapping where the variable is declared Args: var (str): variable name Returns: (dict): sourceMapping """ return next((x.source_mapping for x in self.variables if x.name == var))
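A stand-in sketch; note that `next()` without a default raises `StopIteration` when no variable matches, so callers that can miss should pass one.

```
class Var:  # stand-in for the real variable objects
    def __init__(self, name, source_mapping):
        self.name = name
        self.source_mapping = source_mapping

variables = [Var("owner", {"start": 10}), Var("paused", {"start": 42})]
mapping = next(x.source_mapping for x in variables if x.name == "paused")
assert mapping == {"start": 42}
missing = next((x.source_mapping for x in variables if x.name == "nope"), None)
assert missing is None
```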
def _iter_step_func_decorators(self): """Find functions with step decorator in parsed file.""" for node in self.py_tree.find_all('def'): for decorator in node.decorators: if decorator.name.value == 'step': yield node, decorator break
def function[_iter_step_func_decorators, parameter[self]]: constant[Find functions with step decorator in parsed file.] for taget[name[node]] in starred[call[name[self].py_tree.find_all, parameter[constant[def]]]] begin[:] for taget[name[decorator]] in starred[name[node].decorators] begin[:] if compare[name[decorator].name.value equal[==] constant[step]] begin[:] <ast.Yield object at 0x7da1b025e6e0> break
keyword[def] identifier[_iter_step_func_decorators] ( identifier[self] ): literal[string] keyword[for] identifier[node] keyword[in] identifier[self] . identifier[py_tree] . identifier[find_all] ( literal[string] ): keyword[for] identifier[decorator] keyword[in] identifier[node] . identifier[decorators] : keyword[if] identifier[decorator] . identifier[name] . identifier[value] == literal[string] : keyword[yield] identifier[node] , identifier[decorator] keyword[break]
def _iter_step_func_decorators(self): """Find functions with step decorator in parsed file.""" for node in self.py_tree.find_all('def'): for decorator in node.decorators: if decorator.name.value == 'step': yield (node, decorator) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['decorator']] # depends on [control=['for'], data=['node']]
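A hedged sketch of the traversal; that `py_tree` is a RedBaron tree is an assumption inferred from the `find_all`/`decorators` usage, and the decorator text is a placeholder.

```
from redbaron import RedBaron  # assumed dependency

tree = RedBaron("@step('I log in')\ndef login():\n    pass\n")
for node in tree.find_all("def"):
    for decorator in node.decorators:
        print(decorator.name.value)  # -> 'step', the name the check above matches
```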
def route_level(root, level): """ Helper method to recurse the current node and return the specified routing node level. """ def recurse(nodes): for node in nodes: if node.level == level: routing_node.append(node) else: recurse(node) routing_node = [] recurse(root) return routing_node
def function[route_level, parameter[root, level]]: constant[ Helper method to recurse the current node and return the specified routing node level. ] def function[recurse, parameter[nodes]]: for taget[name[node]] in starred[name[nodes]] begin[:] if compare[name[node].level equal[==] name[level]] begin[:] call[name[routing_node].append, parameter[name[node]]] variable[routing_node] assign[=] list[[]] call[name[recurse], parameter[name[root]]] return[name[routing_node]]
keyword[def] identifier[route_level] ( identifier[root] , identifier[level] ): literal[string] keyword[def] identifier[recurse] ( identifier[nodes] ): keyword[for] identifier[node] keyword[in] identifier[nodes] : keyword[if] identifier[node] . identifier[level] == identifier[level] : identifier[routing_node] . identifier[append] ( identifier[node] ) keyword[else] : identifier[recurse] ( identifier[node] ) identifier[routing_node] =[] identifier[recurse] ( identifier[root] ) keyword[return] identifier[routing_node]
def route_level(root, level): """ Helper method to recurse the current node and return the specified routing node level. """ def recurse(nodes): for node in nodes: if node.level == level: routing_node.append(node) # depends on [control=['if'], data=[]] else: recurse(node) # depends on [control=['for'], data=['node']] routing_node = [] recurse(root) return routing_node
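A toy tree for `route_level`; `Node` is a stand-in that is iterable over its children and carries a `level` attribute, the only two things the helper relies on.

```
class Node(list):  # children by iteration, plus a routing level
    def __init__(self, level, children=()):
        super().__init__(children)
        self.level = level

root = [Node(1, [Node(2), Node(2, [Node(3)])]), Node(2)]
assert len(route_level(root, 2)) == 3  # two nested level-2 nodes plus one top-level
```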
def convert_data_array(arr, filter_func=None, converter_func=None):
    '''Filter and convert any given data array of any dtype.

    Parameters
    ----------
    arr : numpy.array
        Data array of any dtype.
    filter_func : function
        Function that takes an array and returns True or False for each item in the array.
    converter_func : function
        Function that takes an array and returns an array or tuple of arrays.

    Returns
    -------
    numpy.array after filtering (filter_func) and conversion (converter_func)
    '''
    if filter_func:
        arr = arr[filter_func(arr)]  # indexing with a boolean array keeps matching items
    if converter_func:
        arr = converter_func(arr)
    return arr
def function[convert_data_array, parameter[arr, filter_func, converter_func]]: constant[Filter and convert any given data array of any dtype. Parameters ---------- arr : numpy.array Data array of any dtype. filter_func : function Function that takes array and returns true or false for each item in array. converter_func : function Function that takes array and returns an array or tuple of arrays. Returns ------- array of specified dimension (converter_func) and content (filter_func) ] if name[filter_func] begin[:] variable[array] assign[=] call[name[arr]][call[name[filter_func], parameter[name[arr]]]] if name[converter_func] begin[:] variable[arr] assign[=] call[name[converter_func], parameter[name[arr]]] return[name[array]]
keyword[def] identifier[convert_data_array] ( identifier[arr] , identifier[filter_func] = keyword[None] , identifier[converter_func] = keyword[None] ): literal[string] keyword[if] identifier[filter_func] : identifier[array] = identifier[arr] [ identifier[filter_func] ( identifier[arr] )] keyword[if] identifier[converter_func] : identifier[arr] = identifier[converter_func] ( identifier[arr] ) keyword[return] identifier[array]
def convert_data_array(arr, filter_func=None, converter_func=None): """Filter and convert any given data array of any dtype. Parameters ---------- arr : numpy.array Data array of any dtype. filter_func : function Function that takes an array and returns True or False for each item in the array. converter_func : function Function that takes an array and returns an array or tuple of arrays. Returns ------- numpy.array after filtering (filter_func) and conversion (converter_func) """ if filter_func: arr = arr[filter_func(arr)] # indexing with a boolean array keeps matching items # depends on [control=['if'], data=[]] if converter_func: arr = converter_func(arr) # depends on [control=['if'], data=[]] return arr
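Usage under the corrected flow: the filter keeps values above a threshold and the converter then scales the filtered array.

```
import numpy as np

arr = np.array([1, 5, 2, 8, 3])
out = convert_data_array(arr,
                         filter_func=lambda a: a > 2,
                         converter_func=lambda a: a * 10)
print(out)  # [50 80 30]
```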
def generate_help(self, filename="", command=""):
    """
    :type command: str
    """
    # usage line: "<filename> [cmd1|cmd2|...]" (assumed intent of the leading format call)
    help_str = "{} [{}]\n\n".format(filename, "|".join(self.available_command_list))
    help_str += """Available commands:

    """
    command_list = self.available_command_list if command == "" else [command]

    for command in command_list:
        cmd_meta = self.get_command_metainfo(command)
        """:type cmd_meta: ModuleMetaInfo"""

        if cmd_meta is None:
            continue

        args = {}
        args.update(cmd_meta.get_arguments_builder().arguments)
        args.update(cmd_meta.get_arguments_builder().default_arguments)
        cmd_arguments_help = {name: value.item_help for name, value in args.items() if value.item_help}

        if len(cmd_arguments_help) > 0:
            help_str += """
    {cmd} [{args}] - {cmd_help}

    Argument details:
    {arg_details}


    """.format(
                cmd=command,
                args=" | ".join(cmd_arguments_help.keys()),
                cmd_help=cmd_meta.help,
                arg_details="\n".join(["{} - {}".format(k, v) for k, v in cmd_arguments_help.items()])
            )
        else:
            help_str += """
    {cmd} - {cmd_help}""".format(
                cmd=command,
                cmd_help=cmd_meta.help
            )

    return help_str
def function[generate_help, parameter[self, filename, command]]: constant[ :type command str ] call[constant[{} [{}] ].format, parameter[name[filename], call[constant[|].join, parameter[name[self].available_command_list]]]] variable[help_str] assign[=] constant[Available commands: ] variable[command_list] assign[=] <ast.IfExp object at 0x7da20c76da20> for taget[name[command]] in starred[name[command_list]] begin[:] variable[cmd_meta] assign[=] call[name[self].get_command_metainfo, parameter[name[command]]] constant[:type cmd_meta ModuleMetaInfo] if compare[name[cmd_meta] is constant[None]] begin[:] continue variable[args] assign[=] dictionary[[], []] call[name[args].update, parameter[call[name[cmd_meta].get_arguments_builder, parameter[]].arguments]] call[name[args].update, parameter[call[name[cmd_meta].get_arguments_builder, parameter[]].default_arguments]] variable[cmd_arguments_help] assign[=] <ast.DictComp object at 0x7da20c76c280> if compare[call[name[len], parameter[name[cmd_arguments_help]]] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da20c76d150> return[name[help_str]]
keyword[def] identifier[generate_help] ( identifier[self] , identifier[filename] = literal[string] , identifier[command] = literal[string] ): literal[string] literal[string] . identifier[format] ( identifier[filename] , literal[string] . identifier[join] ( identifier[self] . identifier[available_command_list] )) identifier[help_str] = literal[string] identifier[command_list] = identifier[self] . identifier[available_command_list] keyword[if] identifier[command] == literal[string] keyword[else] [ identifier[command] ] keyword[for] identifier[command] keyword[in] identifier[command_list] : identifier[cmd_meta] = identifier[self] . identifier[get_command_metainfo] ( identifier[command] ) literal[string] keyword[if] identifier[cmd_meta] keyword[is] keyword[None] : keyword[continue] identifier[args] ={} identifier[args] . identifier[update] ( identifier[cmd_meta] . identifier[get_arguments_builder] (). identifier[arguments] ) identifier[args] . identifier[update] ( identifier[cmd_meta] . identifier[get_arguments_builder] (). identifier[default_arguments] ) identifier[cmd_arguments_help] ={ identifier[name] : identifier[value] . identifier[item_help] keyword[for] identifier[name] , identifier[value] keyword[in] identifier[args] . identifier[items] () keyword[if] identifier[value] . identifier[item_help] } keyword[if] identifier[len] ( identifier[cmd_arguments_help] )> literal[int] : identifier[help_str] += literal[string] . identifier[format] ( identifier[cmd] = identifier[command] , identifier[args] = literal[string] . identifier[join] ( identifier[cmd_arguments_help] . identifier[keys] ()), identifier[cmd_help] = identifier[cmd_meta] . identifier[help] , identifier[arg_details] = literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[k] , identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[cmd_arguments_help] . identifier[items] ()]) ) keyword[else] : identifier[help_str] += literal[string] . identifier[format] ( identifier[cmd] = identifier[command] , identifier[cmd_help] = identifier[cmd_meta] . identifier[help] ) keyword[return] identifier[help_str]
def generate_help(self, filename='', command=''):
    """
    :type command: str
    """
    # usage line: '<filename> [cmd1|cmd2|...]' (assumed intent of the leading format call)
    help_str = '{} [{}]\n\n'.format(filename, '|'.join(self.available_command_list))
    help_str += 'Available commands:\n\n    '
    command_list = self.available_command_list if command == '' else [command]
    for command in command_list:
        cmd_meta = self.get_command_metainfo(command)
        ':type cmd_meta: ModuleMetaInfo'
        if cmd_meta is None:
            continue # depends on [control=['if'], data=[]]
        args = {}
        args.update(cmd_meta.get_arguments_builder().arguments)
        args.update(cmd_meta.get_arguments_builder().default_arguments)
        cmd_arguments_help = {name: value.item_help for (name, value) in args.items() if value.item_help}
        if len(cmd_arguments_help) > 0:
            help_str += '\n    {cmd} [{args}] - {cmd_help}\n\n    Argument details:\n    {arg_details}\n\n\n    '.format(cmd=command, args=' | '.join(cmd_arguments_help.keys()), cmd_help=cmd_meta.help, arg_details='\n'.join(['{} - {}'.format(k, v) for (k, v) in cmd_arguments_help.items()])) # depends on [control=['if'], data=[]]
        else:
            help_str += '\n    {cmd} - {cmd_help}'.format(cmd=command, cmd_help=cmd_meta.help) # depends on [control=['for'], data=['command']]
    return help_str
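The per-argument help table construction, extracted standalone with stand-in argument objects:

```
class Arg:  # stand-in for the builder's argument objects
    def __init__(self, item_help):
        self.item_help = item_help

args = {"host": Arg("target host"), "port": Arg(""), "user": Arg("login name")}
helps = {name: arg.item_help for name, arg in args.items() if arg.item_help}
print(" | ".join(helps))  # host | user  (port has no help text, so it is skipped)
print("\n".join("{} - {}".format(k, v) for k, v in helps.items()))
```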
def destroy(self, request, *args, **kwargs): """ Deletion of a project is done through sending a **DELETE** request to the project instance URI. Please note, that if a project has connected instances, deletion request will fail with 409 response code. Valid request example (token is user specific): .. code-block:: http DELETE /api/projects/6c9b01c251c24174a6691a1f894fae31/ HTTP/1.1 Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4 Host: example.com """ return super(ProjectViewSet, self).destroy(request, *args, **kwargs)
def function[destroy, parameter[self, request]]: constant[ Deletion of a project is done through sending a **DELETE** request to the project instance URI. Please note, that if a project has connected instances, deletion request will fail with 409 response code. Valid request example (token is user specific): .. code-block:: http DELETE /api/projects/6c9b01c251c24174a6691a1f894fae31/ HTTP/1.1 Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4 Host: example.com ] return[call[call[name[super], parameter[name[ProjectViewSet], name[self]]].destroy, parameter[name[request], <ast.Starred object at 0x7da1b0f39e10>]]]
keyword[def] identifier[destroy] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[super] ( identifier[ProjectViewSet] , identifier[self] ). identifier[destroy] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
def destroy(self, request, *args, **kwargs): """ Deletion of a project is done through sending a **DELETE** request to the project instance URI. Please note, that if a project has connected instances, deletion request will fail with 409 response code. Valid request example (token is user specific): .. code-block:: http DELETE /api/projects/6c9b01c251c24174a6691a1f894fae31/ HTTP/1.1 Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4 Host: example.com """ return super(ProjectViewSet, self).destroy(request, *args, **kwargs)
def slt(computation: BaseComputation) -> None: """ Signed Lesser Comparison """ left, right = map( unsigned_to_signed, computation.stack_pop(num_items=2, type_hint=constants.UINT256), ) if left < right: result = 1 else: result = 0 computation.stack_push(signed_to_unsigned(result))
def function[slt, parameter[computation]]: constant[ Signed Lesser Comparison ] <ast.Tuple object at 0x7da1b17f93f0> assign[=] call[name[map], parameter[name[unsigned_to_signed], call[name[computation].stack_pop, parameter[]]]] if compare[name[left] less[<] name[right]] begin[:] variable[result] assign[=] constant[1] call[name[computation].stack_push, parameter[call[name[signed_to_unsigned], parameter[name[result]]]]]
keyword[def] identifier[slt] ( identifier[computation] : identifier[BaseComputation] )-> keyword[None] : literal[string] identifier[left] , identifier[right] = identifier[map] ( identifier[unsigned_to_signed] , identifier[computation] . identifier[stack_pop] ( identifier[num_items] = literal[int] , identifier[type_hint] = identifier[constants] . identifier[UINT256] ), ) keyword[if] identifier[left] < identifier[right] : identifier[result] = literal[int] keyword[else] : identifier[result] = literal[int] identifier[computation] . identifier[stack_push] ( identifier[signed_to_unsigned] ( identifier[result] ))
def slt(computation: BaseComputation) -> None: """ Signed Lesser Comparison """ (left, right) = map(unsigned_to_signed, computation.stack_pop(num_items=2, type_hint=constants.UINT256)) if left < right: result = 1 # depends on [control=['if'], data=[]] else: result = 0 computation.stack_push(signed_to_unsigned(result))
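The 256-bit two's-complement round trip that SLT relies on, shown standalone: the unsigned word 2**256 - 1 reads as signed -1, which is less than 2.

```
UINT256_CEILING = 2 ** 256

def unsigned_to_signed(value):
    return value if value < 2 ** 255 else value - UINT256_CEILING

def signed_to_unsigned(value):
    return value + UINT256_CEILING if value < 0 else value

left = unsigned_to_signed(UINT256_CEILING - 1)  # -1
right = unsigned_to_signed(2)                   # 2
print(signed_to_unsigned(1 if left < right else 0))  # 1: SLT(-1, 2) is true
```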
def warning(self, message, print_location=True):
    """Display a warning message. Uses exshared for the current parsing location."""
    msg = "Warning"
    if print_location and exshared.location is not None:
        wline = lineno(exshared.location, exshared.text)
        wcol = col(exshared.location, exshared.text)
        wtext = line(exshared.location, exshared.text)
        msg += " at line %d, col %d" % (wline, wcol)
    msg += ": %s" % message
    if print_location and exshared.location is not None:
        msg += "\n%s" % wtext
    print(msg)
def function[warning, parameter[self, message, print_location]]: constant[Displays warning message. Uses exshared for current location of parsing] variable[msg] assign[=] constant[Warning] if <ast.BoolOp object at 0x7da18c4cf100> begin[:] variable[wline] assign[=] call[name[lineno], parameter[name[exshared].location, name[exshared].text]] variable[wcol] assign[=] call[name[col], parameter[name[exshared].location, name[exshared].text]] variable[wtext] assign[=] call[name[line], parameter[name[exshared].location, name[exshared].text]] <ast.AugAssign object at 0x7da18f09f820> <ast.AugAssign object at 0x7da18f09e8f0> if <ast.BoolOp object at 0x7da18f09ed40> begin[:] <ast.AugAssign object at 0x7da18f09cdc0> call[name[print], parameter[name[msg]]]
keyword[def] identifier[warning] ( identifier[self] , identifier[message] , identifier[print_location] = keyword[True] ): literal[string] identifier[msg] = literal[string] keyword[if] identifier[print_location] keyword[and] ( identifier[exshared] . identifier[location] != keyword[None] ): identifier[wline] = identifier[lineno] ( identifier[exshared] . identifier[location] , identifier[exshared] . identifier[text] ) identifier[wcol] = identifier[col] ( identifier[exshared] . identifier[location] , identifier[exshared] . identifier[text] ) identifier[wtext] = identifier[line] ( identifier[exshared] . identifier[location] , identifier[exshared] . identifier[text] ) identifier[msg] += literal[string] %( identifier[wline] , identifier[wcol] ) identifier[msg] += literal[string] % identifier[message] keyword[if] identifier[print_location] keyword[and] ( identifier[exshared] . identifier[location] != keyword[None] ): identifier[msg] += literal[string] % identifier[wtext] identifier[print] ( identifier[msg] )
def warning(self, message, print_location=True): """Displays warning message. Uses exshared for current location of parsing""" msg = 'Warning' if print_location and exshared.location != None: wline = lineno(exshared.location, exshared.text) wcol = col(exshared.location, exshared.text) wtext = line(exshared.location, exshared.text) msg += ' at line %d, col %d' % (wline, wcol) # depends on [control=['if'], data=[]] msg += ': %s' % message if print_location and exshared.location != None: msg += '\n%s' % wtext # depends on [control=['if'], data=[]] print(msg)
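The pyparsing location helpers used above, exercised standalone on a small input:

```
from pyparsing import col, line, lineno

text = "x = 1\ny = )\n"
loc = text.index(")")
print(lineno(loc, text), col(loc, text))  # 2 5
print(line(loc, text))                    # y = )
```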
def as_graph(self, depth=0): """ Create a graph with self as node, cache it, return it. Args: depth (int): depth of the graph. Returns: Graph: an instance of Graph. """ if depth in self._graph_cache: return self._graph_cache[depth] self._graph_cache[depth] = graph = Graph(self, depth=depth) return graph
def function[as_graph, parameter[self, depth]]: constant[ Create a graph with self as node, cache it, return it. Args: depth (int): depth of the graph. Returns: Graph: an instance of Graph. ] if compare[name[depth] in name[self]._graph_cache] begin[:] return[call[name[self]._graph_cache][name[depth]]] call[name[self]._graph_cache][name[depth]] assign[=] call[name[Graph], parameter[name[self]]] return[name[graph]]
keyword[def] identifier[as_graph] ( identifier[self] , identifier[depth] = literal[int] ): literal[string] keyword[if] identifier[depth] keyword[in] identifier[self] . identifier[_graph_cache] : keyword[return] identifier[self] . identifier[_graph_cache] [ identifier[depth] ] identifier[self] . identifier[_graph_cache] [ identifier[depth] ]= identifier[graph] = identifier[Graph] ( identifier[self] , identifier[depth] = identifier[depth] ) keyword[return] identifier[graph]
def as_graph(self, depth=0): """ Create a graph with self as node, cache it, return it. Args: depth (int): depth of the graph. Returns: Graph: an instance of Graph. """ if depth in self._graph_cache: return self._graph_cache[depth] # depends on [control=['if'], data=['depth']] self._graph_cache[depth] = graph = Graph(self, depth=depth) return graph
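The method is a per-argument memoizer: each distinct `depth` builds its `Graph` once and every later call returns the cached instance. A self-contained check, with a minimal stub standing in for the real `Graph`:

```python
class Graph:
    def __init__(self, node, depth=0):
        self.node, self.depth = node, depth

class Node:
    def __init__(self):
        self._graph_cache = {}  # depth -> Graph, built lazily
    def as_graph(self, depth=0):
        if depth in self._graph_cache:
            return self._graph_cache[depth]
        self._graph_cache[depth] = graph = Graph(self, depth=depth)
        return graph

n = Node()
assert n.as_graph(2) is n.as_graph(2)      # cache hit: same object
assert n.as_graph(2) is not n.as_graph(3)  # separate entry per depth
```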
def posthoc_nemenyi_friedman(a, y_col=None, block_col=None, group_col=None, melted=False, sort=False): '''Calculate pairwise comparisons using Nemenyi post hoc test for unreplicated blocked data. This test is usually conducted post hoc if significant results of the Friedman's test are obtained. The statistics refer to upper quantiles of the studentized range distribution (Tukey) [1]_, [2]_, [3]_. Parameters ---------- a : array_like or pandas DataFrame object An array, any object exposing the array interface or a pandas DataFrame. If `melted` is set to False (default), `a` is a typical matrix of block design, i.e. rows are blocks, and columns are groups. In this case you do not need to specify col arguments. If `a` is an array and `melted` is set to True, y_col, block_col and group_col must specify the indices of columns containing elements of corresponding type. If `a` is a Pandas DataFrame and `melted` is set to True, y_col, block_col and group_col must specify column names (strings). y_col : str or int Must be specified if `a` is a pandas DataFrame object. Name of the column that contains y data. block_col : str or int Must be specified if `a` is a pandas DataFrame object. Name of the column that contains blocking factor values. group_col : str or int Must be specified if `a` is a pandas DataFrame object. Name of the column that contains treatment (group) factor values. melted : bool, optional Specifies if data are given as melted columns "y", "blocks", and "groups". sort : bool, optional If True, sort data by block and group columns. Returns ------- result : pandas DataFrame P values. Notes ----- A one-way ANOVA with repeated measures that is also referred to as ANOVA with unreplicated block design can also be conducted via Friedman's test. The consequent post hoc pairwise multiple comparison test according to Nemenyi is conducted with this function. This function does not test for ties. References ---------- .. [1] J. Demsar (2006), Statistical comparisons of classifiers over multiple data sets, Journal of Machine Learning Research, 7, 1-30. .. [2] P. Nemenyi (1963) Distribution-free Multiple Comparisons. Ph.D. thesis, Princeton University. .. [3] L. Sachs (1997), Angewandte Statistik. Berlin: Springer. Pages: 668-675. Examples -------- >>> # Non-melted case, x is a block design matrix, i.e. rows are blocks >>> # and columns are groups. >>> x = np.array([[31,27,24],[31,28,31],[45,29,46],[21,18,48],[42,36,46],[32,17,40]]) >>> sp.posthoc_nemenyi_friedman(x) ''' if melted and not all([block_col, group_col, y_col]): raise ValueError('block_col, group_col, y_col should be explicitly specified if using melted data') def compare_stats(i, j): dif = np.abs(R[groups[i]] - R[groups[j]]) qval = dif / np.sqrt(k * (k + 1.) / (6. * n)) return qval x, _y_col, _group_col, _block_col = __convert_to_block_df(a, y_col, group_col, block_col, melted) #if not sort: # x[group_col] = Categorical(x[group_col], categories=x[group_col].unique(), ordered=True) # x[block_col] = Categorical(x[block_col], categories=x[block_col].unique(), ordered=True) x.sort_values(by=[_group_col, _block_col], ascending=True, inplace=True) x.dropna(inplace=True) groups = x[_group_col].unique() k = groups.size n = x[_block_col].unique().size x['mat'] = x.groupby(_block_col)[_y_col].rank() R = x.groupby(_group_col)['mat'].mean() vs = np.zeros((k, k)) combs = it.combinations(range(k), 2) tri_upper = np.triu_indices(vs.shape[0], 1) tri_lower = np.tril_indices(vs.shape[0], -1) vs[:,:] = 0 for i, j in combs: vs[i, j] = compare_stats(i, j) vs *= np.sqrt(2.) vs[tri_upper] = psturng(vs[tri_upper], k, np.inf) vs[tri_lower] = vs.T[tri_lower] np.fill_diagonal(vs, -1) return DataFrame(vs, index=groups, columns=groups)
def function[posthoc_nemenyi_friedman, parameter[a, y_col, block_col, group_col, melted, sort]]: constant[Calculate pairwise comparisons using Nemenyi post hoc test for unreplicated blocked data. This test is usually conducted post hoc if significant results of the Friedman's test are obtained. The statistics refer to upper quantiles of the studentized range distribution (Tukey) [1]_, [2]_, [3]_. Parameters ---------- a : array_like or pandas DataFrame object An array, any object exposing the array interface or a pandas DataFrame. If `melted` is set to False (default), `a` is a typical matrix of block design, i.e. rows are blocks, and columns are groups. In this case you do not need to specify col arguments. If `a` is an array and `melted` is set to True, y_col, block_col and group_col must specify the indices of columns containing elements of corresponding type. If `a` is a Pandas DataFrame and `melted` is set to True, y_col, block_col and group_col must specify column names (strings). y_col : str or int Must be specified if `a` is a pandas DataFrame object. Name of the column that contains y data. block_col : str or int Must be specified if `a` is a pandas DataFrame object. Name of the column that contains blocking factor values. group_col : str or int Must be specified if `a` is a pandas DataFrame object. Name of the column that contains treatment (group) factor values. melted : bool, optional Specifies if data are given as melted columns "y", "blocks", and "groups". sort : bool, optional If True, sort data by block and group columns. Returns ------- result : pandas DataFrame P values. Notes ----- A one-way ANOVA with repeated measures that is also referred to as ANOVA with unreplicated block design can also be conducted via Friedman's test. The consequent post hoc pairwise multiple comparison test according to Nemenyi is conducted with this function. This function does not test for ties. References ---------- .. [1] J. Demsar (2006), Statistical comparisons of classifiers over multiple data sets, Journal of Machine Learning Research, 7, 1-30. .. [2] P. Nemenyi (1963) Distribution-free Multiple Comparisons. Ph.D. thesis, Princeton University. .. [3] L. Sachs (1997), Angewandte Statistik. Berlin: Springer. Pages: 668-675. Examples -------- >>> # Non-melted case, x is a block design matrix, i.e. rows are blocks >>> # and columns are groups. >>> x = np.array([[31,27,24],[31,28,31],[45,29,46],[21,18,48],[42,36,46],[32,17,40]]) >>> sp.posthoc_nemenyi_friedman(x) ] if <ast.BoolOp object at 0x7da1b1173af0> begin[:] <ast.Raise object at 0x7da1b1173940> def function[compare_stats, parameter[i, j]]: variable[dif] assign[=] call[name[np].abs, parameter[binary_operation[call[name[R]][call[name[groups]][name[i]]] - call[name[R]][call[name[groups]][name[j]]]]]] variable[qval] assign[=] binary_operation[name[dif] / call[name[np].sqrt, parameter[binary_operation[binary_operation[name[k] * binary_operation[name[k] + constant[1.0]]] / binary_operation[constant[6.0] * name[n]]]]]] return[name[qval]] <ast.Tuple object at 0x7da1b1173100> assign[=] call[name[__convert_to_block_df], parameter[name[a], name[y_col], name[group_col], name[block_col], name[melted]]] call[name[x].sort_values, parameter[]] call[name[x].dropna, parameter[]] variable[groups] assign[=] call[call[name[x]][name[_group_col]].unique, parameter[]] variable[k] assign[=] name[groups].size variable[n] assign[=] call[call[name[x]][name[_block_col]].unique, parameter[]].size call[name[x]][constant[mat]] assign[=] call[call[call[name[x].groupby, parameter[name[_block_col]]]][name[_y_col]].rank, parameter[]] variable[R] assign[=] call[call[call[name[x].groupby, parameter[name[_group_col]]]][constant[mat]].mean, parameter[]] variable[vs] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1170670>, <ast.Name object at 0x7da1b11706a0>]]]] variable[combs] assign[=] call[name[it].combinations, parameter[call[name[range], parameter[name[k]]], constant[2]]] variable[tri_upper] assign[=] call[name[np].triu_indices, parameter[call[name[vs].shape][constant[0]], constant[1]]] variable[tri_lower] assign[=] call[name[np].tril_indices, parameter[call[name[vs].shape][constant[0]], <ast.UnaryOp object at 0x7da1b1170c10>]] call[name[vs]][tuple[[<ast.Slice object at 0x7da1b1170d30>, <ast.Slice object at 0x7da1b1170d60>]]] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da1b1170e20>, <ast.Name object at 0x7da1b1170e50>]]] in starred[name[combs]] begin[:] call[name[vs]][tuple[[<ast.Name object at 0x7da1b1170f70>, <ast.Name object at 0x7da1b1170fa0>]]] assign[=] call[name[compare_stats], parameter[name[i], name[j]]] <ast.AugAssign object at 0x7da1b1171090> call[name[vs]][name[tri_upper]] assign[=] call[name[psturng], parameter[call[name[vs]][name[tri_upper]], name[k], name[np].inf]] call[name[vs]][name[tri_lower]] assign[=] call[name[vs].T][name[tri_lower]] call[name[np].fill_diagonal, parameter[name[vs], <ast.UnaryOp object at 0x7da1b1171660>]] return[call[name[DataFrame], parameter[name[vs]]]]
keyword[def] identifier[posthoc_nemenyi_friedman] ( identifier[a] , identifier[y_col] = keyword[None] , identifier[block_col] = keyword[None] , identifier[group_col] = keyword[None] , identifier[melted] = keyword[False] , identifier[sort] = keyword[False] ): literal[string] keyword[if] identifier[melted] keyword[and] keyword[not] identifier[all] ([ identifier[block_col] , identifier[group_col] , identifier[y_col] ]): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[def] identifier[compare_stats] ( identifier[i] , identifier[j] ): identifier[dif] = identifier[np] . identifier[abs] ( identifier[R] [ identifier[groups] [ identifier[i] ]]- identifier[R] [ identifier[groups] [ identifier[j] ]]) identifier[qval] = identifier[dif] / identifier[np] . identifier[sqrt] ( identifier[k] *( identifier[k] + literal[int] )/( literal[int] * identifier[n] )) keyword[return] identifier[qval] identifier[x] , identifier[_y_col] , identifier[_group_col] , identifier[_block_col] = identifier[__convert_to_block_df] ( identifier[a] , identifier[y_col] , identifier[group_col] , identifier[block_col] , identifier[melted] ) identifier[x] . identifier[sort_values] ( identifier[by] =[ identifier[_group_col] , identifier[_block_col] ], identifier[ascending] = keyword[True] , identifier[inplace] = keyword[True] ) identifier[x] . identifier[dropna] ( identifier[inplace] = keyword[True] ) identifier[groups] = identifier[x] [ identifier[_group_col] ]. identifier[unique] () identifier[k] = identifier[groups] . identifier[size] identifier[n] = identifier[x] [ identifier[_block_col] ]. identifier[unique] (). identifier[size] identifier[x] [ literal[string] ]= identifier[x] . identifier[groupby] ( identifier[_block_col] )[ identifier[_y_col] ]. identifier[rank] () identifier[R] = identifier[x] . identifier[groupby] ( identifier[_group_col] )[ literal[string] ]. identifier[mean] () identifier[vs] = identifier[np] . identifier[zeros] (( identifier[k] , identifier[k] )) identifier[combs] = identifier[it] . identifier[combinations] ( identifier[range] ( identifier[k] ), literal[int] ) identifier[tri_upper] = identifier[np] . identifier[triu_indices] ( identifier[vs] . identifier[shape] [ literal[int] ], literal[int] ) identifier[tri_lower] = identifier[np] . identifier[tril_indices] ( identifier[vs] . identifier[shape] [ literal[int] ],- literal[int] ) identifier[vs] [:,:]= literal[int] keyword[for] identifier[i] , identifier[j] keyword[in] identifier[combs] : identifier[vs] [ identifier[i] , identifier[j] ]= identifier[compare_stats] ( identifier[i] , identifier[j] ) identifier[vs] *= identifier[np] . identifier[sqrt] ( literal[int] ) identifier[vs] [ identifier[tri_upper] ]= identifier[psturng] ( identifier[vs] [ identifier[tri_upper] ], identifier[k] , identifier[np] . identifier[inf] ) identifier[vs] [ identifier[tri_lower] ]= identifier[vs] . identifier[T] [ identifier[tri_lower] ] identifier[np] . identifier[fill_diagonal] ( identifier[vs] ,- literal[int] ) keyword[return] identifier[DataFrame] ( identifier[vs] , identifier[index] = identifier[groups] , identifier[columns] = identifier[groups] )
def posthoc_nemenyi_friedman(a, y_col=None, block_col=None, group_col=None, melted=False, sort=False): """Calculate pairwise comparisons using Nemenyi post hoc test for unreplicated blocked data. This test is usually conducted post hoc if significant results of the Friedman's test are obtained. The statistics refer to upper quantiles of the studentized range distribution (Tukey) [1]_, [2]_, [3]_. Parameters ---------- a : array_like or pandas DataFrame object An array, any object exposing the array interface or a pandas DataFrame. If `melted` is set to False (default), `a` is a typical matrix of block design, i.e. rows are blocks, and columns are groups. In this case you do not need to specify col arguments. If `a` is an array and `melted` is set to True, y_col, block_col and group_col must specify the indices of columns containing elements of corresponding type. If `a` is a Pandas DataFrame and `melted` is set to True, y_col, block_col and group_col must specify column names (strings). y_col : str or int Must be specified if `a` is a pandas DataFrame object. Name of the column that contains y data. block_col : str or int Must be specified if `a` is a pandas DataFrame object. Name of the column that contains blocking factor values. group_col : str or int Must be specified if `a` is a pandas DataFrame object. Name of the column that contains treatment (group) factor values. melted : bool, optional Specifies if data are given as melted columns "y", "blocks", and "groups". sort : bool, optional If True, sort data by block and group columns. Returns ------- result : pandas DataFrame P values. Notes ----- A one-way ANOVA with repeated measures that is also referred to as ANOVA with unreplicated block design can also be conducted via Friedman's test. The consequent post hoc pairwise multiple comparison test according to Nemenyi is conducted with this function. This function does not test for ties. References ---------- .. [1] J. Demsar (2006), Statistical comparisons of classifiers over multiple data sets, Journal of Machine Learning Research, 7, 1-30. .. [2] P. Nemenyi (1963) Distribution-free Multiple Comparisons. Ph.D. thesis, Princeton University. .. [3] L. Sachs (1997), Angewandte Statistik. Berlin: Springer. Pages: 668-675. Examples -------- >>> # Non-melted case, x is a block design matrix, i.e. rows are blocks >>> # and columns are groups. >>> x = np.array([[31,27,24],[31,28,31],[45,29,46],[21,18,48],[42,36,46],[32,17,40]]) >>> sp.posthoc_nemenyi_friedman(x) """ if melted and (not all([block_col, group_col, y_col])): raise ValueError('block_col, group_col, y_col should be explicitly specified if using melted data') # depends on [control=['if'], data=[]] def compare_stats(i, j): dif = np.abs(R[groups[i]] - R[groups[j]]) qval = dif / np.sqrt(k * (k + 1.0) / (6.0 * n)) return qval (x, _y_col, _group_col, _block_col) = __convert_to_block_df(a, y_col, group_col, block_col, melted) #if not sort: # x[group_col] = Categorical(x[group_col], categories=x[group_col].unique(), ordered=True) # x[block_col] = Categorical(x[block_col], categories=x[block_col].unique(), ordered=True) x.sort_values(by=[_group_col, _block_col], ascending=True, inplace=True) x.dropna(inplace=True) groups = x[_group_col].unique() k = groups.size n = x[_block_col].unique().size x['mat'] = x.groupby(_block_col)[_y_col].rank() R = x.groupby(_group_col)['mat'].mean() vs = np.zeros((k, k)) combs = it.combinations(range(k), 2) tri_upper = np.triu_indices(vs.shape[0], 1) tri_lower = np.tril_indices(vs.shape[0], -1) vs[:, :] = 0 for (i, j) in combs: vs[i, j] = compare_stats(i, j) # depends on [control=['for'], data=[]] vs *= np.sqrt(2.0) vs[tri_upper] = psturng(vs[tri_upper], k, np.inf) vs[tri_lower] = vs.T[tri_lower] np.fill_diagonal(vs, -1) return DataFrame(vs, index=groups, columns=groups)
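The docstring demos only the block-matrix call; the melted path exercises the column arguments. A hedged sketch, assuming the record ships as scikit-posthocs (its own docstring uses the `sp.` alias):

```python
import pandas as pd
import scikit_posthocs as sp

df = pd.DataFrame({
    "y":     [31, 27, 24, 31, 28, 31, 45, 29, 46],
    "block": [1, 1, 1, 2, 2, 2, 3, 3, 3],
    "group": ["a", "b", "c"] * 3,
})
# Long-format input: every row is one (block, group) observation.
pvals = sp.posthoc_nemenyi_friedman(df, y_col="y", block_col="block",
                                    group_col="group", melted=True)
# pvals is a symmetric DataFrame of pairwise p values, -1 on the diagonal.
```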
def instantiate(data, blueprint): """ Instantiate the given data using the blueprinter. Arguments --------- blueprint (collections.Mapping): a blueprint (JSON Schema with Seep properties) """ Validator = jsonschema.validators.validator_for(blueprint) blueprinter = extend(Validator)(blueprint) return blueprinter.instantiate(data)
def function[instantiate, parameter[data, blueprint]]: constant[ Instantiate the given data using the blueprinter. Arguments --------- blueprint (collections.Mapping): a blueprint (JSON Schema with Seep properties) ] variable[Validator] assign[=] call[name[jsonschema].validators.validator_for, parameter[name[blueprint]]] variable[blueprinter] assign[=] call[call[name[extend], parameter[name[Validator]]], parameter[name[blueprint]]] return[call[name[blueprinter].instantiate, parameter[name[data]]]]
keyword[def] identifier[instantiate] ( identifier[data] , identifier[blueprint] ): literal[string] identifier[Validator] = identifier[jsonschema] . identifier[validators] . identifier[validator_for] ( identifier[blueprint] ) identifier[blueprinter] = identifier[extend] ( identifier[Validator] )( identifier[blueprint] ) keyword[return] identifier[blueprinter] . identifier[instantiate] ( identifier[data] )
def instantiate(data, blueprint): """ Instantiate the given data using the blueprinter. Arguments --------- blueprint (collections.Mapping): a blueprint (JSON Schema with Seep properties) """ Validator = jsonschema.validators.validator_for(blueprint) blueprinter = extend(Validator)(blueprint) return blueprinter.instantiate(data)
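`validator_for` picks a validator class from the schema's `$schema` key; the Seep-specific `extend`/`instantiate` machinery is not reproduced here. The underlying jsonschema calls are standard public API (the blueprint itself is invented):

```python
import jsonschema

blueprint = {"type": "object", "properties": {"name": {"type": "string"}}}
Validator = jsonschema.validators.validator_for(blueprint)
Validator.check_schema(blueprint)             # raises SchemaError if malformed
Validator(blueprint).validate({"name": "x"})  # plain validation, no Seep defaults
```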
def _generate_create_callable(name, display_name, arguments, regex, doc, supported, post_arguments, is_action): """ Returns a callable which conjures the URL for the resource and POSTs data """ def f(self, *args, **kwargs): for key, value in args[-1].items(): if type(value) == file: return self._put_or_post_multipart('POST', self._generate_url(regex, args[:-1]), args[-1]) return self._put_or_post_json('POST', self._generate_url(regex, args[:-1]), args[-1]) if is_action: f.__name__ = str(name) else: f.__name__ = str('create_%s' % name) f.__doc__ = doc f._resource_uri = regex f._get_args = arguments f._put_or_post_args = post_arguments f.resource_name = display_name f.is_api_call = True f.is_supported_api = supported return f
def function[_generate_create_callable, parameter[name, display_name, arguments, regex, doc, supported, post_arguments, is_action]]: constant[ Returns a callable which conjures the URL for the resource and POSTs data ] def function[f, parameter[self]]: for taget[tuple[[<ast.Name object at 0x7da1b0a70190>, <ast.Name object at 0x7da1b0a73460>]]] in starred[call[call[name[args]][<ast.UnaryOp object at 0x7da1b0a73340>].items, parameter[]]] begin[:] if compare[call[name[type], parameter[name[value]]] equal[==] name[file]] begin[:] return[call[name[self]._put_or_post_multipart, parameter[constant[POST], call[name[self]._generate_url, parameter[name[regex], call[name[args]][<ast.Slice object at 0x7da1b0a70f70>]]], call[name[args]][<ast.UnaryOp object at 0x7da1b0a71cc0>]]]] return[call[name[self]._put_or_post_json, parameter[constant[POST], call[name[self]._generate_url, parameter[name[regex], call[name[args]][<ast.Slice object at 0x7da1b0a70c10>]]], call[name[args]][<ast.UnaryOp object at 0x7da1b0a712a0>]]]] if name[is_action] begin[:] name[f].__name__ assign[=] call[name[str], parameter[name[name]]] name[f].__doc__ assign[=] name[doc] name[f]._resource_uri assign[=] name[regex] name[f]._get_args assign[=] name[arguments] name[f]._put_or_post_args assign[=] name[post_arguments] name[f].resource_name assign[=] name[display_name] name[f].is_api_call assign[=] constant[True] name[f].is_supported_api assign[=] name[supported] return[name[f]]
keyword[def] identifier[_generate_create_callable] ( identifier[name] , identifier[display_name] , identifier[arguments] , identifier[regex] , identifier[doc] , identifier[supported] , identifier[post_arguments] , identifier[is_action] ): literal[string] keyword[def] identifier[f] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): keyword[for] identifier[key] , identifier[value] keyword[in] identifier[args] [- literal[int] ]. identifier[items] (): keyword[if] identifier[type] ( identifier[value] )== identifier[file] : keyword[return] identifier[self] . identifier[_put_or_post_multipart] ( literal[string] , identifier[self] . identifier[_generate_url] ( identifier[regex] , identifier[args] [:- literal[int] ]), identifier[args] [- literal[int] ]) keyword[return] identifier[self] . identifier[_put_or_post_json] ( literal[string] , identifier[self] . identifier[_generate_url] ( identifier[regex] , identifier[args] [:- literal[int] ]), identifier[args] [- literal[int] ]) keyword[if] identifier[is_action] : identifier[f] . identifier[__name__] = identifier[str] ( identifier[name] ) keyword[else] : identifier[f] . identifier[__name__] = identifier[str] ( literal[string] % identifier[name] ) identifier[f] . identifier[__doc__] = identifier[doc] identifier[f] . identifier[_resource_uri] = identifier[regex] identifier[f] . identifier[_get_args] = identifier[arguments] identifier[f] . identifier[_put_or_post_args] = identifier[post_arguments] identifier[f] . identifier[resource_name] = identifier[display_name] identifier[f] . identifier[is_api_call] = keyword[True] identifier[f] . identifier[is_supported_api] = identifier[supported] keyword[return] identifier[f]
def _generate_create_callable(name, display_name, arguments, regex, doc, supported, post_arguments, is_action): """ Returns a callable which conjures the URL for the resource and POSTs data """ def f(self, *args, **kwargs): for (key, value) in args[-1].items(): if type(value) == file: return self._put_or_post_multipart('POST', self._generate_url(regex, args[:-1]), args[-1]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return self._put_or_post_json('POST', self._generate_url(regex, args[:-1]), args[-1]) if is_action: f.__name__ = str(name) # depends on [control=['if'], data=[]] else: f.__name__ = str('create_%s' % name) f.__doc__ = doc f._resource_uri = regex f._get_args = arguments f._put_or_post_args = post_arguments f.resource_name = display_name f.is_api_call = True f.is_supported_api = supported return f
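Two observations. The `type(value) == file` branch marks this record as Python 2 code (`file` is not a builtin in Python 3). And the enclosing pattern is a method factory: a closure over the URL template, renamed and tagged with marker attributes for later introspection. A pared-down, self-contained version of that pattern (`_post_json` here is a stub, not the record's helper):

```python
def make_create_method(name, url_template):
    def f(self, payload):
        # Closure captures url_template; 'self' binds at attribute access.
        return self._post_json(url_template, payload)
    f.__name__ = "create_%s" % name
    f.is_api_call = True  # marker attribute, mirroring the record
    return f

class Client:
    _post_json = staticmethod(lambda url, payload: (url, payload))  # stub
    create_widget = make_create_method("widget", "/widgets/")

print(Client().create_widget({"name": "x"}))  # ('/widgets/', {'name': 'x'})
```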
def getoutputfile(self, loadmetadata=True, client=None,requiremetadata=False): """Grabs one output file (raises a StopIteration exception if there is none). Shortcut for getoutputfiles()""" return next(self.getoutputfiles(loadmetadata,client,requiremetadata))
def function[getoutputfile, parameter[self, loadmetadata, client, requiremetadata]]: constant[Grabs one output file (raises a StopIteration exception if there is none). Shortcut for getoutputfiles()] return[call[name[next], parameter[call[name[self].getoutputfiles, parameter[name[loadmetadata], name[client], name[requiremetadata]]]]]]
keyword[def] identifier[getoutputfile] ( identifier[self] , identifier[loadmetadata] = keyword[True] , identifier[client] = keyword[None] , identifier[requiremetadata] = keyword[False] ): literal[string] keyword[return] identifier[next] ( identifier[self] . identifier[getoutputfiles] ( identifier[loadmetadata] , identifier[client] , identifier[requiremetadata] ))
def getoutputfile(self, loadmetadata=True, client=None, requiremetadata=False): """Grabs one output file (raises a StopIteration exception if there is none). Shortcut for getoutputfiles()""" return next(self.getoutputfiles(loadmetadata, client, requiremetadata))
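Because the shortcut calls `next()` directly on the generator, "no output files" surfaces as `StopIteration` rather than `None`. A defensive usage sketch (`job` is a placeholder for whatever object carries this method):

```python
try:
    outputfile = job.getoutputfile(loadmetadata=True)
except StopIteration:
    outputfile = None  # the job produced no output files
```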
def rows_above_layout(self): """ Return the number of rows visible in the terminal above the layout. """ if self._in_alternate_screen: return 0 elif self._min_available_height > 0: total_rows = self.output.get_size().rows last_screen_height = self._last_screen.height if self._last_screen else 0 return total_rows - max(self._min_available_height, last_screen_height) else: raise HeightIsUnknownError('Rows above layout is unknown.')
def function[rows_above_layout, parameter[self]]: constant[ Return the number of rows visible in the terminal above the layout. ] if name[self]._in_alternate_screen begin[:] return[constant[0]]
keyword[def] identifier[rows_above_layout] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_in_alternate_screen] : keyword[return] literal[int] keyword[elif] identifier[self] . identifier[_min_available_height] > literal[int] : identifier[total_rows] = identifier[self] . identifier[output] . identifier[get_size] (). identifier[rows] identifier[last_screen_height] = identifier[self] . identifier[_last_screen] . identifier[height] keyword[if] identifier[self] . identifier[_last_screen] keyword[else] literal[int] keyword[return] identifier[total_rows] - identifier[max] ( identifier[self] . identifier[_min_available_height] , identifier[last_screen_height] ) keyword[else] : keyword[raise] identifier[HeightIsUnknownError] ( literal[string] )
def rows_above_layout(self): """ Return the number of rows visible in the terminal above the layout. """ if self._in_alternate_screen: return 0 # depends on [control=['if'], data=[]] elif self._min_available_height > 0: total_rows = self.output.get_size().rows last_screen_height = self._last_screen.height if self._last_screen else 0 return total_rows - max(self._min_available_height, last_screen_height) # depends on [control=['if'], data=[]] else: raise HeightIsUnknownError('Rows above layout is unknown.')
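The visible-rows math is terminal height minus whichever is larger: the reserved minimum or the last rendered screen. Reproducing the arithmetic with invented numbers (the attribute names mirror the record):

```python
import shutil

total_rows = shutil.get_terminal_size().lines  # e.g. 40
min_available_height = 8    # rows the renderer reserves for itself
last_screen_height = 12     # height of the most recent render
rows_above = total_rows - max(min_available_height, last_screen_height)
print(rows_above)           # 28 on a 40-row terminal
```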
def _value_and_batch_jacobian(f, x): """Enables uniform interface to value and batch jacobian calculation. Works in both eager and graph modes. Arguments: f: The scalar function to evaluate. x: The value at which to compute the value and the batch jacobian. Returns: A tuple (f(x), J(x)), where J(x) is the batch jacobian. """ if tf.executing_eagerly(): with tf.GradientTape() as tape: tape.watch(x) value = f(x) batch_jacobian = tape.batch_jacobian(value, x) else: value = f(x) batch_jacobian = gradients.batch_jacobian(value, x) return value, batch_jacobian
def function[_value_and_batch_jacobian, parameter[f, x]]: constant[Enables uniform interface to value and batch jacobian calculation. Works in both eager and graph modes. Arguments: f: The scalar function to evaluate. x: The value at which to compute the value and the batch jacobian. Returns: A tuple (f(x), J(x)), where J(x) is the batch jacobian. ] if call[name[tf].executing_eagerly, parameter[]] begin[:] with call[name[tf].GradientTape, parameter[]] begin[:] call[name[tape].watch, parameter[name[x]]] variable[value] assign[=] call[name[f], parameter[name[x]]] variable[batch_jacobian] assign[=] call[name[tape].batch_jacobian, parameter[name[value], name[x]]] return[tuple[[<ast.Name object at 0x7da1b03f8c70>, <ast.Name object at 0x7da1b03f8a90>]]]
keyword[def] identifier[_value_and_batch_jacobian] ( identifier[f] , identifier[x] ): literal[string] keyword[if] identifier[tf] . identifier[executing_eagerly] (): keyword[with] identifier[tf] . identifier[GradientTape] () keyword[as] identifier[tape] : identifier[tape] . identifier[watch] ( identifier[x] ) identifier[value] = identifier[f] ( identifier[x] ) identifier[batch_jacobian] = identifier[tape] . identifier[batch_jacobian] ( identifier[value] , identifier[x] ) keyword[else] : identifier[value] = identifier[f] ( identifier[x] ) identifier[batch_jacobian] = identifier[gradients] . identifier[batch_jacobian] ( identifier[value] , identifier[x] ) keyword[return] identifier[value] , identifier[batch_jacobian]
def _value_and_batch_jacobian(f, x): """Enables uniform interface to value and batch jacobian calculation. Works in both eager and graph modes. Arguments: f: The scalar function to evaluate. x: The value at which to compute the value and the batch jacobian. Returns: A tuple (f(x), J(x)), where J(x) is the batch jacobian. """ if tf.executing_eagerly(): with tf.GradientTape() as tape: tape.watch(x) value = f(x) # depends on [control=['with'], data=['tape']] batch_jacobian = tape.batch_jacobian(value, x) # depends on [control=['if'], data=[]] else: value = f(x) batch_jacobian = gradients.batch_jacobian(value, x) return (value, batch_jacobian)
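In eager mode the helper is just a `GradientTape` recording plus `batch_jacobian`; the graph branch delegates to `gradients.batch_jacobian` instead. The eager half in isolation, using only public TensorFlow 2.x API:

```python
import tensorflow as tf

x = tf.constant([[1.0, 2.0], [3.0, 4.0]])  # batch of 2 examples, dim 2
with tf.GradientTape() as tape:
    tape.watch(x)
    y = x ** 2
jac = tape.batch_jacobian(y, x)
print(jac.shape)  # (2, 2, 2): one 2x2 Jacobian per batch element
# For y = x**2, each per-example Jacobian is diag(2 * x_i).
```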
def get_revocation_reason(self): """Get the revocation reason of this certificate.""" if self.revoked is False: return if self.revoked_reason == '' or self.revoked_reason is None: return x509.ReasonFlags.unspecified else: return getattr(x509.ReasonFlags, self.revoked_reason)
def function[get_revocation_reason, parameter[self]]: constant[Get the revocation reason of this certificate.] if compare[name[self].revoked is constant[False]] begin[:] return[None] if <ast.BoolOp object at 0x7da2044c08b0> begin[:] return[name[x509].ReasonFlags.unspecified]
keyword[def] identifier[get_revocation_reason] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[revoked] keyword[is] keyword[False] : keyword[return] keyword[if] identifier[self] . identifier[revoked_reason] == literal[string] keyword[or] identifier[self] . identifier[revoked_reason] keyword[is] keyword[None] : keyword[return] identifier[x509] . identifier[ReasonFlags] . identifier[unspecified] keyword[else] : keyword[return] identifier[getattr] ( identifier[x509] . identifier[ReasonFlags] , identifier[self] . identifier[revoked_reason] )
def get_revocation_reason(self): """Get the revocation reason of this certificate.""" if self.revoked is False: return # depends on [control=['if'], data=[]] if self.revoked_reason == '' or self.revoked_reason is None: return x509.ReasonFlags.unspecified # depends on [control=['if'], data=[]] else: return getattr(x509.ReasonFlags, self.revoked_reason)
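The stored reason string names a member of `cryptography`'s `x509.ReasonFlags` enum, so the `getattr` lookup round-trips it. The enum member names below are real; the surrounding values are invented:

```python
from cryptography import x509

revoked_reason = "key_compromise"  # as stored on the model
flag = getattr(x509.ReasonFlags, revoked_reason)
assert flag is x509.ReasonFlags.key_compromise
# An empty or missing reason falls back to x509.ReasonFlags.unspecified.
```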
def read_input(buf, has_header = True): """Read the input from the given buffer (or stdin if no buffer is supplied). An optional header may be present as well""" # Use stdin if there is no supplied buffer if buf is None: buf = sys.stdin # Attempt to read a header if necessary header = {} if has_header: # Until we get a blank line, read "attr:val" lines, # setting the values in 'header' last_attr = None while True: line = buf.readline() # remove the last character (which is a newline) line = line[:-1] # When we encounter a blank line, we are done with the header if len(line) == 0: break colon = line.find(':') # If we can't find a colon, then it might be that we are # on a continuation line, and it belongs to the previous attribute if colon < 0: if last_attr: header[last_attr] = header[last_attr] + '\n' + urllib.parse.unquote(line) else: continue # extract it and set value in settings last_attr = attr = line[:colon] val = urllib.parse.unquote(line[colon+1:]) header[attr] = val return buf, header
def function[read_input, parameter[buf, has_header]]: constant[Read the input from the given buffer (or stdin if no buffer) is supplied. An optional header may be present as well] if compare[name[buf] is constant[None]] begin[:] variable[buf] assign[=] name[sys].stdin variable[header] assign[=] dictionary[[], []] if name[has_header] begin[:] variable[last_attr] assign[=] constant[None] while constant[True] begin[:] variable[line] assign[=] call[name[buf].readline, parameter[]] variable[line] assign[=] call[name[line]][<ast.Slice object at 0x7da1b1983820>] if compare[call[name[len], parameter[name[line]]] equal[==] constant[0]] begin[:] break variable[colon] assign[=] call[name[line].find, parameter[constant[:]]] if compare[name[colon] less[<] constant[0]] begin[:] if name[last_attr] begin[:] call[name[header]][name[last_attr]] assign[=] binary_operation[binary_operation[call[name[header]][name[last_attr]] + constant[ ]] + call[name[urllib].parse.unquote, parameter[name[line]]]] variable[last_attr] assign[=] call[name[line]][<ast.Slice object at 0x7da1b1980790>] variable[val] assign[=] call[name[urllib].parse.unquote, parameter[call[name[line]][<ast.Slice object at 0x7da1b1980820>]]] call[name[header]][name[attr]] assign[=] name[val] return[tuple[[<ast.Name object at 0x7da1b19811b0>, <ast.Name object at 0x7da1b19828c0>]]]
keyword[def] identifier[read_input] ( identifier[buf] , identifier[has_header] = keyword[True] ): literal[string] keyword[if] identifier[buf] keyword[is] keyword[None] : identifier[buf] = identifier[sys] . identifier[stdin] identifier[header] ={} keyword[if] identifier[has_header] : identifier[last_attr] = keyword[None] keyword[while] keyword[True] : identifier[line] = identifier[buf] . identifier[readline] () identifier[line] = identifier[line] [:- literal[int] ] keyword[if] identifier[len] ( identifier[line] )== literal[int] : keyword[break] identifier[colon] = identifier[line] . identifier[find] ( literal[string] ) keyword[if] identifier[colon] < literal[int] : keyword[if] identifier[last_attr] : identifier[header] [ identifier[last_attr] ]= identifier[header] [ identifier[last_attr] ]+ literal[string] + identifier[urllib] . identifier[parse] . identifier[unquote] ( identifier[line] ) keyword[else] : keyword[continue] identifier[last_attr] = identifier[attr] = identifier[line] [: identifier[colon] ] identifier[val] = identifier[urllib] . identifier[parse] . identifier[unquote] ( identifier[line] [ identifier[colon] + literal[int] :]) identifier[header] [ identifier[attr] ]= identifier[val] keyword[return] identifier[buf] , identifier[header]
def read_input(buf, has_header=True): """Read the input from the given buffer (or stdin if no buffer is supplied). An optional header may be present as well""" # Use stdin if there is no supplied buffer if buf is None: buf = sys.stdin # depends on [control=['if'], data=['buf']] # Attempt to read a header if necessary header = {} if has_header: # Until we get a blank line, read "attr:val" lines, # setting the values in 'header' last_attr = None while True: line = buf.readline() # remove the last character (which is a newline) line = line[:-1] # When we encounter a blank line, we are done with the header if len(line) == 0: break # depends on [control=['if'], data=[]] colon = line.find(':') # If we can't find a colon, then it might be that we are # on a continuation line, and it belongs to the previous attribute if colon < 0: if last_attr: header[last_attr] = header[last_attr] + '\n' + urllib.parse.unquote(line) # depends on [control=['if'], data=[]] else: continue # depends on [control=['if'], data=[]] # extract it and set value in settings last_attr = attr = line[:colon] val = urllib.parse.unquote(line[colon + 1:]) header[attr] = val # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] return (buf, header)
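A self-contained run of `read_input` as defined above (only the sample header text is invented). One quirk worth knowing: a colon-less continuation line folds into the previous attribute but then still falls through to the assignment below it, leaving a stray truncated key behind, so this sketch sticks to plain attr:val pairs:

```python
import io

raw = io.StringIO("Subject:hello%20world\nFrom:alice\n\npayload\n")
buf, header = read_input(raw)
assert header == {"Subject": "hello world", "From": "alice"}  # values unquoted
assert buf.read() == "payload\n"  # the body is left unread in the buffer
```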
def to_ascii_equivalent(text): """ Converts any non-ASCII characters (accents, etc.) to their best-fit ASCII equivalents """ if text is None: return None elif isinstance(text, binary_type): text = text.decode(DEFAULT_ENCODING) elif not isinstance(text, text_type): text = text_type(text) text = EMPTY_STR.join(_ASCII_PUNCTUATION_MAP.get(c, c) for c in text) return EMPTY_STR.join(c for c in unicodedata.normalize('NFD', text) if unicodedata.category(c) != 'Mn')
def function[to_ascii_equivalent, parameter[text]]: constant[ Converts any non-ASCII characters (accents, etc.) to their best-fit ASCII equivalents ] if compare[name[text] is constant[None]] begin[:] return[constant[None]] variable[text] assign[=] call[name[EMPTY_STR].join, parameter[<ast.GeneratorExp object at 0x7da207f03640>]] return[call[name[EMPTY_STR].join, parameter[<ast.GeneratorExp object at 0x7da207f02bf0>]]]
keyword[def] identifier[to_ascii_equivalent] ( identifier[text] ): literal[string] keyword[if] identifier[text] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[elif] identifier[isinstance] ( identifier[text] , identifier[binary_type] ): identifier[text] = identifier[text] . identifier[decode] ( identifier[DEFAULT_ENCODING] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[text] , identifier[text_type] ): identifier[text] = identifier[text_type] ( identifier[text] ) identifier[text] = identifier[EMPTY_STR] . identifier[join] ( identifier[_ASCII_PUNCTUATION_MAP] . identifier[get] ( identifier[c] , identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[text] ) keyword[return] identifier[EMPTY_STR] . identifier[join] ( identifier[c] keyword[for] identifier[c] keyword[in] identifier[unicodedata] . identifier[normalize] ( literal[string] , identifier[text] ) keyword[if] identifier[unicodedata] . identifier[category] ( identifier[c] )!= literal[string] )
def to_ascii_equivalent(text): """ Converts any non-ASCII characters (accents, etc.) to their best-fit ASCII equivalents """ if text is None: return None # depends on [control=['if'], data=[]] elif isinstance(text, binary_type): text = text.decode(DEFAULT_ENCODING) # depends on [control=['if'], data=[]] elif not isinstance(text, text_type): text = text_type(text) # depends on [control=['if'], data=[]] text = EMPTY_STR.join((_ASCII_PUNCTUATION_MAP.get(c, c) for c in text)) return EMPTY_STR.join((c for c in unicodedata.normalize('NFD', text) if unicodedata.category(c) != 'Mn'))
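The accent-stripping step is plain stdlib: NFD decomposition splits base letters from combining marks (category `Mn`), which are then dropped. That step in isolation (the record's private punctuation map is skipped):

```python
import unicodedata

text = "Ångström café"
decomposed = unicodedata.normalize("NFD", text)
ascii_equiv = "".join(c for c in decomposed if unicodedata.category(c) != "Mn")
print(ascii_equiv)  # Angstrom cafe
```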
def meanangledAngle(self,dangle,smallest=False): """ NAME: meanangledAngle PURPOSE: calculate the mean perpendicular angle at a given angle INPUT: dangle - angle offset along the stream smallest= (False) calculate for smallest eigenvalue direction rather than for middle OUTPUT: mean perpendicular angle HISTORY: 2013-12-06 - Written - Bovy (IAS) """ if smallest: eigIndx= 0 else: eigIndx= 1 aplow= numpy.amax([numpy.sqrt(self._sortedSigOEig[eigIndx])\ *self._tdisrupt*5., self._sigangle]) num= integrate.quad(lambda x: x*self.pangledAngle(x,dangle,smallest), aplow,-aplow)[0] denom= integrate.quad(self.pangledAngle,aplow,-aplow, (dangle,smallest))[0] if denom == 0.: return numpy.nan else: return num/denom
def function[meanangledAngle, parameter[self, dangle, smallest]]: constant[ NAME: meanangledAngle PURPOSE: calculate the mean perpendicular angle at a given angle INPUT: dangle - angle offset along the stream smallest= (False) calculate for smallest eigenvalue direction rather than for middle OUTPUT: mean perpendicular angle HISTORY: 2013-12-06 - Written - Bovy (IAS) ] if name[smallest] begin[:] variable[eigIndx] assign[=] constant[0] variable[aplow] assign[=] call[name[numpy].amax, parameter[list[[<ast.BinOp object at 0x7da1b0c455a0>, <ast.Attribute object at 0x7da1b0c45d80>]]]] variable[num] assign[=] call[call[name[integrate].quad, parameter[<ast.Lambda object at 0x7da1b0c45180>, name[aplow], <ast.UnaryOp object at 0x7da1b0c45db0>]]][constant[0]] variable[denom] assign[=] call[call[name[integrate].quad, parameter[name[self].pangledAngle, name[aplow], <ast.UnaryOp object at 0x7da1b0c45e40>, tuple[[<ast.Name object at 0x7da1b0c464d0>, <ast.Name object at 0x7da1b0c45fc0>]]]]][constant[0]] if compare[name[denom] equal[==] constant[0.0]] begin[:] return[name[numpy].nan]
keyword[def] identifier[meanangledAngle] ( identifier[self] , identifier[dangle] , identifier[smallest] = keyword[False] ): literal[string] keyword[if] identifier[smallest] : identifier[eigIndx] = literal[int] keyword[else] : identifier[eigIndx] = literal[int] identifier[aplow] = identifier[numpy] . identifier[amax] ([ identifier[numpy] . identifier[sqrt] ( identifier[self] . identifier[_sortedSigOEig] [ identifier[eigIndx] ])* identifier[self] . identifier[_tdisrupt] * literal[int] , identifier[self] . identifier[_sigangle] ]) identifier[num] = identifier[integrate] . identifier[quad] ( keyword[lambda] identifier[x] : identifier[x] * identifier[self] . identifier[pangledAngle] ( identifier[x] , identifier[dangle] , identifier[smallest] ), identifier[aplow] ,- identifier[aplow] )[ literal[int] ] identifier[denom] = identifier[integrate] . identifier[quad] ( identifier[self] . identifier[pangledAngle] , identifier[aplow] ,- identifier[aplow] , ( identifier[dangle] , identifier[smallest] ))[ literal[int] ] keyword[if] identifier[denom] == literal[int] : keyword[return] identifier[numpy] . identifier[nan] keyword[else] : keyword[return] identifier[num] / identifier[denom]
def meanangledAngle(self, dangle, smallest=False): """ NAME: meanangledAngle PURPOSE: calculate the mean perpendicular angle at a given angle INPUT: dangle - angle offset along the stream smallest= (False) calculate for smallest eigenvalue direction rather than for middle OUTPUT: mean perpendicular angle HISTORY: 2013-12-06 - Written - Bovy (IAS) """ if smallest: eigIndx = 0 # depends on [control=['if'], data=[]] else: eigIndx = 1 aplow = numpy.amax([numpy.sqrt(self._sortedSigOEig[eigIndx]) * self._tdisrupt * 5.0, self._sigangle]) num = integrate.quad(lambda x: x * self.pangledAngle(x, dangle, smallest), aplow, -aplow)[0] denom = integrate.quad(self.pangledAngle, aplow, -aplow, (dangle, smallest))[0] if denom == 0.0: return numpy.nan # depends on [control=['if'], data=[]] else: return num / denom
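The mean is a ratio of two `scipy.integrate.quad` calls, ∫x·p(x) dx over ∫p(x) dx; note the record integrates from +aplow down to -aplow, and the resulting sign flip cancels in the ratio. The same pattern with a stand-in density (galpy's `pangledAngle` is not reproduced here):

```python
import numpy
from scipy import integrate

def p(x):
    # Stand-in for pangledAngle at a fixed dangle: unnormalized Gaussian.
    return numpy.exp(-0.5 * (x - 0.3) ** 2)

num = integrate.quad(lambda x: x * p(x), -5.0, 5.0)[0]
den = integrate.quad(p, -5.0, 5.0)[0]
print(num / den)  # ~0.3, the mean of the stand-in density
```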
def seperate_end_page_links(stream_data): ''' Separate out page blocks at the end of a StreamField. Accepts: List of streamfield blocks Returns: Tuple of 2 lists of blocks - (remaining body, end page links) ''' stream_data_copy = list(stream_data) end_page_links = [] for block in stream_data_copy[::-1]: if block['type'] == 'page': end_page_links.insert(0, block) stream_data_copy.pop() else: break return (stream_data_copy, end_page_links)
def function[seperate_end_page_links, parameter[stream_data]]: constant[ Seperate out page blocks at the end of a StreamField. Accepts: List of streamfield blocks Returns: Tuple of 2 lists of blocks - (remaining body, final article) ] variable[stream_data_copy] assign[=] call[name[list], parameter[name[stream_data]]] variable[end_page_links] assign[=] list[[]] for taget[name[block]] in starred[call[name[stream_data_copy]][<ast.Slice object at 0x7da1b03a4970>]] begin[:] if compare[call[name[block]][constant[type]] equal[==] constant[page]] begin[:] call[name[end_page_links].insert, parameter[constant[0], name[block]]] call[name[stream_data_copy].pop, parameter[]] return[tuple[[<ast.Name object at 0x7da1b03a4e50>, <ast.Name object at 0x7da1b03a4d30>]]]
keyword[def] identifier[seperate_end_page_links] ( identifier[stream_data] ): literal[string] identifier[stream_data_copy] = identifier[list] ( identifier[stream_data] ) identifier[end_page_links] =[] keyword[for] identifier[block] keyword[in] identifier[stream_data_copy] [::- literal[int] ]: keyword[if] identifier[block] [ literal[string] ]== literal[string] : identifier[end_page_links] . identifier[insert] ( literal[int] , identifier[block] ) identifier[stream_data_copy] . identifier[pop] () keyword[else] : keyword[break] keyword[return] ( identifier[stream_data_copy] , identifier[end_page_links] )
def seperate_end_page_links(stream_data): """ Separate out page blocks at the end of a StreamField. Accepts: List of streamfield blocks Returns: Tuple of 2 lists of blocks - (remaining body, end page links) """ stream_data_copy = list(stream_data) end_page_links = [] for block in stream_data_copy[::-1]: if block['type'] == 'page': end_page_links.insert(0, block) stream_data_copy.pop() # depends on [control=['if'], data=[]] else: break # depends on [control=['for'], data=['block']] return (stream_data_copy, end_page_links)
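A quick self-contained check of the reverse-walk-and-pop split, with dict blocks shaped like Wagtail StreamField data:

```python
body = [
    {"type": "paragraph", "value": "intro"},
    {"type": "page", "value": 7},
    {"type": "page", "value": 9},
]
remaining, links = seperate_end_page_links(body)
assert remaining == [{"type": "paragraph", "value": "intro"}]
assert [b["value"] for b in links] == [7, 9]  # original order preserved
```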
def _generate_config(self): """Generate a configuration that can be sent to the Hottop roaster. Configuration settings need to be represented inside of a byte array that is then written to the serial interface. Much of the configuration is static, but control settings are also included and pulled from the shared dictionary. :returns: Byte array of the prepared configuration. """ config = bytearray([0x00] * 36) config[0] = 0xA5 config[1] = 0x96 config[2] = 0xB0 config[3] = 0xA0 config[4] = 0x01 config[5] = 0x01 config[6] = 0x24 config[10] = self._config.get('heater', 0) config[11] = self._config.get('fan', 0) config[12] = self._config.get('main_fan', 0) config[16] = self._config.get('solenoid', 0) config[17] = self._config.get('drum_motor', 0) if self._config.get('heater', 0) > 0: # Override the user here since the drum MUST be on for heat config[17] = 1 config[18] = self._config.get('cooling_motor', 0) config[35] = sum([b for b in config[:35]]) & 0xFF return bytes(config)
def function[_generate_config, parameter[self]]: constant[Generate a configuration that can be sent to the Hottop roaster. Configuration settings need to be represented inside of a byte array that is then written to the serial interface. Much of the configuration is static, but control settings are also included and pulled from the shared dictionary. :returns: Byte array of the prepared configuration. ] variable[config] assign[=] call[name[bytearray], parameter[binary_operation[list[[<ast.Constant object at 0x7da2044c2b60>]] * constant[36]]]] call[name[config]][constant[0]] assign[=] constant[165] call[name[config]][constant[1]] assign[=] constant[150] call[name[config]][constant[2]] assign[=] constant[176] call[name[config]][constant[3]] assign[=] constant[160] call[name[config]][constant[4]] assign[=] constant[1] call[name[config]][constant[5]] assign[=] constant[1] call[name[config]][constant[6]] assign[=] constant[36] call[name[config]][constant[10]] assign[=] call[name[self]._config.get, parameter[constant[heater], constant[0]]] call[name[config]][constant[11]] assign[=] call[name[self]._config.get, parameter[constant[fan], constant[0]]] call[name[config]][constant[12]] assign[=] call[name[self]._config.get, parameter[constant[main_fan], constant[0]]] call[name[config]][constant[16]] assign[=] call[name[self]._config.get, parameter[constant[solenoid], constant[0]]] call[name[config]][constant[17]] assign[=] call[name[self]._config.get, parameter[constant[drum_motor], constant[0]]] if compare[call[name[self]._config.get, parameter[constant[heater], constant[0]]] greater[>] constant[0]] begin[:] call[name[config]][constant[17]] assign[=] constant[1] call[name[config]][constant[18]] assign[=] call[name[self]._config.get, parameter[constant[cooling_motor], constant[0]]] call[name[config]][constant[35]] assign[=] binary_operation[call[name[sum], parameter[<ast.ListComp object at 0x7da207f9abc0>]] <ast.BitAnd object at 0x7da2590d6b60> constant[255]] return[call[name[bytes], parameter[name[config]]]]
keyword[def] identifier[_generate_config] ( identifier[self] ): literal[string] identifier[config] = identifier[bytearray] ([ literal[int] ]* literal[int] ) identifier[config] [ literal[int] ]= literal[int] identifier[config] [ literal[int] ]= literal[int] identifier[config] [ literal[int] ]= literal[int] identifier[config] [ literal[int] ]= literal[int] identifier[config] [ literal[int] ]= literal[int] identifier[config] [ literal[int] ]= literal[int] identifier[config] [ literal[int] ]= literal[int] identifier[config] [ literal[int] ]= identifier[self] . identifier[_config] . identifier[get] ( literal[string] , literal[int] ) identifier[config] [ literal[int] ]= identifier[self] . identifier[_config] . identifier[get] ( literal[string] , literal[int] ) identifier[config] [ literal[int] ]= identifier[self] . identifier[_config] . identifier[get] ( literal[string] , literal[int] ) identifier[config] [ literal[int] ]= identifier[self] . identifier[_config] . identifier[get] ( literal[string] , literal[int] ) identifier[config] [ literal[int] ]= identifier[self] . identifier[_config] . identifier[get] ( literal[string] , literal[int] ) keyword[if] identifier[self] . identifier[_config] . identifier[get] ( literal[string] , literal[int] )> literal[int] : identifier[config] [ literal[int] ]= literal[int] identifier[config] [ literal[int] ]= identifier[self] . identifier[_config] . identifier[get] ( literal[string] , literal[int] ) identifier[config] [ literal[int] ]= identifier[sum] ([ identifier[b] keyword[for] identifier[b] keyword[in] identifier[config] [: literal[int] ]])& literal[int] keyword[return] identifier[bytes] ( identifier[config] )
def _generate_config(self): """Generate a configuration that can be sent to the Hottop roaster. Configuration settings need to be represented inside of a byte array that is then written to the serial interface. Much of the configuration is static, but control settings are also included and pulled from the shared dictionary. :returns: Byte array of the prepared configuration. """ config = bytearray([0] * 36) config[0] = 165 config[1] = 150 config[2] = 176 config[3] = 160 config[4] = 1 config[5] = 1 config[6] = 36 config[10] = self._config.get('heater', 0) config[11] = self._config.get('fan', 0) config[12] = self._config.get('main_fan', 0) config[16] = self._config.get('solenoid', 0) config[17] = self._config.get('drum_motor', 0) if self._config.get('heater', 0) > 0: # Override the user here since the drum MUST be on for heat config[17] = 1 # depends on [control=['if'], data=[]] config[18] = self._config.get('cooling_motor', 0) config[35] = sum([b for b in config[:35]]) & 255 return bytes(config)
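The frame layout is fixed: header bytes up front, control values at set offsets, and byte 35 holding a modulo-256 checksum of the first 35 bytes. Verifying that checksum arithmetic on an all-defaults frame:

```python
frame = bytearray([0x00] * 36)
frame[0], frame[1], frame[2], frame[3] = 0xA5, 0x96, 0xB0, 0xA0
frame[4] = frame[5] = 0x01
frame[6] = 0x24
frame[35] = sum(frame[:35]) & 0xFF
print(hex(frame[35]))  # 0xb1 == (0xA5+0x96+0xB0+0xA0+0x01+0x01+0x24) % 256
```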
def _update_from_pb(self, app_profile_pb): """Refresh self from the server-provided protobuf. Helper for :meth:`from_pb` and :meth:`reload`. """ self.routing_policy_type = None self.allow_transactional_writes = None self.cluster_id = None self.description = app_profile_pb.description routing_policy_type = None if app_profile_pb.HasField("multi_cluster_routing_use_any"): routing_policy_type = RoutingPolicyType.ANY self.allow_transactional_writes = False else: routing_policy_type = RoutingPolicyType.SINGLE self.cluster_id = app_profile_pb.single_cluster_routing.cluster_id self.allow_transactional_writes = ( app_profile_pb.single_cluster_routing.allow_transactional_writes ) self.routing_policy_type = routing_policy_type
def function[_update_from_pb, parameter[self, app_profile_pb]]: constant[Refresh self from the server-provided protobuf. Helper for :meth:`from_pb` and :meth:`reload`. ] name[self].routing_policy_type assign[=] constant[None] name[self].allow_transactional_writes assign[=] constant[None] name[self].cluster_id assign[=] constant[None] name[self].description assign[=] name[app_profile_pb].description variable[routing_policy_type] assign[=] constant[None] if call[name[app_profile_pb].HasField, parameter[constant[multi_cluster_routing_use_any]]] begin[:] variable[routing_policy_type] assign[=] name[RoutingPolicyType].ANY name[self].allow_transactional_writes assign[=] constant[False] name[self].routing_policy_type assign[=] name[routing_policy_type]
keyword[def] identifier[_update_from_pb] ( identifier[self] , identifier[app_profile_pb] ): literal[string] identifier[self] . identifier[routing_policy_type] = keyword[None] identifier[self] . identifier[allow_transactional_writes] = keyword[None] identifier[self] . identifier[cluster_id] = keyword[None] identifier[self] . identifier[description] = identifier[app_profile_pb] . identifier[description] identifier[routing_policy_type] = keyword[None] keyword[if] identifier[app_profile_pb] . identifier[HasField] ( literal[string] ): identifier[routing_policy_type] = identifier[RoutingPolicyType] . identifier[ANY] identifier[self] . identifier[allow_transactional_writes] = keyword[False] keyword[else] : identifier[routing_policy_type] = identifier[RoutingPolicyType] . identifier[SINGLE] identifier[self] . identifier[cluster_id] = identifier[app_profile_pb] . identifier[single_cluster_routing] . identifier[cluster_id] identifier[self] . identifier[allow_transactional_writes] =( identifier[app_profile_pb] . identifier[single_cluster_routing] . identifier[allow_transactional_writes] ) identifier[self] . identifier[routing_policy_type] = identifier[routing_policy_type]
def _update_from_pb(self, app_profile_pb): """Refresh self from the server-provided protobuf. Helper for :meth:`from_pb` and :meth:`reload`. """ self.routing_policy_type = None self.allow_transactional_writes = None self.cluster_id = None self.description = app_profile_pb.description routing_policy_type = None if app_profile_pb.HasField('multi_cluster_routing_use_any'): routing_policy_type = RoutingPolicyType.ANY self.allow_transactional_writes = False # depends on [control=['if'], data=[]] else: routing_policy_type = RoutingPolicyType.SINGLE self.cluster_id = app_profile_pb.single_cluster_routing.cluster_id self.allow_transactional_writes = app_profile_pb.single_cluster_routing.allow_transactional_writes self.routing_policy_type = routing_policy_type
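A hedged consumer-side sketch; `app_profile` stands in for a google-cloud-bigtable AppProfile instance, and per the docstring `reload` funnels through this helper:

```python
app_profile.reload()  # refreshes state via _update_from_pb
if app_profile.routing_policy_type == RoutingPolicyType.SINGLE:
    print(app_profile.cluster_id, app_profile.allow_transactional_writes)
else:
    # ANY routing: transactional writes are always forced off.
    assert app_profile.allow_transactional_writes is False
```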
def add_serverconnection_methods(cls): """Add a bunch of methods to an :class:`irc.client.SimpleIRCClient` to send commands and messages. Basically it wraps a bunch of methods from :class:`irc.client.ServerConnection` so they are deferred via :meth:`irc.schedule.IScheduler.execute_after`. That way, you can easily send, even if the IRCClient is running in :class:`IRCClient.process_forever` in another thread. On the plus side you can use positional and keyword arguments instead of just positional ones. :param cls: The class to add the methods to. :type cls: :class:`irc.client.SimpleIRCClient` :returns: None """ methods = ['action', 'admin', 'cap', 'ctcp', 'ctcp_reply', 'globops', 'info', 'invite', 'ison', 'join', 'kick', 'links', 'list', 'lusers', 'mode', 'motd', 'names', 'nick', 'notice', 'oper', 'part', 'part', 'pass_', 'ping', 'pong', 'privmsg', 'privmsg_many', 'quit', 'send_raw', 'squit', 'stats', 'time', 'topic', 'trace', 'user', 'userhost', 'users', 'version', 'wallops', 'who', 'whois', 'whowas'] for m in methods: method = _wrap_execute_after(m) f = getattr(irc.client.ServerConnection, m) method.__doc__ = f.__doc__ setattr(cls, method.__name__, method) return cls
def function[add_serverconnection_methods, parameter[cls]]: constant[Add a bunch of methods to an :class:`irc.client.SimpleIRCClient` to send commands and messages. Basically it wraps a bunch of methdos from :class:`irc.client.ServerConnection` to be :meth:`irc.schedule.IScheduler.execute_after`. That way, you can easily send, even if the IRCClient is running in :class:`IRCClient.process_forever` in another thread. On the plus side you can use positional and keyword arguments instead of just positional ones. :param cls: The class to add the methods do. :type cls: :class:`irc.client.SimpleIRCClient` :returns: None ] variable[methods] assign[=] list[[<ast.Constant object at 0x7da1b0bd5f30>, <ast.Constant object at 0x7da1b0bd7f40>, <ast.Constant object at 0x7da1b0bd72e0>, <ast.Constant object at 0x7da1b0bd4a30>, <ast.Constant object at 0x7da1b0bd5c60>, <ast.Constant object at 0x7da1b0bd4be0>, <ast.Constant object at 0x7da1b0bd6020>, <ast.Constant object at 0x7da1b0bd6b60>, <ast.Constant object at 0x7da1b0bd6ce0>, <ast.Constant object at 0x7da1b0bd6dd0>, <ast.Constant object at 0x7da1b0bd66b0>, <ast.Constant object at 0x7da1b0bd6470>, <ast.Constant object at 0x7da1b0bd6320>, <ast.Constant object at 0x7da1b0bd4b50>, <ast.Constant object at 0x7da1b0bd4340>, <ast.Constant object at 0x7da1b0bd60e0>, <ast.Constant object at 0x7da1b0bd53f0>, <ast.Constant object at 0x7da1b0bd7400>, <ast.Constant object at 0x7da1b0bd44f0>, <ast.Constant object at 0x7da1b0bd6380>, <ast.Constant object at 0x7da1b0bd72b0>, <ast.Constant object at 0x7da1b0bd55a0>, <ast.Constant object at 0x7da1b0bd7d90>, <ast.Constant object at 0x7da1b0bd6200>, <ast.Constant object at 0x7da1b0bd6c50>, <ast.Constant object at 0x7da1b0bd5570>, <ast.Constant object at 0x7da1b0bd5720>, <ast.Constant object at 0x7da1b0bd5690>, <ast.Constant object at 0x7da1b0bd7160>, <ast.Constant object at 0x7da1b0bd4550>, <ast.Constant object at 0x7da1b0bd6770>, <ast.Constant object at 0x7da1b0bd5ff0>, <ast.Constant object at 0x7da1b0bd4d00>, <ast.Constant object at 0x7da1b0bd5d50>, <ast.Constant object at 0x7da1b0bd4160>, <ast.Constant object at 0x7da1b0bd6980>, <ast.Constant object at 0x7da1b0bd5e10>, <ast.Constant object at 0x7da1b0bd54b0>, <ast.Constant object at 0x7da1b0bd5150>, <ast.Constant object at 0x7da1b0bd4850>, <ast.Constant object at 0x7da1b0bd46d0>, <ast.Constant object at 0x7da1b0bd4fa0>]] for taget[name[m]] in starred[name[methods]] begin[:] variable[method] assign[=] call[name[_wrap_execute_after], parameter[name[m]]] variable[f] assign[=] call[name[getattr], parameter[name[irc].client.ServerConnection, name[m]]] name[method].__doc__ assign[=] name[f].__doc__ call[name[setattr], parameter[name[cls], name[method].__name__, name[method]]] return[name[cls]]
keyword[def] identifier[add_serverconnection_methods] ( identifier[cls] ): literal[string] identifier[methods] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[m] keyword[in] identifier[methods] : identifier[method] = identifier[_wrap_execute_after] ( identifier[m] ) identifier[f] = identifier[getattr] ( identifier[irc] . identifier[client] . identifier[ServerConnection] , identifier[m] ) identifier[method] . identifier[__doc__] = identifier[f] . identifier[__doc__] identifier[setattr] ( identifier[cls] , identifier[method] . identifier[__name__] , identifier[method] ) keyword[return] identifier[cls]
def add_serverconnection_methods(cls): """Add a bunch of methods to an :class:`irc.client.SimpleIRCClient` to send commands and messages. Basically it wraps a bunch of methods from :class:`irc.client.ServerConnection` so they are executed via :meth:`irc.schedule.IScheduler.execute_after`. That way, you can safely send even while the IRCClient is running :meth:`IRCClient.process_forever` in another thread. On the plus side you can use positional and keyword arguments instead of just positional ones. :param cls: The class to add the methods to. :type cls: :class:`irc.client.SimpleIRCClient` :returns: the decorated class """ methods = ['action', 'admin', 'cap', 'ctcp', 'ctcp_reply', 'globops', 'info', 'invite', 'ison', 'join', 'kick', 'links', 'list', 'lusers', 'mode', 'motd', 'names', 'nick', 'notice', 'oper', 'part', 'pass_', 'ping', 'pong', 'privmsg', 'privmsg_many', 'quit', 'send_raw', 'squit', 'stats', 'time', 'topic', 'trace', 'user', 'userhost', 'users', 'version', 'wallops', 'who', 'whois', 'whowas'] for m in methods: method = _wrap_execute_after(m) f = getattr(irc.client.ServerConnection, m) method.__doc__ = f.__doc__ setattr(cls, method.__name__, method) # depends on [control=['for'], data=['m']] return cls
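A minimal usage sketch for the record above (an illustration, not part of the source): it assumes the surrounding module defines `_wrap_execute_after`, that the `irc` package (python-irc) is installed, and that the wrapped methods take the same arguments as their ServerConnection counterparts; server, nick and channel names are placeholders.

import irc.client

@add_serverconnection_methods
class ThreadSafeClient(irc.client.SimpleIRCClient):
    """Client whose send methods may be called from other threads."""

client = ThreadSafeClient()
client.connect("irc.libera.chat", 6667, "examplenick")  # placeholder server/nick
# process_forever() can run in a worker thread; the wrapped methods below
# are scheduled on the reactor instead of touching the socket directly.
client.join("#examplechannel")
client.privmsg("#examplechannel", "hello from another thread")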
def upload(self, media_type, media_file): """ Upload temporary media. For details see http://mp.weixin.qq.com/wiki/5/963fc70b80dc75483a271298a76a8d59.html :param media_type: Media file type; one of image, voice, video or thumb (thumbnail) :param media_file: The file to upload, a file-like object :return: The JSON data returned by the API """ return self._post( url='media/upload', params={ 'type': media_type }, files={ 'media': media_file } )
def function[upload, parameter[self, media_type, media_file]]: constant[ Upload temporary media. For details see http://mp.weixin.qq.com/wiki/5/963fc70b80dc75483a271298a76a8d59.html :param media_type: Media file type; one of image, voice, video or thumb (thumbnail) :param media_file: The file to upload, a file-like object :return: The JSON data returned by the API ] return[call[name[self]._post, parameter[]]]
keyword[def] identifier[upload] ( identifier[self] , identifier[media_type] , identifier[media_file] ): literal[string] keyword[return] identifier[self] . identifier[_post] ( identifier[url] = literal[string] , identifier[params] ={ literal[string] : identifier[media_type] }, identifier[files] ={ literal[string] : identifier[media_file] } )
def upload(self, media_type, media_file): """ Upload temporary media. For details see http://mp.weixin.qq.com/wiki/5/963fc70b80dc75483a271298a76a8d59.html :param media_type: Media file type; one of image, voice, video or thumb (thumbnail) :param media_file: The file to upload, a file-like object :return: The JSON data returned by the API """ return self._post(url='media/upload', params={'type': media_type}, files={'media': media_file})
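A hedged usage sketch for the `upload` record, assuming it belongs to a WeChat API client whose `_post` helper signs and sends the authenticated HTTP request; the client construction is hypothetical and depends on the surrounding library.

# client = SomeWeChatClient(appid, secret)  # hypothetical construction
with open("reply.jpg", "rb") as media_file:
    result = client.upload(media_type="image", media_file=media_file)
# Per the linked WeChat documentation, the returned dict is expected to
# carry "type", "media_id" and "created_at".
print(result["media_id"])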
def export_original_data(self): """ Get the original data """ return {key: self.get_original_field_value(key) for key in self.__original_data__.keys()}
def function[export_original_data, parameter[self]]: constant[ Get the original data ] return[<ast.DictComp object at 0x7da1b0b6f700>]
keyword[def] identifier[export_original_data] ( identifier[self] ): literal[string] keyword[return] { identifier[key] : identifier[self] . identifier[get_original_field_value] ( identifier[key] ) keyword[for] identifier[key] keyword[in] identifier[self] . identifier[__original_data__] . identifier[keys] ()}
def export_original_data(self): """ Get the original data """ return {key: self.get_original_field_value(key) for key in self.__original_data__.keys()}
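A minimal sketch of the dirty-tracking pattern the record above implies; the enclosing class is reconstructed here as an assumption, and only `export_original_data` itself comes from the source.

class Model:
    def __init__(self, **fields):
        self.__original_data__ = dict(fields)  # snapshot taken at load time
        self._data = dict(fields)              # live, mutable values

    def get_original_field_value(self, key):
        return self.__original_data__[key]

    def export_original_data(self):
        """ Get the original data """
        return {key: self.get_original_field_value(key)
                for key in self.__original_data__.keys()}

m = Model(name="before")
m._data["name"] = "after"
assert m.export_original_data() == {"name": "before"}  # snapshot survives edits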
def get(self, name: str, sig: Tuple) -> Optional[object]: """ Return the object representing name if it is cached :param name: name of object :param sig: unique signature of object :return: object if exists and signature matches """ if name not in self._cache: return None if self._cache[name].sig != sig: # The signature no longer matches: evict the stale entry # and persist the updated index. del self._cache[name] self._update() return None with open(self._cache[name].loc, 'rb') as f: return pickle.load(f)
def function[get, parameter[self, name, sig]]: constant[ Return the object representing name if it is cached :param name: name of object :param sig: unique signature of object :return: object if exists and signature matches ] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self]._cache] begin[:] return[constant[None]] if compare[call[name[self]._cache][name[name]].sig not_equal[!=] name[sig]] begin[:] <ast.Delete object at 0x7da18dc98430> call[name[self]._update, parameter[]] return[constant[None]] with call[name[open], parameter[call[name[self]._cache][name[name]].loc, constant[rb]]] begin[:] return[call[name[pickle].load, parameter[name[f]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[name] : identifier[str] , identifier[sig] : identifier[Tuple] )-> identifier[Optional] [ identifier[object] ]: literal[string] keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[_cache] : keyword[return] keyword[None] keyword[if] identifier[self] . identifier[_cache] [ identifier[name] ]. identifier[sig] != identifier[sig] : keyword[del] identifier[self] . identifier[_cache] [ identifier[name] ] identifier[self] . identifier[_update] () keyword[return] keyword[None] keyword[with] identifier[open] ( identifier[self] . identifier[_cache] [ identifier[name] ]. identifier[loc] , literal[string] ) keyword[as] identifier[f] : keyword[return] identifier[pickle] . identifier[load] ( identifier[f] )
def get(self, name: str, sig: Tuple) -> Optional[object]: """ Return the object representing name if it is cached :param name: name of object :param sig: unique signature of object :return: object if exists and signature matches """ if name not in self._cache: return None # depends on [control=['if'], data=[]] if self._cache[name].sig != sig: del self._cache[name] self._update() return None # depends on [control=['if'], data=[]] with open(self._cache[name].loc, 'rb') as f: return pickle.load(f) # depends on [control=['with'], data=['f']]
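A sketch of how the pieces around `get` might fit together: the CacheEntry shape is inferred from the `.sig` and `.loc` attribute accesses, and the `put` counterpart and host class are hypothetical, not taken from the source.

import pickle
from typing import NamedTuple, Tuple

class CacheEntry(NamedTuple):
    sig: Tuple  # signature the cached object was built from
    loc: str    # path of the pickle file on disk

class DiskCache:
    """Hypothetical host class for the get() record above."""

    def __init__(self):
        self._cache = {}

    def _update(self):
        pass  # would persist the index to disk; omitted in this sketch

    def put(self, name, sig, obj, loc):
        # Hypothetical counterpart to get(): pickle obj and record its signature.
        with open(loc, 'wb') as f:
            pickle.dump(obj, f)
        self._cache[name] = CacheEntry(sig=sig, loc=loc)
        self._update()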
def GetArchiveTypeIndicators(cls, path_spec, resolver_context=None): """Determines if a file contains a supported archive type. Args: path_spec (PathSpec): path specification. resolver_context (Optional[Context]): resolver context, where None represents the built-in context which is not multi process safe. Returns: list[str]: supported format type indicators. """ if (cls._archive_remainder_list is None or cls._archive_store is None): specification_store, remainder_list = cls._GetSpecificationStore( definitions.FORMAT_CATEGORY_ARCHIVE) cls._archive_remainder_list = remainder_list cls._archive_store = specification_store if cls._archive_scanner is None: cls._archive_scanner = cls._GetSignatureScanner(cls._archive_store) return cls._GetTypeIndicators( cls._archive_scanner, cls._archive_store, cls._archive_remainder_list, path_spec, resolver_context=resolver_context)
def function[GetArchiveTypeIndicators, parameter[cls, path_spec, resolver_context]]: constant[Determines if a file contains a supported archive type. Args: path_spec (PathSpec): path specification. resolver_context (Optional[Context]): resolver context, where None represents the built-in context which is not multi process safe. Returns: list[str]: supported format type indicators. ] if <ast.BoolOp object at 0x7da1b07a0400> begin[:] <ast.Tuple object at 0x7da1b07a19f0> assign[=] call[name[cls]._GetSpecificationStore, parameter[name[definitions].FORMAT_CATEGORY_ARCHIVE]] name[cls]._archive_remainder_list assign[=] name[remainder_list] name[cls]._archive_store assign[=] name[specification_store] if compare[name[cls]._archive_scanner is constant[None]] begin[:] name[cls]._archive_scanner assign[=] call[name[cls]._GetSignatureScanner, parameter[name[cls]._archive_store]] return[call[name[cls]._GetTypeIndicators, parameter[name[cls]._archive_scanner, name[cls]._archive_store, name[cls]._archive_remainder_list, name[path_spec]]]]
keyword[def] identifier[GetArchiveTypeIndicators] ( identifier[cls] , identifier[path_spec] , identifier[resolver_context] = keyword[None] ): literal[string] keyword[if] ( identifier[cls] . identifier[_archive_remainder_list] keyword[is] keyword[None] keyword[or] identifier[cls] . identifier[_archive_store] keyword[is] keyword[None] ): identifier[specification_store] , identifier[remainder_list] = identifier[cls] . identifier[_GetSpecificationStore] ( identifier[definitions] . identifier[FORMAT_CATEGORY_ARCHIVE] ) identifier[cls] . identifier[_archive_remainder_list] = identifier[remainder_list] identifier[cls] . identifier[_archive_store] = identifier[specification_store] keyword[if] identifier[cls] . identifier[_archive_scanner] keyword[is] keyword[None] : identifier[cls] . identifier[_archive_scanner] = identifier[cls] . identifier[_GetSignatureScanner] ( identifier[cls] . identifier[_archive_store] ) keyword[return] identifier[cls] . identifier[_GetTypeIndicators] ( identifier[cls] . identifier[_archive_scanner] , identifier[cls] . identifier[_archive_store] , identifier[cls] . identifier[_archive_remainder_list] , identifier[path_spec] , identifier[resolver_context] = identifier[resolver_context] )
def GetArchiveTypeIndicators(cls, path_spec, resolver_context=None): """Determines if a file contains a supported archive type. Args: path_spec (PathSpec): path specification. resolver_context (Optional[Context]): resolver context, where None represents the built-in context which is not multi process safe. Returns: list[str]: supported format type indicators. """ if cls._archive_remainder_list is None or cls._archive_store is None: (specification_store, remainder_list) = cls._GetSpecificationStore(definitions.FORMAT_CATEGORY_ARCHIVE) cls._archive_remainder_list = remainder_list cls._archive_store = specification_store # depends on [control=['if'], data=[]] if cls._archive_scanner is None: cls._archive_scanner = cls._GetSignatureScanner(cls._archive_store) # depends on [control=['if'], data=[]] return cls._GetTypeIndicators(cls._archive_scanner, cls._archive_store, cls._archive_remainder_list, path_spec, resolver_context=resolver_context)
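A usage sketch under the assumption that this record comes from dfVFS's Analyzer class (the class-method style and module names suggest so); the import paths and the path-spec constructor below are assumptions, not confirmed by the record.

from dfvfs.analyzer import analyzer
from dfvfs.path import os_path_spec

path_spec = os_path_spec.OSPathSpec(location='/tmp/example.tar')
indicators = analyzer.Analyzer.GetArchiveTypeIndicators(path_spec)
# A non-empty result such as ['tar'] means a signature scan matched.
print(indicators)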
def certify_bool(value, required=True): """ Certifier for boolean values. :param value: The value to be certified. :param bool required: Whether the value must not be `None`. Defaults to True. :raises CertifierTypeError: The type is invalid """ if certify_required( value=value, required=required, ): return if not isinstance(value, bool): raise CertifierTypeError( message="expected bool, but value is of type {cls!r}".format( cls=value.__class__.__name__), value=value, required=required, )
def function[certify_bool, parameter[value, required]]: constant[ Certifier for boolean values. :param value: The value to be certified. :param bool required: Whether the value must not be `None`. Defaults to True. :raises CertifierTypeError: The type is invalid ] if call[name[certify_required], parameter[]] begin[:] return[None] if <ast.UnaryOp object at 0x7da1b13ceb90> begin[:] <ast.Raise object at 0x7da1b13cfe50>
keyword[def] identifier[certify_bool] ( identifier[value] , identifier[required] = keyword[True] ): literal[string] keyword[if] identifier[certify_required] ( identifier[value] = identifier[value] , identifier[required] = identifier[required] , ): keyword[return] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[bool] ): keyword[raise] identifier[CertifierTypeError] ( identifier[message] = literal[string] . identifier[format] ( identifier[cls] = identifier[value] . identifier[__class__] . identifier[__name__] ), identifier[value] = identifier[value] , identifier[required] = identifier[required] , )
def certify_bool(value, required=True): """ Certifier for boolean values. :param value: The value to be certified. :param bool required: Whether the value must not be `None`. Defaults to True. :raises CertifierTypeError: The type is invalid """ if certify_required(value=value, required=required): return # depends on [control=['if'], data=[]] if not isinstance(value, bool): raise CertifierTypeError(message='expected bool, but value is of type {cls!r}'.format(cls=value.__class__.__name__), value=value, required=required) # depends on [control=['if'], data=[]]
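A short sketch of how the certifier behaves, assuming (as the early return suggests) that certify_required returns a truthy value when a missing value is acceptable, so the type check is skipped for optional absent values; the import line is an assumption about the library layout.

# from certifiable import certify_bool, CertifierTypeError  # assumed import

certify_bool(True)                   # passes silently
certify_bool(None, required=False)   # passes: value is optional and absent
try:
    certify_bool("yes")              # wrong type for a bool certifier
except CertifierTypeError as exc:
    print(exc)  # expected bool, but value is of type 'str'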