code: string, lengths 75–104k
code_sememe: string, lengths 47–309k
token_type: string, lengths 215–214k
code_dependency: string, lengths 75–155k
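Each row of the dump pairs a Python function (code) with three derived views: an AST-walk rendering (code_sememe), a token-level abstraction (token_type), and the source re-annotated with control-dependence comments (code_dependency). If the dump mirrors a Hugging Face dataset with the four string fields above, a row could be pulled out roughly as below; this is a minimal sketch, and the dataset path is a placeholder, not the real name.

# Minimal sketch, assuming a Hugging Face `datasets` layout with the four
# string fields listed above. The dataset path is a placeholder.
from datasets import load_dataset

ds = load_dataset("example-org/parallel-code-representations")  # placeholder path
row = ds["train"][0]
print(row["code"])             # raw Python source
print(row["code_sememe"])      # AST-walk rendering of the same function
print(row["token_type"])       # token-level abstraction (keyword/identifier/literal)
print(row["code_dependency"])  # source annotated with control-dependence comments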
def get_account_details(account): """ Get the account details. """ result = [] for datastore in _get_datastores(): value = datastore.get_account_details(account) value['datastore'] = datastore.config['DESCRIPTION'] result.append(value) return result
def function[get_account_details, parameter[account]]: constant[ Get the account details. ] variable[result] assign[=] list[[]] for taget[name[datastore]] in starred[call[name[_get_datastores], parameter[]]] begin[:] variable[value] assign[=] call[name[datastore].get_account_details, parameter[name[account]]] call[name[value]][constant[datastore]] assign[=] call[name[datastore].config][constant[DESCRIPTION]] call[name[result].append, parameter[name[value]]] return[name[result]]
keyword[def] identifier[get_account_details] ( identifier[account] ): literal[string] identifier[result] =[] keyword[for] identifier[datastore] keyword[in] identifier[_get_datastores] (): identifier[value] = identifier[datastore] . identifier[get_account_details] ( identifier[account] ) identifier[value] [ literal[string] ]= identifier[datastore] . identifier[config] [ literal[string] ] identifier[result] . identifier[append] ( identifier[value] ) keyword[return] identifier[result]
def get_account_details(account): """ Get the account details. """ result = [] for datastore in _get_datastores(): value = datastore.get_account_details(account) value['datastore'] = datastore.config['DESCRIPTION'] result.append(value) # depends on [control=['for'], data=['datastore']] return result
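The code_sememe view above reads like a flattened walk of the function's AST. A minimal sketch of the idea using Python's standard-library ast module follows; it is illustrative only, not the generator that built this dataset, and the exact node spelling differs from the dump's.

# Illustrative sketch only (not the dataset's actual generator): render a
# function as a flat sequence of lower-cased AST node-class names.
import ast

def to_sememe_sketch(source: str) -> str:
    tree = ast.parse(source)
    return ' '.join(type(node).__name__.lower() for node in ast.walk(tree))

print(to_sememe_sketch("def get_one():\n    return 1\n"))
# -> module functiondef arguments return constant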
def doeqdi(x, y, UP=False): """ Takes digitized x, y data and returns the dec, inc, assuming an equal area projection Parameters __________________ x : array of digitized x from point on equal area projection y : array of digitized y from point on equal area projection UP : if True, is an upper hemisphere projection Output : dec : declination inc : inclination """ xp, yp = y, x # need to switch into geographic convention r = np.sqrt(xp**2+yp**2) z = 1.-r**2 t = np.arcsin(z) if UP == 1: t = -t p = np.arctan2(yp, xp) dec, inc = np.degrees(p) % 360, np.degrees(t) return dec, inc
def function[doeqdi, parameter[x, y, UP]]: constant[ Takes digitized x, y data and returns the dec, inc, assuming an equal area projection Parameters __________________ x : array of digitized x from point on equal area projection y : array of digitized y from point on equal area projection UP : if True, is an upper hemisphere projection Output : dec : declination inc : inclination ] <ast.Tuple object at 0x7da18f8107c0> assign[=] tuple[[<ast.Name object at 0x7da18f811270>, <ast.Name object at 0x7da18f8132b0>]] variable[r] assign[=] call[name[np].sqrt, parameter[binary_operation[binary_operation[name[xp] ** constant[2]] + binary_operation[name[yp] ** constant[2]]]]] variable[z] assign[=] binary_operation[constant[1.0] - binary_operation[name[r] ** constant[2]]] variable[t] assign[=] call[name[np].arcsin, parameter[name[z]]] if compare[name[UP] equal[==] constant[1]] begin[:] variable[t] assign[=] <ast.UnaryOp object at 0x7da18f8121a0> variable[p] assign[=] call[name[np].arctan2, parameter[name[yp], name[xp]]] <ast.Tuple object at 0x7da18f8129b0> assign[=] tuple[[<ast.BinOp object at 0x7da18f8124a0>, <ast.Call object at 0x7da18f811e70>]] return[tuple[[<ast.Name object at 0x7da18f8115d0>, <ast.Name object at 0x7da18f811ba0>]]]
keyword[def] identifier[doeqdi] ( identifier[x] , identifier[y] , identifier[UP] = keyword[False] ): literal[string] identifier[xp] , identifier[yp] = identifier[y] , identifier[x] identifier[r] = identifier[np] . identifier[sqrt] ( identifier[xp] ** literal[int] + identifier[yp] ** literal[int] ) identifier[z] = literal[int] - identifier[r] ** literal[int] identifier[t] = identifier[np] . identifier[arcsin] ( identifier[z] ) keyword[if] identifier[UP] == literal[int] : identifier[t] =- identifier[t] identifier[p] = identifier[np] . identifier[arctan2] ( identifier[yp] , identifier[xp] ) identifier[dec] , identifier[inc] = identifier[np] . identifier[degrees] ( identifier[p] )% literal[int] , identifier[np] . identifier[degrees] ( identifier[t] ) keyword[return] identifier[dec] , identifier[inc]
def doeqdi(x, y, UP=False): """ Takes digitized x, y data and returns the dec, inc, assuming an equal area projection Parameters __________________ x : array of digitized x from point on equal area projection y : array of digitized y from point on equal area projection UP : if True, is an upper hemisphere projection Output : dec : declination inc : inclination """ (xp, yp) = (y, x) # need to switch into geographic convention r = np.sqrt(xp ** 2 + yp ** 2) z = 1.0 - r ** 2 t = np.arcsin(z) if UP == 1: t = -t # depends on [control=['if'], data=[]] p = np.arctan2(yp, xp) (dec, inc) = (np.degrees(p) % 360, np.degrees(t)) return (dec, inc)
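The token_type view, by contrast, replaces each token with a keyword[...], identifier[...], or literal[...] marker. Below is a rough sketch of that mapping with the standard-library tokenize module; again this is not the dataset's real tooling, and note the dump collapses every numeric literal to literal[int] and every string to literal[string].

# Rough sketch of a token_type-style rendering with the stdlib tokenize
# module; not the dataset's real tooling. Numeric literals collapse to
# literal[int] and strings to literal[string], matching the dump.
import io
import keyword
import tokenize

def to_token_types_sketch(source: str) -> str:
    out = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.NAME:
            kind = 'keyword' if keyword.iskeyword(tok.string) else 'identifier'
            out.append('{}[{}]'.format(kind, tok.string))
        elif tok.type == tokenize.NUMBER:
            out.append('literal[int]')
        elif tok.type == tokenize.STRING:
            out.append('literal[string]')
        elif tok.type == tokenize.OP:
            out.append(tok.string)
    return ' '.join(out)

print(to_token_types_sketch("def f(x):\n    return x + 1\n"))
# -> keyword[def] identifier[f] ( identifier[x] ) : keyword[return] identifier[x] + literal[int]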
def is_legal_subject(self, c: OntologyClass) -> bool: """ is_legal_subject(c) = true if - c in included_domains(self) or - super_classes_closure(c) intersection included_domains(self) is not empty There is no need to check the included_domains(super_properties_closure(self)) because included_domains(super_properties_closure(self)) is subset of super_classes_closure(included_domains(self)) Args: c: Returns: """ domains = self.included_domains() return c and (not domains or c in domains or c.super_classes_closure() & domains)
def function[is_legal_subject, parameter[self, c]]: constant[ is_legal_subject(c) = true if - c in included_domains(self) or - super_classes_closure(c) intersection included_domains(self) is not empty There is no need to check the included_domains(super_properties_closure(self)) because included_domains(super_properties_closure(self)) is subset of super_classes_closure(included_domains(self)) Args: c: Returns: ] variable[domains] assign[=] call[name[self].included_domains, parameter[]] return[<ast.BoolOp object at 0x7da1b0b59330>]
keyword[def] identifier[is_legal_subject] ( identifier[self] , identifier[c] : identifier[OntologyClass] )-> identifier[bool] : literal[string] identifier[domains] = identifier[self] . identifier[included_domains] () keyword[return] identifier[c] keyword[and] ( keyword[not] identifier[domains] keyword[or] identifier[c] keyword[in] identifier[domains] keyword[or] identifier[c] . identifier[super_classes_closure] ()& identifier[domains] )
def is_legal_subject(self, c: OntologyClass) -> bool: """ is_legal_subject(c) = true if - c in included_domains(self) or - super_classes_closure(c) intersection included_domains(self) is not empty There is no need to check the included_domains(super_properties_closure(self)) because included_domains(super_properties_closure(self)) is subset of super_classes_closure(included_domains(self)) Args: c: Returns: """ domains = self.included_domains() return c and (not domains or c in domains or c.super_classes_closure() & domains)
def grouped_count_sizes(fileslist, fgrouped): # pragma: no cover '''Compute the total size per group and total number of files. Useful to check that everything is OK.''' fsizes = {} total_files = 0 allitems = None if isinstance(fgrouped, dict): allitems = fgrouped.iteritems() elif isinstance(fgrouped, list): allitems = enumerate(fgrouped) for fkey, cluster in allitems: fsizes[fkey] = [] for subcluster in cluster: tot = 0 if subcluster is not None: for fname in subcluster: tot += fileslist[fname] total_files += 1 fsizes[fkey].append(tot) return fsizes, total_files
def function[grouped_count_sizes, parameter[fileslist, fgrouped]]: constant[Compute the total size per group and total number of files. Useful to check that everything is OK.] variable[fsizes] assign[=] dictionary[[], []] variable[total_files] assign[=] constant[0] variable[allitems] assign[=] constant[None] if call[name[isinstance], parameter[name[fgrouped], name[dict]]] begin[:] variable[allitems] assign[=] call[name[fgrouped].iteritems, parameter[]] for taget[tuple[[<ast.Name object at 0x7da18eb550f0>, <ast.Name object at 0x7da18eb56a70>]]] in starred[name[allitems]] begin[:] call[name[fsizes]][name[fkey]] assign[=] list[[]] for taget[name[subcluster]] in starred[name[cluster]] begin[:] variable[tot] assign[=] constant[0] if compare[name[subcluster] is_not constant[None]] begin[:] for taget[name[fname]] in starred[name[subcluster]] begin[:] <ast.AugAssign object at 0x7da18eb55390> <ast.AugAssign object at 0x7da18eb559c0> call[call[name[fsizes]][name[fkey]].append, parameter[name[tot]]] return[tuple[[<ast.Name object at 0x7da18eb54640>, <ast.Name object at 0x7da18eb54550>]]]
keyword[def] identifier[grouped_count_sizes] ( identifier[fileslist] , identifier[fgrouped] ): literal[string] identifier[fsizes] ={} identifier[total_files] = literal[int] identifier[allitems] = keyword[None] keyword[if] identifier[isinstance] ( identifier[fgrouped] , identifier[dict] ): identifier[allitems] = identifier[fgrouped] . identifier[iteritems] () keyword[elif] identifier[isinstance] ( identifier[fgrouped] , identifier[list] ): identifier[allitems] = identifier[enumerate] ( identifier[fgrouped] ) keyword[for] identifier[fkey] , identifier[cluster] keyword[in] identifier[allitems] : identifier[fsizes] [ identifier[fkey] ]=[] keyword[for] identifier[subcluster] keyword[in] identifier[cluster] : identifier[tot] = literal[int] keyword[if] identifier[subcluster] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[fname] keyword[in] identifier[subcluster] : identifier[tot] += identifier[fileslist] [ identifier[fname] ] identifier[total_files] += literal[int] identifier[fsizes] [ identifier[fkey] ]. identifier[append] ( identifier[tot] ) keyword[return] identifier[fsizes] , identifier[total_files]
def grouped_count_sizes(fileslist, fgrouped): # pragma: no cover 'Compute the total size per group and total number of files. Useful to check that everything is OK.' fsizes = {} total_files = 0 allitems = None if isinstance(fgrouped, dict): allitems = fgrouped.iteritems() # depends on [control=['if'], data=[]] elif isinstance(fgrouped, list): allitems = enumerate(fgrouped) # depends on [control=['if'], data=[]] for (fkey, cluster) in allitems: fsizes[fkey] = [] for subcluster in cluster: tot = 0 if subcluster is not None: for fname in subcluster: tot += fileslist[fname] total_files += 1 # depends on [control=['for'], data=['fname']] # depends on [control=['if'], data=['subcluster']] fsizes[fkey].append(tot) # depends on [control=['for'], data=['subcluster']] # depends on [control=['for'], data=[]] return (fsizes, total_files)
def add_condition(self, condition: z3.BoolRef) -> None: """ Add condition to the dependence map :param condition: The condition that is to be added to the dependence map """ variables = set(_get_expr_variables(condition)) relevant_buckets = set() for variable in variables: try: bucket = self.variable_map[str(variable)] relevant_buckets.add(bucket) except KeyError: continue new_bucket = DependenceBucket(variables, [condition]) self.buckets.append(new_bucket) if relevant_buckets: # Merge buckets, and rewrite variable map accordingly relevant_buckets.add(new_bucket) new_bucket = self._merge_buckets(relevant_buckets) for variable in new_bucket.variables: self.variable_map[str(variable)] = new_bucket
def function[add_condition, parameter[self, condition]]: constant[ Add condition to the dependence map :param condition: The condition that is to be added to the dependence map ] variable[variables] assign[=] call[name[set], parameter[call[name[_get_expr_variables], parameter[name[condition]]]]] variable[relevant_buckets] assign[=] call[name[set], parameter[]] for taget[name[variable]] in starred[name[variables]] begin[:] <ast.Try object at 0x7da1b1df6980> variable[new_bucket] assign[=] call[name[DependenceBucket], parameter[name[variables], list[[<ast.Name object at 0x7da1b1df6440>]]]] call[name[self].buckets.append, parameter[name[new_bucket]]] if name[relevant_buckets] begin[:] call[name[relevant_buckets].add, parameter[name[new_bucket]]] variable[new_bucket] assign[=] call[name[self]._merge_buckets, parameter[name[relevant_buckets]]] for taget[name[variable]] in starred[name[new_bucket].variables] begin[:] call[name[self].variable_map][call[name[str], parameter[name[variable]]]] assign[=] name[new_bucket]
keyword[def] identifier[add_condition] ( identifier[self] , identifier[condition] : identifier[z3] . identifier[BoolRef] )-> keyword[None] : literal[string] identifier[variables] = identifier[set] ( identifier[_get_expr_variables] ( identifier[condition] )) identifier[relevant_buckets] = identifier[set] () keyword[for] identifier[variable] keyword[in] identifier[variables] : keyword[try] : identifier[bucket] = identifier[self] . identifier[variable_map] [ identifier[str] ( identifier[variable] )] identifier[relevant_buckets] . identifier[add] ( identifier[bucket] ) keyword[except] identifier[KeyError] : keyword[continue] identifier[new_bucket] = identifier[DependenceBucket] ( identifier[variables] ,[ identifier[condition] ]) identifier[self] . identifier[buckets] . identifier[append] ( identifier[new_bucket] ) keyword[if] identifier[relevant_buckets] : identifier[relevant_buckets] . identifier[add] ( identifier[new_bucket] ) identifier[new_bucket] = identifier[self] . identifier[_merge_buckets] ( identifier[relevant_buckets] ) keyword[for] identifier[variable] keyword[in] identifier[new_bucket] . identifier[variables] : identifier[self] . identifier[variable_map] [ identifier[str] ( identifier[variable] )]= identifier[new_bucket]
def add_condition(self, condition: z3.BoolRef) -> None: """ Add condition to the dependence map :param condition: The condition that is to be added to the dependence map """ variables = set(_get_expr_variables(condition)) relevant_buckets = set() for variable in variables: try: bucket = self.variable_map[str(variable)] relevant_buckets.add(bucket) # depends on [control=['try'], data=[]] except KeyError: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['variable']] new_bucket = DependenceBucket(variables, [condition]) self.buckets.append(new_bucket) if relevant_buckets: # Merge buckets, and rewrite variable map accordingly relevant_buckets.add(new_bucket) new_bucket = self._merge_buckets(relevant_buckets) # depends on [control=['if'], data=[]] for variable in new_bucket.variables: self.variable_map[str(variable)] = new_bucket # depends on [control=['for'], data=['variable']]
def looking_for(self): """Copy looking for attributes from the source profile to the destination profile. """ looking_for = self.source_profile.looking_for return self.dest_user.profile.looking_for.update( gentation=looking_for.gentation, single=looking_for.single, near_me=looking_for.near_me, kinds=looking_for.kinds, ages=looking_for.ages )
def function[looking_for, parameter[self]]: constant[Copy looking for attributes from the source profile to the destination profile. ] variable[looking_for] assign[=] name[self].source_profile.looking_for return[call[name[self].dest_user.profile.looking_for.update, parameter[]]]
keyword[def] identifier[looking_for] ( identifier[self] ): literal[string] identifier[looking_for] = identifier[self] . identifier[source_profile] . identifier[looking_for] keyword[return] identifier[self] . identifier[dest_user] . identifier[profile] . identifier[looking_for] . identifier[update] ( identifier[gentation] = identifier[looking_for] . identifier[gentation] , identifier[single] = identifier[looking_for] . identifier[single] , identifier[near_me] = identifier[looking_for] . identifier[near_me] , identifier[kinds] = identifier[looking_for] . identifier[kinds] , identifier[ages] = identifier[looking_for] . identifier[ages] )
def looking_for(self): """Copy looking for attributes from the source profile to the destination profile. """ looking_for = self.source_profile.looking_for return self.dest_user.profile.looking_for.update(gentation=looking_for.gentation, single=looking_for.single, near_me=looking_for.near_me, kinds=looking_for.kinds, ages=looking_for.ages)
def list_upgrades(refresh=True, **kwargs): # pylint: disable=W0613 ''' Check whether or not an upgrade is available for all packages CLI Example: .. code-block:: bash salt '*' pkg.list_upgrades ''' if refresh: refresh_db() res = _call_brew('outdated --json=v1') ret = {} try: data = salt.utils.json.loads(res['stdout']) except ValueError as err: msg = 'unable to interpret output from "brew outdated": {0}'.format(err) log.error(msg) raise CommandExecutionError(msg) for pkg in data: # current means latest available to brew ret[pkg['name']] = pkg['current_version'] return ret
def function[list_upgrades, parameter[refresh]]: constant[ Check whether or not an upgrade is available for all packages CLI Example: .. code-block:: bash salt '*' pkg.list_upgrades ] if name[refresh] begin[:] call[name[refresh_db], parameter[]] variable[res] assign[=] call[name[_call_brew], parameter[constant[outdated --json=v1]]] variable[ret] assign[=] dictionary[[], []] <ast.Try object at 0x7da1b1c33f40> for taget[name[pkg]] in starred[name[data]] begin[:] call[name[ret]][call[name[pkg]][constant[name]]] assign[=] call[name[pkg]][constant[current_version]] return[name[ret]]
keyword[def] identifier[list_upgrades] ( identifier[refresh] = keyword[True] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[refresh] : identifier[refresh_db] () identifier[res] = identifier[_call_brew] ( literal[string] ) identifier[ret] ={} keyword[try] : identifier[data] = identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[res] [ literal[string] ]) keyword[except] identifier[ValueError] keyword[as] identifier[err] : identifier[msg] = literal[string] . identifier[format] ( identifier[err] ) identifier[log] . identifier[error] ( identifier[msg] ) keyword[raise] identifier[CommandExecutionError] ( identifier[msg] ) keyword[for] identifier[pkg] keyword[in] identifier[data] : identifier[ret] [ identifier[pkg] [ literal[string] ]]= identifier[pkg] [ literal[string] ] keyword[return] identifier[ret]
def list_upgrades(refresh=True, **kwargs): # pylint: disable=W0613 "\n Check whether or not an upgrade is available for all packages\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' pkg.list_upgrades\n " if refresh: refresh_db() # depends on [control=['if'], data=[]] res = _call_brew('outdated --json=v1') ret = {} try: data = salt.utils.json.loads(res['stdout']) # depends on [control=['try'], data=[]] except ValueError as err: msg = 'unable to interpret output from "brew outdated": {0}'.format(err) log.error(msg) raise CommandExecutionError(msg) # depends on [control=['except'], data=['err']] for pkg in data: # current means latest available to brew ret[pkg['name']] = pkg['current_version'] # depends on [control=['for'], data=['pkg']] return ret
def update(self, pointvol): """Update the bounding ellipsoid using the current set of live points.""" # Check if we should use the provided pool for updating. if self.use_pool_update: pool = self.pool else: pool = None # Update the ellipsoid. self.ell.update(self.live_u, pointvol=pointvol, rstate=self.rstate, bootstrap=self.bootstrap, pool=pool) if self.enlarge != 1.: self.ell.scale_to_vol(self.ell.vol * self.enlarge) return copy.deepcopy(self.ell)
def function[update, parameter[self, pointvol]]: constant[Update the bounding ellipsoid using the current set of live points.] if name[self].use_pool_update begin[:] variable[pool] assign[=] name[self].pool call[name[self].ell.update, parameter[name[self].live_u]] if compare[name[self].enlarge not_equal[!=] constant[1.0]] begin[:] call[name[self].ell.scale_to_vol, parameter[binary_operation[name[self].ell.vol * name[self].enlarge]]] return[call[name[copy].deepcopy, parameter[name[self].ell]]]
keyword[def] identifier[update] ( identifier[self] , identifier[pointvol] ): literal[string] keyword[if] identifier[self] . identifier[use_pool_update] : identifier[pool] = identifier[self] . identifier[pool] keyword[else] : identifier[pool] = keyword[None] identifier[self] . identifier[ell] . identifier[update] ( identifier[self] . identifier[live_u] , identifier[pointvol] = identifier[pointvol] , identifier[rstate] = identifier[self] . identifier[rstate] , identifier[bootstrap] = identifier[self] . identifier[bootstrap] , identifier[pool] = identifier[pool] ) keyword[if] identifier[self] . identifier[enlarge] != literal[int] : identifier[self] . identifier[ell] . identifier[scale_to_vol] ( identifier[self] . identifier[ell] . identifier[vol] * identifier[self] . identifier[enlarge] ) keyword[return] identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[ell] )
def update(self, pointvol): """Update the bounding ellipsoid using the current set of live points.""" # Check if we should use the provided pool for updating. if self.use_pool_update: pool = self.pool # depends on [control=['if'], data=[]] else: pool = None # Update the ellipsoid. self.ell.update(self.live_u, pointvol=pointvol, rstate=self.rstate, bootstrap=self.bootstrap, pool=pool) if self.enlarge != 1.0: self.ell.scale_to_vol(self.ell.vol * self.enlarge) # depends on [control=['if'], data=[]] return copy.deepcopy(self.ell)
def get_allowed_reset_keys_values(self): """Get the allowed values for resetting the system. :returns: A set with the allowed values. """ reset_keys_action = self._get_reset_keys_action_element() if not reset_keys_action.allowed_values: LOG.warning('Could not figure out the allowed values for the ' 'reset keys in secure boot %s', self.path) return set(mappings.SECUREBOOT_RESET_KEYS_MAP_REV) return set([mappings.SECUREBOOT_RESET_KEYS_MAP[v] for v in set(mappings.SECUREBOOT_RESET_KEYS_MAP). intersection(reset_keys_action.allowed_values)])
def function[get_allowed_reset_keys_values, parameter[self]]: constant[Get the allowed values for resetting the system. :returns: A set with the allowed values. ] variable[reset_keys_action] assign[=] call[name[self]._get_reset_keys_action_element, parameter[]] if <ast.UnaryOp object at 0x7da1b19b1720> begin[:] call[name[LOG].warning, parameter[constant[Could not figure out the allowed values for the reset keys in secure boot %s], name[self].path]] return[call[name[set], parameter[name[mappings].SECUREBOOT_RESET_KEYS_MAP_REV]]] return[call[name[set], parameter[<ast.ListComp object at 0x7da1b1a6e560>]]]
keyword[def] identifier[get_allowed_reset_keys_values] ( identifier[self] ): literal[string] identifier[reset_keys_action] = identifier[self] . identifier[_get_reset_keys_action_element] () keyword[if] keyword[not] identifier[reset_keys_action] . identifier[allowed_values] : identifier[LOG] . identifier[warning] ( literal[string] literal[string] , identifier[self] . identifier[path] ) keyword[return] identifier[set] ( identifier[mappings] . identifier[SECUREBOOT_RESET_KEYS_MAP_REV] ) keyword[return] identifier[set] ([ identifier[mappings] . identifier[SECUREBOOT_RESET_KEYS_MAP] [ identifier[v] ] keyword[for] identifier[v] keyword[in] identifier[set] ( identifier[mappings] . identifier[SECUREBOOT_RESET_KEYS_MAP] ). identifier[intersection] ( identifier[reset_keys_action] . identifier[allowed_values] )])
def get_allowed_reset_keys_values(self): """Get the allowed values for resetting the system. :returns: A set with the allowed values. """ reset_keys_action = self._get_reset_keys_action_element() if not reset_keys_action.allowed_values: LOG.warning('Could not figure out the allowed values for the reset keys in secure boot %s', self.path) return set(mappings.SECUREBOOT_RESET_KEYS_MAP_REV) # depends on [control=['if'], data=[]] return set([mappings.SECUREBOOT_RESET_KEYS_MAP[v] for v in set(mappings.SECUREBOOT_RESET_KEYS_MAP).intersection(reset_keys_action.allowed_values)])
def _set_version(self, v, load=False): """ Setter method for version, mapped from YANG variable /rbridge_id/openflow/logical_instance/version (list) If this variable is read-only (config: false) in the source YANG file, then _set_version is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_version() directly. YANG Description: OpenFlow version """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("version_name",version.version, yang_name="version", rest_name="version", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='version-name', extensions={u'tailf-common': {u'callpoint': u'OpenFlowSupportedVersions', u'info': u'OpenFlow version', u'cli-suppress-mode': None}}), is_container='list', yang_name="version", rest_name="version", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'OpenFlowSupportedVersions', u'info': u'OpenFlow version', u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-openflow', defining_module='brocade-openflow', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """version must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("version_name",version.version, yang_name="version", rest_name="version", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='version-name', extensions={u'tailf-common': {u'callpoint': u'OpenFlowSupportedVersions', u'info': u'OpenFlow version', u'cli-suppress-mode': None}}), is_container='list', yang_name="version", rest_name="version", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'OpenFlowSupportedVersions', u'info': u'OpenFlow version', u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-openflow', defining_module='brocade-openflow', yang_type='list', is_config=True)""", }) self.__version = t if hasattr(self, '_set'): self._set()
def function[_set_version, parameter[self, v, load]]: constant[ Setter method for version, mapped from YANG variable /rbridge_id/openflow/logical_instance/version (list) If this variable is read-only (config: false) in the source YANG file, then _set_version is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_version() directly. YANG Description: OpenFlow version ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da18bcc9300> name[self].__version assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_version] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[YANGListType] ( literal[string] , identifier[version] . identifier[version] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[is_container] = literal[string] , identifier[user_ordered] = keyword[False] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[yang_keys] = literal[string] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] }}), identifier[is_container] = literal[string] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__version] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_version(self, v, load=False): """ Setter method for version, mapped from YANG variable /rbridge_id/openflow/logical_instance/version (list) If this variable is read-only (config: false) in the source YANG file, then _set_version is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_version() directly. YANG Description: OpenFlow version """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=YANGListType('version_name', version.version, yang_name='version', rest_name='version', parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='version-name', extensions={u'tailf-common': {u'callpoint': u'OpenFlowSupportedVersions', u'info': u'OpenFlow version', u'cli-suppress-mode': None}}), is_container='list', yang_name='version', rest_name='version', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'OpenFlowSupportedVersions', u'info': u'OpenFlow version', u'cli-suppress-mode': None}}, namespace='urn:brocade.com:mgmt:brocade-openflow', defining_module='brocade-openflow', yang_type='list', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'version must be of a type compatible with list', 'defined-type': 'list', 'generated-type': 'YANGDynClass(base=YANGListType("version_name",version.version, yang_name="version", rest_name="version", parent=self, is_container=\'list\', user_ordered=False, path_helper=self._path_helper, yang_keys=\'version-name\', extensions={u\'tailf-common\': {u\'callpoint\': u\'OpenFlowSupportedVersions\', u\'info\': u\'OpenFlow version\', u\'cli-suppress-mode\': None}}), is_container=\'list\', yang_name="version", rest_name="version", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'OpenFlowSupportedVersions\', u\'info\': u\'OpenFlow version\', u\'cli-suppress-mode\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-openflow\', defining_module=\'brocade-openflow\', yang_type=\'list\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__version = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def imagetransformer_moe_tiny(): """Set of hyperparameters for a very small imagetransformer with MoE.""" hparams = imagetransformer_tiny() hparams.hidden_size = 64 hparams.batch_size = 1 hparams.num_hidden_layers = 3 hparams.dec_attention_type = cia.AttentionType.MOE_LOCAL_1D hparams.add_hparam("moe_layers_decoder", "1") # Which layer is MoE. hparams.moe_hidden_sizes = "1024" # Hidden layer sizes (comma-separated). hparams.moe_num_experts = 16 # Number of experts in each MoE layer. hparams.moe_k = 2 # How many experts to use per batch element (try 2 or 4). hparams.moe_loss_coef = 1e-2 # MoE loss coefficient (1e-2 is usually ok). return hparams
def function[imagetransformer_moe_tiny, parameter[]]: constant[Set of hyperparameters for a very small imagetransformer with MoE.] variable[hparams] assign[=] call[name[imagetransformer_tiny], parameter[]] name[hparams].hidden_size assign[=] constant[64] name[hparams].batch_size assign[=] constant[1] name[hparams].num_hidden_layers assign[=] constant[3] name[hparams].dec_attention_type assign[=] name[cia].AttentionType.MOE_LOCAL_1D call[name[hparams].add_hparam, parameter[constant[moe_layers_decoder], constant[1]]] name[hparams].moe_hidden_sizes assign[=] constant[1024] name[hparams].moe_num_experts assign[=] constant[16] name[hparams].moe_k assign[=] constant[2] name[hparams].moe_loss_coef assign[=] constant[0.01] return[name[hparams]]
keyword[def] identifier[imagetransformer_moe_tiny] (): literal[string] identifier[hparams] = identifier[imagetransformer_tiny] () identifier[hparams] . identifier[hidden_size] = literal[int] identifier[hparams] . identifier[batch_size] = literal[int] identifier[hparams] . identifier[num_hidden_layers] = literal[int] identifier[hparams] . identifier[dec_attention_type] = identifier[cia] . identifier[AttentionType] . identifier[MOE_LOCAL_1D] identifier[hparams] . identifier[add_hparam] ( literal[string] , literal[string] ) identifier[hparams] . identifier[moe_hidden_sizes] = literal[string] identifier[hparams] . identifier[moe_num_experts] = literal[int] identifier[hparams] . identifier[moe_k] = literal[int] identifier[hparams] . identifier[moe_loss_coef] = literal[int] keyword[return] identifier[hparams]
def imagetransformer_moe_tiny(): """Set of hyperparameters for a very small imagetransformer with MoE.""" hparams = imagetransformer_tiny() hparams.hidden_size = 64 hparams.batch_size = 1 hparams.num_hidden_layers = 3 hparams.dec_attention_type = cia.AttentionType.MOE_LOCAL_1D hparams.add_hparam('moe_layers_decoder', '1') # Which layer is MoE. hparams.moe_hidden_sizes = '1024' # Hidden layer sizes (comma-separated). hparams.moe_num_experts = 16 # Number of experts in each MoE layer. hparams.moe_k = 2 # How many experts to use per batch element (try 2 or 4). hparams.moe_loss_coef = 0.01 # MoE loss coefficient (1e-2 is usually ok). return hparams
def load_tf_weights_in_bert(model, tf_checkpoint_path): """ Load tf checkpoints in a pytorch model """ try: import re import numpy as np import tensorflow as tf except ImportError: print("Loading a TensorFlow models in PyTorch, requires TensorFlow to be installed. Please see " "https://www.tensorflow.org/install/ for installation instructions.") raise tf_path = os.path.abspath(tf_checkpoint_path) print("Converting TensorFlow checkpoint from {}".format(tf_path)) # Load weights from TF model init_vars = tf.train.list_variables(tf_path) names = [] arrays = [] for name, shape in init_vars: print("Loading TF weight {} with shape {}".format(name, shape)) array = tf.train.load_variable(tf_path, name) names.append(name) arrays.append(array) for name, array in zip(names, arrays): name = name.split('/') # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v # which are not required for using pretrained model if any(n in ["adam_v", "adam_m", "global_step"] for n in name): print("Skipping {}".format("/".join(name))) continue pointer = model for m_name in name: if re.fullmatch(r'[A-Za-z]+_\d+', m_name): l = re.split(r'_(\d+)', m_name) else: l = [m_name] if l[0] == 'kernel' or l[0] == 'gamma': pointer = getattr(pointer, 'weight') elif l[0] == 'output_bias' or l[0] == 'beta': pointer = getattr(pointer, 'bias') elif l[0] == 'output_weights': pointer = getattr(pointer, 'weight') elif l[0] == 'squad': pointer = getattr(pointer, 'classifier') else: try: pointer = getattr(pointer, l[0]) except AttributeError: print("Skipping {}".format("/".join(name))) continue if len(l) >= 2: num = int(l[1]) pointer = pointer[num] if m_name[-11:] == '_embeddings': pointer = getattr(pointer, 'weight') elif m_name == 'kernel': array = np.transpose(array) try: assert pointer.shape == array.shape except AssertionError as e: e.args += (pointer.shape, array.shape) raise print("Initialize PyTorch weight {}".format(name)) pointer.data = torch.from_numpy(array) return model
def function[load_tf_weights_in_bert, parameter[model, tf_checkpoint_path]]: constant[ Load tf checkpoints in a pytorch model ] <ast.Try object at 0x7da1b20a9a20> variable[tf_path] assign[=] call[name[os].path.abspath, parameter[name[tf_checkpoint_path]]] call[name[print], parameter[call[constant[Converting TensorFlow checkpoint from {}].format, parameter[name[tf_path]]]]] variable[init_vars] assign[=] call[name[tf].train.list_variables, parameter[name[tf_path]]] variable[names] assign[=] list[[]] variable[arrays] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b20a8a60>, <ast.Name object at 0x7da1b20aac50>]]] in starred[name[init_vars]] begin[:] call[name[print], parameter[call[constant[Loading TF weight {} with shape {}].format, parameter[name[name], name[shape]]]]] variable[array] assign[=] call[name[tf].train.load_variable, parameter[name[tf_path], name[name]]] call[name[names].append, parameter[name[name]]] call[name[arrays].append, parameter[name[array]]] for taget[tuple[[<ast.Name object at 0x7da1b20a9c60>, <ast.Name object at 0x7da1b20a8f40>]]] in starred[call[name[zip], parameter[name[names], name[arrays]]]] begin[:] variable[name] assign[=] call[name[name].split, parameter[constant[/]]] if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b20a9b40>]] begin[:] call[name[print], parameter[call[constant[Skipping {}].format, parameter[call[constant[/].join, parameter[name[name]]]]]]] continue variable[pointer] assign[=] name[model] for taget[name[m_name]] in starred[name[name]] begin[:] if call[name[re].fullmatch, parameter[constant[[A-Za-z]+_\d+], name[m_name]]] begin[:] variable[l] assign[=] call[name[re].split, parameter[constant[_(\d+)], name[m_name]]] if <ast.BoolOp object at 0x7da18f58e200> begin[:] variable[pointer] assign[=] call[name[getattr], parameter[name[pointer], constant[weight]]] if compare[call[name[len], parameter[name[l]]] greater_or_equal[>=] constant[2]] begin[:] variable[num] assign[=] call[name[int], parameter[call[name[l]][constant[1]]]] variable[pointer] assign[=] call[name[pointer]][name[num]] if compare[call[name[m_name]][<ast.Slice object at 0x7da20c76ecb0>] equal[==] constant[_embeddings]] begin[:] variable[pointer] assign[=] call[name[getattr], parameter[name[pointer], constant[weight]]] <ast.Try object at 0x7da237eefa90> call[name[print], parameter[call[constant[Initialize PyTorch weight {}].format, parameter[name[name]]]]] name[pointer].data assign[=] call[name[torch].from_numpy, parameter[name[array]]] return[name[model]]
keyword[def] identifier[load_tf_weights_in_bert] ( identifier[model] , identifier[tf_checkpoint_path] ): literal[string] keyword[try] : keyword[import] identifier[re] keyword[import] identifier[numpy] keyword[as] identifier[np] keyword[import] identifier[tensorflow] keyword[as] identifier[tf] keyword[except] identifier[ImportError] : identifier[print] ( literal[string] literal[string] ) keyword[raise] identifier[tf_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[tf_checkpoint_path] ) identifier[print] ( literal[string] . identifier[format] ( identifier[tf_path] )) identifier[init_vars] = identifier[tf] . identifier[train] . identifier[list_variables] ( identifier[tf_path] ) identifier[names] =[] identifier[arrays] =[] keyword[for] identifier[name] , identifier[shape] keyword[in] identifier[init_vars] : identifier[print] ( literal[string] . identifier[format] ( identifier[name] , identifier[shape] )) identifier[array] = identifier[tf] . identifier[train] . identifier[load_variable] ( identifier[tf_path] , identifier[name] ) identifier[names] . identifier[append] ( identifier[name] ) identifier[arrays] . identifier[append] ( identifier[array] ) keyword[for] identifier[name] , identifier[array] keyword[in] identifier[zip] ( identifier[names] , identifier[arrays] ): identifier[name] = identifier[name] . identifier[split] ( literal[string] ) keyword[if] identifier[any] ( identifier[n] keyword[in] [ literal[string] , literal[string] , literal[string] ] keyword[for] identifier[n] keyword[in] identifier[name] ): identifier[print] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[name] ))) keyword[continue] identifier[pointer] = identifier[model] keyword[for] identifier[m_name] keyword[in] identifier[name] : keyword[if] identifier[re] . identifier[fullmatch] ( literal[string] , identifier[m_name] ): identifier[l] = identifier[re] . identifier[split] ( literal[string] , identifier[m_name] ) keyword[else] : identifier[l] =[ identifier[m_name] ] keyword[if] identifier[l] [ literal[int] ]== literal[string] keyword[or] identifier[l] [ literal[int] ]== literal[string] : identifier[pointer] = identifier[getattr] ( identifier[pointer] , literal[string] ) keyword[elif] identifier[l] [ literal[int] ]== literal[string] keyword[or] identifier[l] [ literal[int] ]== literal[string] : identifier[pointer] = identifier[getattr] ( identifier[pointer] , literal[string] ) keyword[elif] identifier[l] [ literal[int] ]== literal[string] : identifier[pointer] = identifier[getattr] ( identifier[pointer] , literal[string] ) keyword[elif] identifier[l] [ literal[int] ]== literal[string] : identifier[pointer] = identifier[getattr] ( identifier[pointer] , literal[string] ) keyword[else] : keyword[try] : identifier[pointer] = identifier[getattr] ( identifier[pointer] , identifier[l] [ literal[int] ]) keyword[except] identifier[AttributeError] : identifier[print] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[name] ))) keyword[continue] keyword[if] identifier[len] ( identifier[l] )>= literal[int] : identifier[num] = identifier[int] ( identifier[l] [ literal[int] ]) identifier[pointer] = identifier[pointer] [ identifier[num] ] keyword[if] identifier[m_name] [- literal[int] :]== literal[string] : identifier[pointer] = identifier[getattr] ( identifier[pointer] , literal[string] ) keyword[elif] identifier[m_name] == literal[string] : identifier[array] = identifier[np] . identifier[transpose] ( identifier[array] ) keyword[try] : keyword[assert] identifier[pointer] . identifier[shape] == identifier[array] . identifier[shape] keyword[except] identifier[AssertionError] keyword[as] identifier[e] : identifier[e] . identifier[args] +=( identifier[pointer] . identifier[shape] , identifier[array] . identifier[shape] ) keyword[raise] identifier[print] ( literal[string] . identifier[format] ( identifier[name] )) identifier[pointer] . identifier[data] = identifier[torch] . identifier[from_numpy] ( identifier[array] ) keyword[return] identifier[model]
def load_tf_weights_in_bert(model, tf_checkpoint_path): """ Load tf checkpoints in a pytorch model """ try: import re import numpy as np import tensorflow as tf # depends on [control=['try'], data=[]] except ImportError: print('Loading a TensorFlow models in PyTorch, requires TensorFlow to be installed. Please see https://www.tensorflow.org/install/ for installation instructions.') raise # depends on [control=['except'], data=[]] tf_path = os.path.abspath(tf_checkpoint_path) print('Converting TensorFlow checkpoint from {}'.format(tf_path)) # Load weights from TF model init_vars = tf.train.list_variables(tf_path) names = [] arrays = [] for (name, shape) in init_vars: print('Loading TF weight {} with shape {}'.format(name, shape)) array = tf.train.load_variable(tf_path, name) names.append(name) arrays.append(array) # depends on [control=['for'], data=[]] for (name, array) in zip(names, arrays): name = name.split('/') # adam_v and adam_m are variables used in AdamWeightDecayOptimizer to calculated m and v # which are not required for using pretrained model if any((n in ['adam_v', 'adam_m', 'global_step'] for n in name)): print('Skipping {}'.format('/'.join(name))) continue # depends on [control=['if'], data=[]] pointer = model for m_name in name: if re.fullmatch('[A-Za-z]+_\\d+', m_name): l = re.split('_(\\d+)', m_name) # depends on [control=['if'], data=[]] else: l = [m_name] if l[0] == 'kernel' or l[0] == 'gamma': pointer = getattr(pointer, 'weight') # depends on [control=['if'], data=[]] elif l[0] == 'output_bias' or l[0] == 'beta': pointer = getattr(pointer, 'bias') # depends on [control=['if'], data=[]] elif l[0] == 'output_weights': pointer = getattr(pointer, 'weight') # depends on [control=['if'], data=[]] elif l[0] == 'squad': pointer = getattr(pointer, 'classifier') # depends on [control=['if'], data=[]] else: try: pointer = getattr(pointer, l[0]) # depends on [control=['try'], data=[]] except AttributeError: print('Skipping {}'.format('/'.join(name))) continue # depends on [control=['except'], data=[]] if len(l) >= 2: num = int(l[1]) pointer = pointer[num] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m_name']] if m_name[-11:] == '_embeddings': pointer = getattr(pointer, 'weight') # depends on [control=['if'], data=[]] elif m_name == 'kernel': array = np.transpose(array) # depends on [control=['if'], data=[]] try: assert pointer.shape == array.shape # depends on [control=['try'], data=[]] except AssertionError as e: e.args += (pointer.shape, array.shape) raise # depends on [control=['except'], data=['e']] print('Initialize PyTorch weight {}'.format(name)) pointer.data = torch.from_numpy(array) # depends on [control=['for'], data=[]] return model
def native_container(self): """Native container object.""" if self.__native is None: self.__native = self._get_container() return self.__native
def function[native_container, parameter[self]]: constant[Native container object.] if compare[name[self].__native is constant[None]] begin[:] name[self].__native assign[=] call[name[self]._get_container, parameter[]] return[name[self].__native]
keyword[def] identifier[native_container] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[__native] keyword[is] keyword[None] : identifier[self] . identifier[__native] = identifier[self] . identifier[_get_container] () keyword[return] identifier[self] . identifier[__native]
def native_container(self): """Native container object.""" if self.__native is None: self.__native = self._get_container() # depends on [control=['if'], data=[]] return self.__native
def plot_losses(calc_id, bins=7): """ losses_by_event plotter """ # read the hazard data dstore = util.read(calc_id) losses_by_rlzi = dict(extract(dstore, 'losses_by_event')) oq = dstore['oqparam'] plt = make_figure(losses_by_rlzi, oq.loss_dt().names, bins) plt.show()
def function[plot_losses, parameter[calc_id, bins]]: constant[ losses_by_event plotter ] variable[dstore] assign[=] call[name[util].read, parameter[name[calc_id]]] variable[losses_by_rlzi] assign[=] call[name[dict], parameter[call[name[extract], parameter[name[dstore], constant[losses_by_event]]]]] variable[oq] assign[=] call[name[dstore]][constant[oqparam]] variable[plt] assign[=] call[name[make_figure], parameter[name[losses_by_rlzi], call[name[oq].loss_dt, parameter[]].names, name[bins]]] call[name[plt].show, parameter[]]
keyword[def] identifier[plot_losses] ( identifier[calc_id] , identifier[bins] = literal[int] ): literal[string] identifier[dstore] = identifier[util] . identifier[read] ( identifier[calc_id] ) identifier[losses_by_rlzi] = identifier[dict] ( identifier[extract] ( identifier[dstore] , literal[string] )) identifier[oq] = identifier[dstore] [ literal[string] ] identifier[plt] = identifier[make_figure] ( identifier[losses_by_rlzi] , identifier[oq] . identifier[loss_dt] (). identifier[names] , identifier[bins] ) identifier[plt] . identifier[show] ()
def plot_losses(calc_id, bins=7): """ losses_by_event plotter """ # read the hazard data dstore = util.read(calc_id) losses_by_rlzi = dict(extract(dstore, 'losses_by_event')) oq = dstore['oqparam'] plt = make_figure(losses_by_rlzi, oq.loss_dt().names, bins) plt.show()
def array(source_array, ctx=None, dtype=None): """Creates an array from any object exposing the array interface. Parameters ---------- source_array : array_like An object exposing the array interface, an object whose `__array__` method returns an array, or any (nested) sequence. ctx : Context, optional Device context (default is the current default context). dtype : str or numpy.dtype, optional The data type of the output array. The default dtype is ``source_array.dtype`` if `source_array` is an `NDArray`, `float32` otherwise. Returns ------- NDArray, RowSparseNDArray or CSRNDArray An array with the same contents as the `source_array`. Examples -------- >>> import numpy as np >>> mx.nd.array([1, 2, 3]) <NDArray 3 @cpu(0)> >>> mx.nd.array([[1, 2], [3, 4]]) <NDArray 2x2 @cpu(0)> >>> mx.nd.array(np.zeros((3, 2))) <NDArray 3x2 @cpu(0)> >>> mx.nd.array(np.zeros((3, 2)), mx.gpu(0)) <NDArray 3x2 @gpu(0)> >>> mx.nd.array(mx.nd.zeros((3, 2), stype='row_sparse')) <RowSparseNDArray 3x2 @cpu(0)> """ if spsp is not None and isinstance(source_array, spsp.csr.csr_matrix): return _sparse_array(source_array, ctx=ctx, dtype=dtype) elif isinstance(source_array, NDArray) and source_array.stype != 'default': return _sparse_array(source_array, ctx=ctx, dtype=dtype) else: return _array(source_array, ctx=ctx, dtype=dtype)
def function[array, parameter[source_array, ctx, dtype]]: constant[Creates an array from any object exposing the array interface. Parameters ---------- source_array : array_like An object exposing the array interface, an object whose `__array__` method returns an array, or any (nested) sequence. ctx : Context, optional Device context (default is the current default context). dtype : str or numpy.dtype, optional The data type of the output array. The default dtype is ``source_array.dtype`` if `source_array` is an `NDArray`, `float32` otherwise. Returns ------- NDArray, RowSparseNDArray or CSRNDArray An array with the same contents as the `source_array`. Examples -------- >>> import numpy as np >>> mx.nd.array([1, 2, 3]) <NDArray 3 @cpu(0)> >>> mx.nd.array([[1, 2], [3, 4]]) <NDArray 2x2 @cpu(0)> >>> mx.nd.array(np.zeros((3, 2))) <NDArray 3x2 @cpu(0)> >>> mx.nd.array(np.zeros((3, 2)), mx.gpu(0)) <NDArray 3x2 @gpu(0)> >>> mx.nd.array(mx.nd.zeros((3, 2), stype='row_sparse')) <RowSparseNDArray 3x2 @cpu(0)> ] if <ast.BoolOp object at 0x7da1b2013a00> begin[:] return[call[name[_sparse_array], parameter[name[source_array]]]]
keyword[def] identifier[array] ( identifier[source_array] , identifier[ctx] = keyword[None] , identifier[dtype] = keyword[None] ): literal[string] keyword[if] identifier[spsp] keyword[is] keyword[not] keyword[None] keyword[and] identifier[isinstance] ( identifier[source_array] , identifier[spsp] . identifier[csr] . identifier[csr_matrix] ): keyword[return] identifier[_sparse_array] ( identifier[source_array] , identifier[ctx] = identifier[ctx] , identifier[dtype] = identifier[dtype] ) keyword[elif] identifier[isinstance] ( identifier[source_array] , identifier[NDArray] ) keyword[and] identifier[source_array] . identifier[stype] != literal[string] : keyword[return] identifier[_sparse_array] ( identifier[source_array] , identifier[ctx] = identifier[ctx] , identifier[dtype] = identifier[dtype] ) keyword[else] : keyword[return] identifier[_array] ( identifier[source_array] , identifier[ctx] = identifier[ctx] , identifier[dtype] = identifier[dtype] )
def array(source_array, ctx=None, dtype=None): """Creates an array from any object exposing the array interface. Parameters ---------- source_array : array_like An object exposing the array interface, an object whose `__array__` method returns an array, or any (nested) sequence. ctx : Context, optional Device context (default is the current default context). dtype : str or numpy.dtype, optional The data type of the output array. The default dtype is ``source_array.dtype`` if `source_array` is an `NDArray`, `float32` otherwise. Returns ------- NDArray, RowSparseNDArray or CSRNDArray An array with the same contents as the `source_array`. Examples -------- >>> import numpy as np >>> mx.nd.array([1, 2, 3]) <NDArray 3 @cpu(0)> >>> mx.nd.array([[1, 2], [3, 4]]) <NDArray 2x2 @cpu(0)> >>> mx.nd.array(np.zeros((3, 2))) <NDArray 3x2 @cpu(0)> >>> mx.nd.array(np.zeros((3, 2)), mx.gpu(0)) <NDArray 3x2 @gpu(0)> >>> mx.nd.array(mx.nd.zeros((3, 2), stype='row_sparse')) <RowSparseNDArray 3x2 @cpu(0)> """ if spsp is not None and isinstance(source_array, spsp.csr.csr_matrix): return _sparse_array(source_array, ctx=ctx, dtype=dtype) # depends on [control=['if'], data=[]] elif isinstance(source_array, NDArray) and source_array.stype != 'default': return _sparse_array(source_array, ctx=ctx, dtype=dtype) # depends on [control=['if'], data=[]] else: return _array(source_array, ctx=ctx, dtype=dtype)
def members(self): """Returns a list of :class:`Member` that are currently inside this voice channel.""" ret = [] for user_id, state in self.guild._voice_states.items(): if state.channel.id == self.id: member = self.guild.get_member(user_id) if member is not None: ret.append(member) return ret
def function[members, parameter[self]]: constant[Returns a list of :class:`Member` that are currently inside this voice channel.] variable[ret] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b2046e30>, <ast.Name object at 0x7da1b2046380>]]] in starred[call[name[self].guild._voice_states.items, parameter[]]] begin[:] if compare[name[state].channel.id equal[==] name[self].id] begin[:] variable[member] assign[=] call[name[self].guild.get_member, parameter[name[user_id]]] if compare[name[member] is_not constant[None]] begin[:] call[name[ret].append, parameter[name[member]]] return[name[ret]]
keyword[def] identifier[members] ( identifier[self] ): literal[string] identifier[ret] =[] keyword[for] identifier[user_id] , identifier[state] keyword[in] identifier[self] . identifier[guild] . identifier[_voice_states] . identifier[items] (): keyword[if] identifier[state] . identifier[channel] . identifier[id] == identifier[self] . identifier[id] : identifier[member] = identifier[self] . identifier[guild] . identifier[get_member] ( identifier[user_id] ) keyword[if] identifier[member] keyword[is] keyword[not] keyword[None] : identifier[ret] . identifier[append] ( identifier[member] ) keyword[return] identifier[ret]
def members(self): """Returns a list of :class:`Member` that are currently inside this voice channel.""" ret = [] for (user_id, state) in self.guild._voice_states.items(): if state.channel.id == self.id: member = self.guild.get_member(user_id) if member is not None: ret.append(member) # depends on [control=['if'], data=['member']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return ret
def cublasChpr(handle, uplo, n, alpha, x, incx, AP): """ Rank-1 operation on Hermitian-packed matrix. """ status = _libcublas.cublasChpr_v2(handle, _CUBLAS_FILL_MODE[uplo], n, ctypes.byref(ctypes.c_float(alpha)), int(x), incx, int(AP)) cublasCheckStatus(status)
def function[cublasChpr, parameter[handle, uplo, n, alpha, x, incx, AP]]: constant[ Rank-1 operation on Hermitian-packed matrix. ] variable[status] assign[=] call[name[_libcublas].cublasChpr_v2, parameter[name[handle], call[name[_CUBLAS_FILL_MODE]][name[uplo]], name[n], call[name[ctypes].byref, parameter[call[name[ctypes].c_float, parameter[name[alpha]]]]], call[name[int], parameter[name[x]]], name[incx], call[name[int], parameter[name[AP]]]]] call[name[cublasCheckStatus], parameter[name[status]]]
keyword[def] identifier[cublasChpr] ( identifier[handle] , identifier[uplo] , identifier[n] , identifier[alpha] , identifier[x] , identifier[incx] , identifier[AP] ): literal[string] identifier[status] = identifier[_libcublas] . identifier[cublasChpr_v2] ( identifier[handle] , identifier[_CUBLAS_FILL_MODE] [ identifier[uplo] ], identifier[n] , identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_float] ( identifier[alpha] )), identifier[int] ( identifier[x] ), identifier[incx] , identifier[int] ( identifier[AP] )) identifier[cublasCheckStatus] ( identifier[status] )
def cublasChpr(handle, uplo, n, alpha, x, incx, AP): """ Rank-1 operation on Hermitian-packed matrix. """ status = _libcublas.cublasChpr_v2(handle, _CUBLAS_FILL_MODE[uplo], n, ctypes.byref(ctypes.c_float(alpha)), int(x), incx, int(AP)) cublasCheckStatus(status)
def add_platform(name, platform_set, server_url): ''' To add an ASAM platform using the specified ASAM platform set on the Novell Fan-Out Driver CLI Example: .. code-block:: bash salt-run asam.add_platform my-test-vm test-platform-set prov1.domain.com ''' config = _get_asam_configuration(server_url) if not config: return False platforms = list_platforms(server_url) if name in platforms[server_url]: return {name: "Specified platform already exists on {0}".format(server_url)} platform_sets = list_platform_sets(server_url) if platform_set not in platform_sets[server_url]: return {name: "Specified platform set does not exist on {0}".format(server_url)} url = config['platform_edit_url'] data = { 'platformName': name, 'platformSetName': platform_set, 'manual': 'false', 'previousURL': '/config/platformAdd.html', 'postType': 'PlatformAdd', 'Submit': 'Apply' } auth = ( config['username'], config['password'] ) try: html_content = _make_post_request(url, data, auth, verify=False) except Exception as exc: err_msg = "Failed to add platform on {0}".format(server_url) log.error('%s:\n%s', err_msg, exc) return {name: err_msg} platforms = list_platforms(server_url) if name in platforms[server_url]: return {name: "Successfully added platform on {0}".format(server_url)} else: return {name: "Failed to add platform on {0}".format(server_url)}
def function[add_platform, parameter[name, platform_set, server_url]]: constant[ To add an ASAM platform using the specified ASAM platform set on the Novell Fan-Out Driver CLI Example: .. code-block:: bash salt-run asam.add_platform my-test-vm test-platform-set prov1.domain.com ] variable[config] assign[=] call[name[_get_asam_configuration], parameter[name[server_url]]] if <ast.UnaryOp object at 0x7da1b210a3b0> begin[:] return[constant[False]] variable[platforms] assign[=] call[name[list_platforms], parameter[name[server_url]]] if compare[name[name] in call[name[platforms]][name[server_url]]] begin[:] return[dictionary[[<ast.Name object at 0x7da1b210a650>], [<ast.Call object at 0x7da1b21088b0>]]] variable[platform_sets] assign[=] call[name[list_platform_sets], parameter[name[server_url]]] if compare[name[platform_set] <ast.NotIn object at 0x7da2590d7190> call[name[platform_sets]][name[server_url]]] begin[:] return[dictionary[[<ast.Name object at 0x7da1b210a290>], [<ast.Call object at 0x7da1b210abc0>]]] variable[url] assign[=] call[name[config]][constant[platform_edit_url]] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c22da0>, <ast.Constant object at 0x7da1b208b0d0>, <ast.Constant object at 0x7da1b2089f60>, <ast.Constant object at 0x7da1b2089c60>, <ast.Constant object at 0x7da1b2089840>, <ast.Constant object at 0x7da1b208a3b0>], [<ast.Name object at 0x7da1b20890f0>, <ast.Name object at 0x7da1b2089ba0>, <ast.Constant object at 0x7da18f00df00>, <ast.Constant object at 0x7da18f00d9f0>, <ast.Constant object at 0x7da18f00f610>, <ast.Constant object at 0x7da18f00f4f0>]] variable[auth] assign[=] tuple[[<ast.Subscript object at 0x7da18f00ea10>, <ast.Subscript object at 0x7da18f00f7c0>]] <ast.Try object at 0x7da18f00e260> variable[platforms] assign[=] call[name[list_platforms], parameter[name[server_url]]] if compare[name[name] in call[name[platforms]][name[server_url]]] begin[:] return[dictionary[[<ast.Name object at 0x7da18f00f4c0>], [<ast.Call object at 0x7da18f00d720>]]]
keyword[def] identifier[add_platform] ( identifier[name] , identifier[platform_set] , identifier[server_url] ): literal[string] identifier[config] = identifier[_get_asam_configuration] ( identifier[server_url] ) keyword[if] keyword[not] identifier[config] : keyword[return] keyword[False] identifier[platforms] = identifier[list_platforms] ( identifier[server_url] ) keyword[if] identifier[name] keyword[in] identifier[platforms] [ identifier[server_url] ]: keyword[return] { identifier[name] : literal[string] . identifier[format] ( identifier[server_url] )} identifier[platform_sets] = identifier[list_platform_sets] ( identifier[server_url] ) keyword[if] identifier[platform_set] keyword[not] keyword[in] identifier[platform_sets] [ identifier[server_url] ]: keyword[return] { identifier[name] : literal[string] . identifier[format] ( identifier[server_url] )} identifier[url] = identifier[config] [ literal[string] ] identifier[data] ={ literal[string] : identifier[name] , literal[string] : identifier[platform_set] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } identifier[auth] =( identifier[config] [ literal[string] ], identifier[config] [ literal[string] ] ) keyword[try] : identifier[html_content] = identifier[_make_post_request] ( identifier[url] , identifier[data] , identifier[auth] , identifier[verify] = keyword[False] ) keyword[except] identifier[Exception] keyword[as] identifier[exc] : identifier[err_msg] = literal[string] . identifier[format] ( identifier[server_url] ) identifier[log] . identifier[error] ( literal[string] , identifier[err_msg] , identifier[exc] ) keyword[return] { identifier[name] : identifier[err_msg] } identifier[platforms] = identifier[list_platforms] ( identifier[server_url] ) keyword[if] identifier[name] keyword[in] identifier[platforms] [ identifier[server_url] ]: keyword[return] { identifier[name] : literal[string] . identifier[format] ( identifier[server_url] )} keyword[else] : keyword[return] { identifier[name] : literal[string] . identifier[format] ( identifier[server_url] )}
def add_platform(name, platform_set, server_url): """ To add an ASAM platform using the specified ASAM platform set on the Novell Fan-Out Driver CLI Example: .. code-block:: bash salt-run asam.add_platform my-test-vm test-platform-set prov1.domain.com """ config = _get_asam_configuration(server_url) if not config: return False # depends on [control=['if'], data=[]] platforms = list_platforms(server_url) if name in platforms[server_url]: return {name: 'Specified platform already exists on {0}'.format(server_url)} # depends on [control=['if'], data=['name']] platform_sets = list_platform_sets(server_url) if platform_set not in platform_sets[server_url]: return {name: 'Specified platform set does not exist on {0}'.format(server_url)} # depends on [control=['if'], data=[]] url = config['platform_edit_url'] data = {'platformName': name, 'platformSetName': platform_set, 'manual': 'false', 'previousURL': '/config/platformAdd.html', 'postType': 'PlatformAdd', 'Submit': 'Apply'} auth = (config['username'], config['password']) try: html_content = _make_post_request(url, data, auth, verify=False) # depends on [control=['try'], data=[]] except Exception as exc: err_msg = 'Failed to add platform on {0}'.format(server_url) log.error('%s:\n%s', err_msg, exc) return {name: err_msg} # depends on [control=['except'], data=['exc']] platforms = list_platforms(server_url) if name in platforms[server_url]: return {name: 'Successfully added platform on {0}'.format(server_url)} # depends on [control=['if'], data=['name']] else: return {name: 'Failed to add platform on {0}'.format(server_url)}
def trim_path(path, length=30):
    """
    trim path to specified length, for example:

    >>> a = '/project/apps/default/settings.ini'
    >>> trim_path(a)
    '.../apps/default/settings.ini'

    The real length will be length-4; the leading '.../' takes the remaining 4 characters.
    """
    s = path.replace('\\', '/').split('/')
    t = -1
    for i in range(len(s)-1, -1, -1):
        t = len(s[i]) + t + 1
        if t > length-4:
            break
    return '.../' + '/'.join(s[i+1:])
def function[trim_path, parameter[path, length]]:
    constant[
    trim path to specified length, for example:

    >>> a = '/project/apps/default/settings.ini'
    >>> trim_path(a)
    '.../apps/default/settings.ini'

    The real length will be length-4; the leading '.../' takes the remaining 4 characters.
    ]
    variable[s] assign[=] call[call[name[path].replace, parameter[constant[\], constant[/]]].split, parameter[constant[/]]]
    variable[t] assign[=] <ast.UnaryOp object at 0x7da1b1113d00>
    for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[s]]] - constant[1]], <ast.UnaryOp object at 0x7da1b11122f0>, <ast.UnaryOp object at 0x7da1b1113fd0>]]] begin[:]
        variable[t] assign[=] binary_operation[binary_operation[call[name[len], parameter[call[name[s]][name[i]]]] + name[t]] + constant[1]]
        if compare[name[t] greater[>] binary_operation[name[length] - constant[4]]] begin[:]
            break
    return[binary_operation[constant[.../] + call[constant[/].join, parameter[call[name[s]][<ast.Slice object at 0x7da1b1112dd0>]]]]]
keyword[def] identifier[trim_path] ( identifier[path] , identifier[length] = literal[int] ): literal[string] identifier[s] = identifier[path] . identifier[replace] ( literal[string] , literal[string] ). identifier[split] ( literal[string] ) identifier[t] =- literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[s] )- literal[int] ,- literal[int] ,- literal[int] ): identifier[t] = identifier[len] ( identifier[s] [ identifier[i] ])+ identifier[t] + literal[int] keyword[if] identifier[t] > identifier[length] - literal[int] : keyword[break] keyword[return] literal[string] + literal[string] . identifier[join] ( identifier[s] [ identifier[i] + literal[int] :])
def trim_path(path, length=30):
    """
    trim path to specified length, for example:

    >>> a = '/project/apps/default/settings.ini'
    >>> trim_path(a)
    '.../apps/default/settings.ini'

    The real length will be length-4; the leading '.../' takes the remaining 4 characters.
    """
    s = path.replace('\\', '/').split('/')
    t = -1
    for i in range(len(s) - 1, -1, -1):
        t = len(s[i]) + t + 1
        if t > length - 4:
            break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
    return '.../' + '/'.join(s[i + 1:])
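A quick check of the backslash handling and the cut-off behavior; the segment that first pushes the running total past length-4 is dropped along with everything before it:

>>> trim_path('C:\\work\\project\\apps\\default\\settings.ini')
'.../apps/default/settings.ini'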
def MakeTargetMask(target, pad=0):
  """Create an attention mask to hide padding and future words."""
  target_mask = (target != pad)[:, np.newaxis, :]
  target_dtype = target_mask.dtype
  causal_mask = onp.tril(onp.ones((1, target.shape[-1], target.shape[-1]),
                                  dtype=target_dtype), k=0)
  target_mask = target_mask & causal_mask
  return np.expand_dims(target_mask, axis=1)
def function[MakeTargetMask, parameter[target, pad]]: constant[Create an attention mask to hide padding and future words.] variable[target_mask] assign[=] call[compare[name[target] not_equal[!=] name[pad]]][tuple[[<ast.Slice object at 0x7da18f58c430>, <ast.Attribute object at 0x7da18f58d9c0>, <ast.Slice object at 0x7da18f58ea40>]]] variable[target_dtype] assign[=] name[target_mask].dtype variable[causal_mask] assign[=] call[name[onp].tril, parameter[call[name[onp].ones, parameter[tuple[[<ast.Constant object at 0x7da18f58c8e0>, <ast.Subscript object at 0x7da18f58dc30>, <ast.Subscript object at 0x7da18f58e200>]]]]]] variable[target_mask] assign[=] binary_operation[name[target_mask] <ast.BitAnd object at 0x7da2590d6b60> name[causal_mask]] return[call[name[np].expand_dims, parameter[name[target_mask]]]]
keyword[def] identifier[MakeTargetMask] ( identifier[target] , identifier[pad] = literal[int] ): literal[string] identifier[target_mask] =( identifier[target] != identifier[pad] )[:, identifier[np] . identifier[newaxis] ,:] identifier[target_dtype] = identifier[target_mask] . identifier[dtype] identifier[causal_mask] = identifier[onp] . identifier[tril] ( identifier[onp] . identifier[ones] (( literal[int] , identifier[target] . identifier[shape] [- literal[int] ], identifier[target] . identifier[shape] [- literal[int] ]), identifier[dtype] = identifier[target_dtype] ), identifier[k] = literal[int] ) identifier[target_mask] = identifier[target_mask] & identifier[causal_mask] keyword[return] identifier[np] . identifier[expand_dims] ( identifier[target_mask] , identifier[axis] = literal[int] )
def MakeTargetMask(target, pad=0): """Create an attention mask to hide padding and future words.""" target_mask = (target != pad)[:, np.newaxis, :] target_dtype = target_mask.dtype causal_mask = onp.tril(onp.ones((1, target.shape[-1], target.shape[-1]), dtype=target_dtype), k=0) target_mask = target_mask & causal_mask return np.expand_dims(target_mask, axis=1)
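Assuming np is jax.numpy (or a compatible accelerator backend) and onp is plain NumPy, as the two prefixes suggest, a batch-of-one target with a trailing pad yields a causal mask whose padded column is zeroed:

import numpy as onp  # assumption: onp = NumPy, np = a jax.numpy-style backend

target = onp.array([[5, 7, 0]])   # batch 1, length 3, 0 is the pad id
mask = MakeTargetMask(target)     # shape (1, 1, 3, 3)
# mask[0, 0] ==
# [[1, 0, 0],
#  [1, 1, 0],
#  [1, 1, 0]]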
def normalize_codec_name(name): '''Return the Python name of the encoder/decoder Returns: str, None ''' name = UnicodeDammit.CHARSET_ALIASES.get(name.lower(), name) try: return codecs.lookup(name).name except (LookupError, TypeError, ValueError): # TypeError occurs when name contains \x00 (ValueError in Py3.5) pass
def function[normalize_codec_name, parameter[name]]: constant[Return the Python name of the encoder/decoder Returns: str, None ] variable[name] assign[=] call[name[UnicodeDammit].CHARSET_ALIASES.get, parameter[call[name[name].lower, parameter[]], name[name]]] <ast.Try object at 0x7da2044c0400>
keyword[def] identifier[normalize_codec_name] ( identifier[name] ): literal[string] identifier[name] = identifier[UnicodeDammit] . identifier[CHARSET_ALIASES] . identifier[get] ( identifier[name] . identifier[lower] (), identifier[name] ) keyword[try] : keyword[return] identifier[codecs] . identifier[lookup] ( identifier[name] ). identifier[name] keyword[except] ( identifier[LookupError] , identifier[TypeError] , identifier[ValueError] ): keyword[pass]
def normalize_codec_name(name): """Return the Python name of the encoder/decoder Returns: str, None """ name = UnicodeDammit.CHARSET_ALIASES.get(name.lower(), name) try: return codecs.lookup(name).name # depends on [control=['try'], data=[]] except (LookupError, TypeError, ValueError): # TypeError occurs when name contains \x00 (ValueError in Py3.5) pass # depends on [control=['except'], data=[]]
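Assuming UnicodeDammit here is BeautifulSoup's (the usual home of CHARSET_ALIASES), the helper canonicalizes codec spellings and returns None, implicitly, when the lookup fails:

>>> normalize_codec_name('UTF8')
'utf-8'
>>> normalize_codec_name('no-such-codec') is None
True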
def SerializeExclusiveData(self, writer): """ Serialize object. Args: writer (neo.IO.BinaryWriter): """ writer.WriteVarBytes(self.Script) if self.Version >= 1: writer.WriteFixed8(self.Gas)
def function[SerializeExclusiveData, parameter[self, writer]]: constant[ Serialize object. Args: writer (neo.IO.BinaryWriter): ] call[name[writer].WriteVarBytes, parameter[name[self].Script]] if compare[name[self].Version greater_or_equal[>=] constant[1]] begin[:] call[name[writer].WriteFixed8, parameter[name[self].Gas]]
keyword[def] identifier[SerializeExclusiveData] ( identifier[self] , identifier[writer] ): literal[string] identifier[writer] . identifier[WriteVarBytes] ( identifier[self] . identifier[Script] ) keyword[if] identifier[self] . identifier[Version] >= literal[int] : identifier[writer] . identifier[WriteFixed8] ( identifier[self] . identifier[Gas] )
def SerializeExclusiveData(self, writer): """ Serialize object. Args: writer (neo.IO.BinaryWriter): """ writer.WriteVarBytes(self.Script) if self.Version >= 1: writer.WriteFixed8(self.Gas) # depends on [control=['if'], data=[]]
def _write_method(schema): """Add a write method for named schema to a class. """ def method( self, filename=None, schema=schema, id_col='uid', sequence_col='sequence', extra_data=None, alphabet=None, **kwargs): # Use generic write class to write data. return _write( self._data, filename=filename, schema=schema, id_col=id_col, sequence_col=sequence_col, extra_data=extra_data, alphabet=alphabet, **kwargs ) # Update docs method.__doc__ = _write_doc_template(schema) return method
def function[_write_method, parameter[schema]]: constant[Add a write method for named schema to a class. ] def function[method, parameter[self, filename, schema, id_col, sequence_col, extra_data, alphabet]]: return[call[name[_write], parameter[name[self]._data]]] name[method].__doc__ assign[=] call[name[_write_doc_template], parameter[name[schema]]] return[name[method]]
keyword[def] identifier[_write_method] ( identifier[schema] ): literal[string] keyword[def] identifier[method] ( identifier[self] , identifier[filename] = keyword[None] , identifier[schema] = identifier[schema] , identifier[id_col] = literal[string] , identifier[sequence_col] = literal[string] , identifier[extra_data] = keyword[None] , identifier[alphabet] = keyword[None] , ** identifier[kwargs] ): keyword[return] identifier[_write] ( identifier[self] . identifier[_data] , identifier[filename] = identifier[filename] , identifier[schema] = identifier[schema] , identifier[id_col] = identifier[id_col] , identifier[sequence_col] = identifier[sequence_col] , identifier[extra_data] = identifier[extra_data] , identifier[alphabet] = identifier[alphabet] , ** identifier[kwargs] ) identifier[method] . identifier[__doc__] = identifier[_write_doc_template] ( identifier[schema] ) keyword[return] identifier[method]
def _write_method(schema): """Add a write method for named schema to a class. """ def method(self, filename=None, schema=schema, id_col='uid', sequence_col='sequence', extra_data=None, alphabet=None, **kwargs): # Use generic write class to write data. return _write(self._data, filename=filename, schema=schema, id_col=id_col, sequence_col=sequence_col, extra_data=extra_data, alphabet=alphabet, **kwargs) # Update docs method.__doc__ = _write_doc_template(schema) return method
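The factory is presumably used to stamp one writer method per schema onto a sequence-collection class; a hedged sketch of that attachment pattern follows (the host class and schema names are invented, and _write / _write_doc_template come from the surrounding module):

class AlignmentTable:              # hypothetical host class
    def __init__(self, data):
        self._data = data

# One generated method per supported schema:
for schema in ('fasta', 'phylip'):
    setattr(AlignmentTable, 'to_' + schema, _write_method(schema))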
def AddWiFiDevice(self, device_name, iface_name, state): '''Add a WiFi Device. You have to specify device_name, device interface name (e. g. wlan0) and state. You can use the predefined DeviceState values (e. g. DeviceState.ACTIVATED) or supply a numeric value. For valid state values, please visit http://projects.gnome.org/NetworkManager/developers/api/09/spec.html#type-NM_DEVICE_STATE Please note that this does not set any global properties. Returns the new object path. ''' path = '/org/freedesktop/NetworkManager/Devices/' + device_name self.AddObject(path, WIRELESS_DEVICE_IFACE, { 'HwAddress': dbus.String('11:22:33:44:55:66'), 'PermHwAddress': dbus.String('11:22:33:44:55:66'), 'Bitrate': dbus.UInt32(5400), 'Mode': dbus.UInt32(2), 'WirelessCapabilities': dbus.UInt32(255), 'AccessPoints': dbus.Array([], signature='o'), }, [ ('GetAccessPoints', '', 'ao', 'ret = self.access_points'), ('GetAllAccessPoints', '', 'ao', 'ret = self.access_points'), ('RequestScan', 'a{sv}', '', ''), ]) dev_obj = dbusmock.get_object(path) dev_obj.access_points = [] dev_obj.AddProperties(DEVICE_IFACE, { 'ActiveConnection': dbus.ObjectPath('/'), 'AvailableConnections': dbus.Array([], signature='o'), 'AutoConnect': False, 'Managed': True, 'Driver': 'dbusmock', 'DeviceType': dbus.UInt32(2), 'State': dbus.UInt32(state), 'Interface': iface_name, 'IpInterface': iface_name, }) self.object_manager_emit_added(path) NM = dbusmock.get_object(MANAGER_OBJ) devices = NM.Get(MANAGER_IFACE, 'Devices') devices.append(path) NM.Set(MANAGER_IFACE, 'Devices', devices) NM.EmitSignal('org.freedesktop.NetworkManager', 'DeviceAdded', 'o', [path]) return path
def function[AddWiFiDevice, parameter[self, device_name, iface_name, state]]: constant[Add a WiFi Device. You have to specify device_name, device interface name (e. g. wlan0) and state. You can use the predefined DeviceState values (e. g. DeviceState.ACTIVATED) or supply a numeric value. For valid state values, please visit http://projects.gnome.org/NetworkManager/developers/api/09/spec.html#type-NM_DEVICE_STATE Please note that this does not set any global properties. Returns the new object path. ] variable[path] assign[=] binary_operation[constant[/org/freedesktop/NetworkManager/Devices/] + name[device_name]] call[name[self].AddObject, parameter[name[path], name[WIRELESS_DEVICE_IFACE], dictionary[[<ast.Constant object at 0x7da18f810190>, <ast.Constant object at 0x7da18f812470>, <ast.Constant object at 0x7da18f811870>, <ast.Constant object at 0x7da18f811ae0>, <ast.Constant object at 0x7da18f810670>, <ast.Constant object at 0x7da18f810cd0>], [<ast.Call object at 0x7da18f813250>, <ast.Call object at 0x7da18f810a00>, <ast.Call object at 0x7da18f811ab0>, <ast.Call object at 0x7da18f8114e0>, <ast.Call object at 0x7da18f8119f0>, <ast.Call object at 0x7da18f8105b0>]], list[[<ast.Tuple object at 0x7da18f813340>, <ast.Tuple object at 0x7da18f8104c0>, <ast.Tuple object at 0x7da18f812620>]]]] variable[dev_obj] assign[=] call[name[dbusmock].get_object, parameter[name[path]]] name[dev_obj].access_points assign[=] list[[]] call[name[dev_obj].AddProperties, parameter[name[DEVICE_IFACE], dictionary[[<ast.Constant object at 0x7da18f8107f0>, <ast.Constant object at 0x7da18f813b20>, <ast.Constant object at 0x7da18f811240>, <ast.Constant object at 0x7da18f810490>, <ast.Constant object at 0x7da18f8110c0>, <ast.Constant object at 0x7da18f811e40>, <ast.Constant object at 0x7da18f8120b0>, <ast.Constant object at 0x7da18f812e00>, <ast.Constant object at 0x7da18f813b50>], [<ast.Call object at 0x7da18f8106d0>, <ast.Call object at 0x7da18f813f40>, <ast.Constant object at 0x7da18f812a70>, <ast.Constant object at 0x7da18f810910>, <ast.Constant object at 0x7da18f8122f0>, <ast.Call object at 0x7da18f812230>, <ast.Call object at 0x7da18f812950>, <ast.Name object at 0x7da18f812860>, <ast.Name object at 0x7da18f813640>]]]] call[name[self].object_manager_emit_added, parameter[name[path]]] variable[NM] assign[=] call[name[dbusmock].get_object, parameter[name[MANAGER_OBJ]]] variable[devices] assign[=] call[name[NM].Get, parameter[name[MANAGER_IFACE], constant[Devices]]] call[name[devices].append, parameter[name[path]]] call[name[NM].Set, parameter[name[MANAGER_IFACE], constant[Devices], name[devices]]] call[name[NM].EmitSignal, parameter[constant[org.freedesktop.NetworkManager], constant[DeviceAdded], constant[o], list[[<ast.Name object at 0x7da18f812560>]]]] return[name[path]]
keyword[def] identifier[AddWiFiDevice] ( identifier[self] , identifier[device_name] , identifier[iface_name] , identifier[state] ): literal[string] identifier[path] = literal[string] + identifier[device_name] identifier[self] . identifier[AddObject] ( identifier[path] , identifier[WIRELESS_DEVICE_IFACE] , { literal[string] : identifier[dbus] . identifier[String] ( literal[string] ), literal[string] : identifier[dbus] . identifier[String] ( literal[string] ), literal[string] : identifier[dbus] . identifier[UInt32] ( literal[int] ), literal[string] : identifier[dbus] . identifier[UInt32] ( literal[int] ), literal[string] : identifier[dbus] . identifier[UInt32] ( literal[int] ), literal[string] : identifier[dbus] . identifier[Array] ([], identifier[signature] = literal[string] ), }, [ ( literal[string] , literal[string] , literal[string] , literal[string] ), ( literal[string] , literal[string] , literal[string] , literal[string] ), ( literal[string] , literal[string] , literal[string] , literal[string] ), ]) identifier[dev_obj] = identifier[dbusmock] . identifier[get_object] ( identifier[path] ) identifier[dev_obj] . identifier[access_points] =[] identifier[dev_obj] . identifier[AddProperties] ( identifier[DEVICE_IFACE] , { literal[string] : identifier[dbus] . identifier[ObjectPath] ( literal[string] ), literal[string] : identifier[dbus] . identifier[Array] ([], identifier[signature] = literal[string] ), literal[string] : keyword[False] , literal[string] : keyword[True] , literal[string] : literal[string] , literal[string] : identifier[dbus] . identifier[UInt32] ( literal[int] ), literal[string] : identifier[dbus] . identifier[UInt32] ( identifier[state] ), literal[string] : identifier[iface_name] , literal[string] : identifier[iface_name] , }) identifier[self] . identifier[object_manager_emit_added] ( identifier[path] ) identifier[NM] = identifier[dbusmock] . identifier[get_object] ( identifier[MANAGER_OBJ] ) identifier[devices] = identifier[NM] . identifier[Get] ( identifier[MANAGER_IFACE] , literal[string] ) identifier[devices] . identifier[append] ( identifier[path] ) identifier[NM] . identifier[Set] ( identifier[MANAGER_IFACE] , literal[string] , identifier[devices] ) identifier[NM] . identifier[EmitSignal] ( literal[string] , literal[string] , literal[string] ,[ identifier[path] ]) keyword[return] identifier[path]
def AddWiFiDevice(self, device_name, iface_name, state): """Add a WiFi Device. You have to specify device_name, device interface name (e. g. wlan0) and state. You can use the predefined DeviceState values (e. g. DeviceState.ACTIVATED) or supply a numeric value. For valid state values, please visit http://projects.gnome.org/NetworkManager/developers/api/09/spec.html#type-NM_DEVICE_STATE Please note that this does not set any global properties. Returns the new object path. """ path = '/org/freedesktop/NetworkManager/Devices/' + device_name self.AddObject(path, WIRELESS_DEVICE_IFACE, {'HwAddress': dbus.String('11:22:33:44:55:66'), 'PermHwAddress': dbus.String('11:22:33:44:55:66'), 'Bitrate': dbus.UInt32(5400), 'Mode': dbus.UInt32(2), 'WirelessCapabilities': dbus.UInt32(255), 'AccessPoints': dbus.Array([], signature='o')}, [('GetAccessPoints', '', 'ao', 'ret = self.access_points'), ('GetAllAccessPoints', '', 'ao', 'ret = self.access_points'), ('RequestScan', 'a{sv}', '', '')]) dev_obj = dbusmock.get_object(path) dev_obj.access_points = [] dev_obj.AddProperties(DEVICE_IFACE, {'ActiveConnection': dbus.ObjectPath('/'), 'AvailableConnections': dbus.Array([], signature='o'), 'AutoConnect': False, 'Managed': True, 'Driver': 'dbusmock', 'DeviceType': dbus.UInt32(2), 'State': dbus.UInt32(state), 'Interface': iface_name, 'IpInterface': iface_name}) self.object_manager_emit_added(path) NM = dbusmock.get_object(MANAGER_OBJ) devices = NM.Get(MANAGER_IFACE, 'Devices') devices.append(path) NM.Set(MANAGER_IFACE, 'Devices', devices) NM.EmitSignal('org.freedesktop.NetworkManager', 'DeviceAdded', 'o', [path]) return path
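A hedged sketch of driving this template method from a python-dbusmock test case; the spawning boilerplate follows the dbusmock documentation, and 100 is NM_DEVICE_STATE_ACTIVATED:

import subprocess

import dbus
import dbusmock

class TestNM(dbusmock.DBusTestCase):
    @classmethod
    def setUpClass(cls):
        cls.start_system_bus()
        cls.dbus_con = cls.get_dbus(system_bus=True)

    def test_add_wifi_device(self):
        (p_mock, obj_nm) = self.spawn_server_template(
            'networkmanager', {}, stdout=subprocess.PIPE)
        nm_mock = dbus.Interface(obj_nm, dbusmock.MOCK_IFACE)
        path = nm_mock.AddWiFiDevice('dev0', 'wlan0', 100)
        self.assertEqual(str(path),
                         '/org/freedesktop/NetworkManager/Devices/dev0')
        p_mock.terminate()
        p_mock.wait()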
def request(self, filter=None): """Retrieve running configuration and device state information. *filter* specifies the portion of the configuration to retrieve (by default entire configuration is retrieved) :seealso: :ref:`filter_params` """ node = new_ele("get-bulk") if filter is not None: node.append(util.build_filter(filter)) return self._request(node)
def function[request, parameter[self, filter]]: constant[Retrieve running configuration and device state information. *filter* specifies the portion of the configuration to retrieve (by default entire configuration is retrieved) :seealso: :ref:`filter_params` ] variable[node] assign[=] call[name[new_ele], parameter[constant[get-bulk]]] if compare[name[filter] is_not constant[None]] begin[:] call[name[node].append, parameter[call[name[util].build_filter, parameter[name[filter]]]]] return[call[name[self]._request, parameter[name[node]]]]
keyword[def] identifier[request] ( identifier[self] , identifier[filter] = keyword[None] ): literal[string] identifier[node] = identifier[new_ele] ( literal[string] ) keyword[if] identifier[filter] keyword[is] keyword[not] keyword[None] : identifier[node] . identifier[append] ( identifier[util] . identifier[build_filter] ( identifier[filter] )) keyword[return] identifier[self] . identifier[_request] ( identifier[node] )
def request(self, filter=None): """Retrieve running configuration and device state information. *filter* specifies the portion of the configuration to retrieve (by default entire configuration is retrieved) :seealso: :ref:`filter_params` """ node = new_ele('get-bulk') if filter is not None: node.append(util.build_filter(filter)) # depends on [control=['if'], data=['filter']] return self._request(node)
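How this is invoked depends on how the surrounding package registers the operation; assuming an ncclient-style session where the class is exposed on the manager as get_bulk (that dispatch name is a guess, not stock ncclient), a subtree-filtered call would look like:

from ncclient import manager  # assumption: ncclient-style transport underneath

with manager.connect(host='10.0.0.1', username='admin', password='admin',
                     hostkey_verify=False) as m:
    # 'get_bulk' as the operation name is an assumption about the vendor package.
    reply = m.get_bulk(filter=('subtree', '<top xmlns="urn:example"/>'))
    print(reply)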
def _process_loop(self): '''Fetch URL including redirects. Coroutine. ''' while not self._web_client_session.done(): self._item_session.request = self._web_client_session.next_request() verdict, reason = self._should_fetch_reason() _logger.debug('Filter verdict {} reason {}', verdict, reason) if not verdict: self._item_session.skip() break exit_early, wait_time = yield from self._fetch_one(cast(Request, self._item_session.request)) if wait_time: _logger.debug('Sleeping {}', wait_time) yield from asyncio.sleep(wait_time) if exit_early: break
def function[_process_loop, parameter[self]]: constant[Fetch URL including redirects. Coroutine. ] while <ast.UnaryOp object at 0x7da2043456c0> begin[:] name[self]._item_session.request assign[=] call[name[self]._web_client_session.next_request, parameter[]] <ast.Tuple object at 0x7da18f723f10> assign[=] call[name[self]._should_fetch_reason, parameter[]] call[name[_logger].debug, parameter[constant[Filter verdict {} reason {}], name[verdict], name[reason]]] if <ast.UnaryOp object at 0x7da18f720a30> begin[:] call[name[self]._item_session.skip, parameter[]] break <ast.Tuple object at 0x7da18f721120> assign[=] <ast.YieldFrom object at 0x7da18f720310> if name[wait_time] begin[:] call[name[_logger].debug, parameter[constant[Sleeping {}], name[wait_time]]] <ast.YieldFrom object at 0x7da1b26afe50> if name[exit_early] begin[:] break
keyword[def] identifier[_process_loop] ( identifier[self] ): literal[string] keyword[while] keyword[not] identifier[self] . identifier[_web_client_session] . identifier[done] (): identifier[self] . identifier[_item_session] . identifier[request] = identifier[self] . identifier[_web_client_session] . identifier[next_request] () identifier[verdict] , identifier[reason] = identifier[self] . identifier[_should_fetch_reason] () identifier[_logger] . identifier[debug] ( literal[string] , identifier[verdict] , identifier[reason] ) keyword[if] keyword[not] identifier[verdict] : identifier[self] . identifier[_item_session] . identifier[skip] () keyword[break] identifier[exit_early] , identifier[wait_time] = keyword[yield] keyword[from] identifier[self] . identifier[_fetch_one] ( identifier[cast] ( identifier[Request] , identifier[self] . identifier[_item_session] . identifier[request] )) keyword[if] identifier[wait_time] : identifier[_logger] . identifier[debug] ( literal[string] , identifier[wait_time] ) keyword[yield] keyword[from] identifier[asyncio] . identifier[sleep] ( identifier[wait_time] ) keyword[if] identifier[exit_early] : keyword[break]
def _process_loop(self): """Fetch URL including redirects. Coroutine. """ while not self._web_client_session.done(): self._item_session.request = self._web_client_session.next_request() (verdict, reason) = self._should_fetch_reason() _logger.debug('Filter verdict {} reason {}', verdict, reason) if not verdict: self._item_session.skip() break # depends on [control=['if'], data=[]] (exit_early, wait_time) = (yield from self._fetch_one(cast(Request, self._item_session.request))) if wait_time: _logger.debug('Sleeping {}', wait_time) yield from asyncio.sleep(wait_time) # depends on [control=['if'], data=[]] if exit_early: break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
def _resolve_path(self, path): """ Resolve static file paths """ filepath = None mimetype = None for root, dirs, files in self.filter_files(self.path): # Does it exist in error path? error_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'error_templates', path) try: with open(error_path): mimetype, encoding = mimetypes.guess_type(error_path) filepath = error_path except IOError: pass # Does it exist in Tarbell blueprint? if self.base: basepath = os.path.join(root, self.blueprint_name, path) try: with open(basepath): mimetype, encoding = mimetypes.guess_type(basepath) filepath = basepath except IOError: pass # Does it exist under regular path? fullpath = os.path.join(root, path) try: with open(fullpath): mimetype, encoding = mimetypes.guess_type(fullpath) filepath = fullpath except IOError: pass return filepath, mimetype
def function[_resolve_path, parameter[self, path]]: constant[ Resolve static file paths ] variable[filepath] assign[=] constant[None] variable[mimetype] assign[=] constant[None] for taget[tuple[[<ast.Name object at 0x7da1b1920520>, <ast.Name object at 0x7da1b1920fa0>, <ast.Name object at 0x7da1b1923430>]]] in starred[call[name[self].filter_files, parameter[name[self].path]]] begin[:] variable[error_path] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[__file__]]]]], constant[error_templates], name[path]]] <ast.Try object at 0x7da1b1920df0> if name[self].base begin[:] variable[basepath] assign[=] call[name[os].path.join, parameter[name[root], name[self].blueprint_name, name[path]]] <ast.Try object at 0x7da1b1a1e9b0> variable[fullpath] assign[=] call[name[os].path.join, parameter[name[root], name[path]]] <ast.Try object at 0x7da1b1a1c100> return[tuple[[<ast.Name object at 0x7da1b1a1d030>, <ast.Name object at 0x7da1b1a1eaa0>]]]
keyword[def] identifier[_resolve_path] ( identifier[self] , identifier[path] ): literal[string] identifier[filepath] = keyword[None] identifier[mimetype] = keyword[None] keyword[for] identifier[root] , identifier[dirs] , identifier[files] keyword[in] identifier[self] . identifier[filter_files] ( identifier[self] . identifier[path] ): identifier[error_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[__file__] )), literal[string] , identifier[path] ) keyword[try] : keyword[with] identifier[open] ( identifier[error_path] ): identifier[mimetype] , identifier[encoding] = identifier[mimetypes] . identifier[guess_type] ( identifier[error_path] ) identifier[filepath] = identifier[error_path] keyword[except] identifier[IOError] : keyword[pass] keyword[if] identifier[self] . identifier[base] : identifier[basepath] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[self] . identifier[blueprint_name] , identifier[path] ) keyword[try] : keyword[with] identifier[open] ( identifier[basepath] ): identifier[mimetype] , identifier[encoding] = identifier[mimetypes] . identifier[guess_type] ( identifier[basepath] ) identifier[filepath] = identifier[basepath] keyword[except] identifier[IOError] : keyword[pass] identifier[fullpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[path] ) keyword[try] : keyword[with] identifier[open] ( identifier[fullpath] ): identifier[mimetype] , identifier[encoding] = identifier[mimetypes] . identifier[guess_type] ( identifier[fullpath] ) identifier[filepath] = identifier[fullpath] keyword[except] identifier[IOError] : keyword[pass] keyword[return] identifier[filepath] , identifier[mimetype]
def _resolve_path(self, path): """ Resolve static file paths """ filepath = None mimetype = None for (root, dirs, files) in self.filter_files(self.path): # Does it exist in error path? error_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'error_templates', path) try: with open(error_path): (mimetype, encoding) = mimetypes.guess_type(error_path) filepath = error_path # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]] except IOError: pass # depends on [control=['except'], data=[]] # Does it exist in Tarbell blueprint? if self.base: basepath = os.path.join(root, self.blueprint_name, path) try: with open(basepath): (mimetype, encoding) = mimetypes.guess_type(basepath) filepath = basepath # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]] except IOError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # Does it exist under regular path? fullpath = os.path.join(root, path) try: with open(fullpath): (mimetype, encoding) = mimetypes.guess_type(fullpath) filepath = fullpath # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]] except IOError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] return (filepath, mimetype)
def wind_shear(shear: str, unit_alt: str = 'ft', unit_wind: str = 'kt') -> str: """ Format wind shear string into a spoken word string """ unit_alt = SPOKEN_UNITS.get(unit_alt, unit_alt) unit_wind = SPOKEN_UNITS.get(unit_wind, unit_wind) return translate.wind_shear(shear, unit_alt, unit_wind, spoken=True) or 'Wind shear unknown'
def function[wind_shear, parameter[shear, unit_alt, unit_wind]]: constant[ Format wind shear string into a spoken word string ] variable[unit_alt] assign[=] call[name[SPOKEN_UNITS].get, parameter[name[unit_alt], name[unit_alt]]] variable[unit_wind] assign[=] call[name[SPOKEN_UNITS].get, parameter[name[unit_wind], name[unit_wind]]] return[<ast.BoolOp object at 0x7da204961d20>]
keyword[def] identifier[wind_shear] ( identifier[shear] : identifier[str] , identifier[unit_alt] : identifier[str] = literal[string] , identifier[unit_wind] : identifier[str] = literal[string] )-> identifier[str] : literal[string] identifier[unit_alt] = identifier[SPOKEN_UNITS] . identifier[get] ( identifier[unit_alt] , identifier[unit_alt] ) identifier[unit_wind] = identifier[SPOKEN_UNITS] . identifier[get] ( identifier[unit_wind] , identifier[unit_wind] ) keyword[return] identifier[translate] . identifier[wind_shear] ( identifier[shear] , identifier[unit_alt] , identifier[unit_wind] , identifier[spoken] = keyword[True] ) keyword[or] literal[string]
def wind_shear(shear: str, unit_alt: str='ft', unit_wind: str='kt') -> str: """ Format wind shear string into a spoken word string """ unit_alt = SPOKEN_UNITS.get(unit_alt, unit_alt) unit_wind = SPOKEN_UNITS.get(unit_wind, unit_wind) return translate.wind_shear(shear, unit_alt, unit_wind, spoken=True) or 'Wind shear unknown'
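A quick smoke test, without asserting the library's exact wording; WS020/07040KT encodes a shear layer at 2,000 ft with wind from 070 at 40 kt:

spoken = wind_shear('WS020/07040KT')  # remark group format: WShhh/dddffKT
print(spoken)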
def connect_autoscale(aws_access_key_id=None, aws_secret_access_key=None, **kwargs): """ :type aws_access_key_id: string :param aws_access_key_id: Your AWS Access Key ID :type aws_secret_access_key: string :param aws_secret_access_key: Your AWS Secret Access Key :rtype: :class:`boto.ec2.autoscale.AutoScaleConnection` :return: A connection to Amazon's Auto Scaling Service """ from boto.ec2.autoscale import AutoScaleConnection return AutoScaleConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
def function[connect_autoscale, parameter[aws_access_key_id, aws_secret_access_key]]: constant[ :type aws_access_key_id: string :param aws_access_key_id: Your AWS Access Key ID :type aws_secret_access_key: string :param aws_secret_access_key: Your AWS Secret Access Key :rtype: :class:`boto.ec2.autoscale.AutoScaleConnection` :return: A connection to Amazon's Auto Scaling Service ] from relative_module[boto.ec2.autoscale] import module[AutoScaleConnection] return[call[name[AutoScaleConnection], parameter[name[aws_access_key_id], name[aws_secret_access_key]]]]
keyword[def] identifier[connect_autoscale] ( identifier[aws_access_key_id] = keyword[None] , identifier[aws_secret_access_key] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[boto] . identifier[ec2] . identifier[autoscale] keyword[import] identifier[AutoScaleConnection] keyword[return] identifier[AutoScaleConnection] ( identifier[aws_access_key_id] , identifier[aws_secret_access_key] ,** identifier[kwargs] )
def connect_autoscale(aws_access_key_id=None, aws_secret_access_key=None, **kwargs): """ :type aws_access_key_id: string :param aws_access_key_id: Your AWS Access Key ID :type aws_secret_access_key: string :param aws_secret_access_key: Your AWS Secret Access Key :rtype: :class:`boto.ec2.autoscale.AutoScaleConnection` :return: A connection to Amazon's Auto Scaling Service """ from boto.ec2.autoscale import AutoScaleConnection return AutoScaleConnection(aws_access_key_id, aws_secret_access_key, **kwargs)
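Typical boto2 usage: open a connection with placeholder credentials and list the account's Auto Scaling groups:

import boto

conn = boto.connect_autoscale('<AWS_ACCESS_KEY_ID>', '<AWS_SECRET_ACCESS_KEY>')
for group in conn.get_all_groups():
    print(group.name)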
def _elidable_begin(self, word): """Check word beginning to see if it is elidable. Elidable beginnings include: 1) A word begins with 'h' 2) A word begins with a vowel 3) A word begins with a diphthong :param word: syllabified/'qu' fixed word :return: True if the beginning of a word is elidable, otherwise False :rtype : bool """ if str(word[0]).startswith('h'): return True elif str(word[0][0]) in self.long_vowels: return True elif str(word[0][0] + word[0][-1]) in self.diphthongs: return True elif str(word[0][0]) in self.vowels: return True else: return False
def function[_elidable_begin, parameter[self, word]]: constant[Check word beginning to see if it is elidable. Elidable beginnings include: 1) A word begins with 'h' 2) A word begins with a vowel 3) A word begins with a diphthong :param word: syllabified/'qu' fixed word :return: True if the beginning of a word is elidable, otherwise False :rtype : bool ] if call[call[name[str], parameter[call[name[word]][constant[0]]]].startswith, parameter[constant[h]]] begin[:] return[constant[True]]
keyword[def] identifier[_elidable_begin] ( identifier[self] , identifier[word] ): literal[string] keyword[if] identifier[str] ( identifier[word] [ literal[int] ]). identifier[startswith] ( literal[string] ): keyword[return] keyword[True] keyword[elif] identifier[str] ( identifier[word] [ literal[int] ][ literal[int] ]) keyword[in] identifier[self] . identifier[long_vowels] : keyword[return] keyword[True] keyword[elif] identifier[str] ( identifier[word] [ literal[int] ][ literal[int] ]+ identifier[word] [ literal[int] ][- literal[int] ]) keyword[in] identifier[self] . identifier[diphthongs] : keyword[return] keyword[True] keyword[elif] identifier[str] ( identifier[word] [ literal[int] ][ literal[int] ]) keyword[in] identifier[self] . identifier[vowels] : keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False]
def _elidable_begin(self, word): """Check word beginning to see if it is elidable. Elidable beginnings include: 1) A word begins with 'h' 2) A word begins with a vowel 3) A word begins with a diphthong :param word: syllabified/'qu' fixed word :return: True if the beginning of a word is elidable, otherwise False :rtype : bool """ if str(word[0]).startswith('h'): return True # depends on [control=['if'], data=[]] elif str(word[0][0]) in self.long_vowels: return True # depends on [control=['if'], data=[]] elif str(word[0][0] + word[0][-1]) in self.diphthongs: return True # depends on [control=['if'], data=[]] elif str(word[0][0]) in self.vowels: return True # depends on [control=['if'], data=[]] else: return False
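Because the method only reads three vowel sets off self, it can be exercised standalone with a stand-in object; the small Latin sets below are illustrative, not the scanner's actual data:

from types import SimpleNamespace

scanner = SimpleNamespace(
    long_vowels={'ā', 'ē', 'ī', 'ō', 'ū'},
    diphthongs={'ae', 'au', 'ei', 'eu', 'oe'},
    vowels={'a', 'e', 'i', 'o', 'u'},
)
print(_elidable_begin(scanner, ['ho', 'ra']))    # True: 'h' onset
print(_elidable_begin(scanner, ['au', 'rum']))   # True: 'au' is a diphthong
print(_elidable_begin(scanner, ['car', 'men']))  # False: consonant onset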
def readAlignments(self, reads): """ Read lines of JSON from self._filename, convert them to read alignments and yield them. @param reads: An iterable of L{Read} instances, corresponding to the reads that were given to BLAST. @raise ValueError: If any of the lines in the file cannot be converted to JSON. @return: A generator that yields C{dark.alignments.ReadAlignments} instances. """ if self._fp is None: self._open(self._filename) reads = iter(reads) try: for lineNumber, line in enumerate(self._fp, start=2): try: record = loads(line[:-1]) except ValueError as e: raise ValueError( 'Could not convert line %d of %r to JSON (%s). ' 'Line is %r.' % (lineNumber, self._filename, e, line[:-1])) else: try: read = next(reads) except StopIteration: raise ValueError( 'Read generator failed to yield read number %d ' 'during parsing of BLAST file %r.' % (lineNumber - 1, self._filename)) else: alignments = self._dictToAlignments(record, read) yield ReadAlignments(read, alignments) finally: self._fp.close() self._fp = None
def function[readAlignments, parameter[self, reads]]: constant[ Read lines of JSON from self._filename, convert them to read alignments and yield them. @param reads: An iterable of L{Read} instances, corresponding to the reads that were given to BLAST. @raise ValueError: If any of the lines in the file cannot be converted to JSON. @return: A generator that yields C{dark.alignments.ReadAlignments} instances. ] if compare[name[self]._fp is constant[None]] begin[:] call[name[self]._open, parameter[name[self]._filename]] variable[reads] assign[=] call[name[iter], parameter[name[reads]]] <ast.Try object at 0x7da1b0ca7790>
keyword[def] identifier[readAlignments] ( identifier[self] , identifier[reads] ): literal[string] keyword[if] identifier[self] . identifier[_fp] keyword[is] keyword[None] : identifier[self] . identifier[_open] ( identifier[self] . identifier[_filename] ) identifier[reads] = identifier[iter] ( identifier[reads] ) keyword[try] : keyword[for] identifier[lineNumber] , identifier[line] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_fp] , identifier[start] = literal[int] ): keyword[try] : identifier[record] = identifier[loads] ( identifier[line] [:- literal[int] ]) keyword[except] identifier[ValueError] keyword[as] identifier[e] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] % ( identifier[lineNumber] , identifier[self] . identifier[_filename] , identifier[e] , identifier[line] [:- literal[int] ])) keyword[else] : keyword[try] : identifier[read] = identifier[next] ( identifier[reads] ) keyword[except] identifier[StopIteration] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] % ( identifier[lineNumber] - literal[int] , identifier[self] . identifier[_filename] )) keyword[else] : identifier[alignments] = identifier[self] . identifier[_dictToAlignments] ( identifier[record] , identifier[read] ) keyword[yield] identifier[ReadAlignments] ( identifier[read] , identifier[alignments] ) keyword[finally] : identifier[self] . identifier[_fp] . identifier[close] () identifier[self] . identifier[_fp] = keyword[None]
def readAlignments(self, reads): """ Read lines of JSON from self._filename, convert them to read alignments and yield them. @param reads: An iterable of L{Read} instances, corresponding to the reads that were given to BLAST. @raise ValueError: If any of the lines in the file cannot be converted to JSON. @return: A generator that yields C{dark.alignments.ReadAlignments} instances. """ if self._fp is None: self._open(self._filename) # depends on [control=['if'], data=[]] reads = iter(reads) try: for (lineNumber, line) in enumerate(self._fp, start=2): try: record = loads(line[:-1]) # depends on [control=['try'], data=[]] except ValueError as e: raise ValueError('Could not convert line %d of %r to JSON (%s). Line is %r.' % (lineNumber, self._filename, e, line[:-1])) # depends on [control=['except'], data=['e']] else: try: read = next(reads) # depends on [control=['try'], data=[]] except StopIteration: raise ValueError('Read generator failed to yield read number %d during parsing of BLAST file %r.' % (lineNumber - 1, self._filename)) # depends on [control=['except'], data=[]] else: alignments = self._dictToAlignments(record, read) yield ReadAlignments(read, alignments) # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]] finally: self._fp.close() self._fp = None
def formatwarning(message, category, filename, lineno, line=None): """ Override default Warning layout, from: /PATH/TO/dutree.py:326: UserWarning: [Errno 2] No such file or directory: '/0.d/05.d' warnings.warn(str(e)) To: dutree.py:330: UserWarning: [Errno 2] No such file or directory: '/0.d/05.d' """ return '{basename}:{lineno}: {category}: {message}\n'.format( basename=path.basename(filename), lineno=lineno, category=category.__name__, message=message)
def function[formatwarning, parameter[message, category, filename, lineno, line]]: constant[ Override default Warning layout, from: /PATH/TO/dutree.py:326: UserWarning: [Errno 2] No such file or directory: '/0.d/05.d' warnings.warn(str(e)) To: dutree.py:330: UserWarning: [Errno 2] No such file or directory: '/0.d/05.d' ] return[call[constant[{basename}:{lineno}: {category}: {message} ].format, parameter[]]]
keyword[def] identifier[formatwarning] ( identifier[message] , identifier[category] , identifier[filename] , identifier[lineno] , identifier[line] = keyword[None] ): literal[string] keyword[return] literal[string] . identifier[format] ( identifier[basename] = identifier[path] . identifier[basename] ( identifier[filename] ), identifier[lineno] = identifier[lineno] , identifier[category] = identifier[category] . identifier[__name__] , identifier[message] = identifier[message] )
def formatwarning(message, category, filename, lineno, line=None): """ Override default Warning layout, from: /PATH/TO/dutree.py:326: UserWarning: [Errno 2] No such file or directory: '/0.d/05.d' warnings.warn(str(e)) To: dutree.py:330: UserWarning: [Errno 2] No such file or directory: '/0.d/05.d' """ return '{basename}:{lineno}: {category}: {message}\n'.format(basename=path.basename(filename), lineno=lineno, category=category.__name__, message=message)
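Installing the formatter is a one-liner, since the warnings module lets you swap in any callable with this signature:

from os import path  # the formatter above resolves basenames via os.path
import warnings

warnings.formatwarning = formatwarning
warnings.warn('disk nearly full')
# stderr now shows, e.g.: script.py:5: UserWarning: disk nearly full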
def remote_sys_desc_uneq_store(self, remote_system_desc): """This function saves the system desc, if different from stored. """ if remote_system_desc != self.remote_system_desc: self.remote_system_desc = remote_system_desc return True return False
def function[remote_sys_desc_uneq_store, parameter[self, remote_system_desc]]: constant[This function saves the system desc, if different from stored. ] if compare[name[remote_system_desc] not_equal[!=] name[self].remote_system_desc] begin[:] name[self].remote_system_desc assign[=] name[remote_system_desc] return[constant[True]] return[constant[False]]
keyword[def] identifier[remote_sys_desc_uneq_store] ( identifier[self] , identifier[remote_system_desc] ): literal[string] keyword[if] identifier[remote_system_desc] != identifier[self] . identifier[remote_system_desc] : identifier[self] . identifier[remote_system_desc] = identifier[remote_system_desc] keyword[return] keyword[True] keyword[return] keyword[False]
def remote_sys_desc_uneq_store(self, remote_system_desc): """This function saves the system desc, if different from stored. """ if remote_system_desc != self.remote_system_desc: self.remote_system_desc = remote_system_desc return True # depends on [control=['if'], data=['remote_system_desc']] return False
def format_unix_var(text):
    """

    Example::

        this_is_very_good
    """
    text = text.strip()
    if len(text) == 0: # reject empty input
        raise ValueError("cannot be an empty string!")
    else:
        if text[0] in string.digits:
            raise ValueError("variable cannot start with digits!")

    text = text.lower() # normalize to lowercase before splitting into words

    words = list()
    word = list()
    for char in text:
        if char in ALPHA_DIGITS:
            word.append(char)
        else:
            if len(word):
                words.append("".join(word))
                word = list()
    if len(word):
        words.append("".join(word))

    return "_".join(words)
def function[format_unix_var, parameter[text]]: constant[ Example:: this_is_very_good ] variable[text] assign[=] call[name[text].strip, parameter[]] if compare[call[name[len], parameter[name[text]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b2506ad0>
keyword[def] identifier[format_unix_var] ( identifier[text] ): literal[string] identifier[text] = identifier[text] . identifier[strip] () keyword[if] identifier[len] ( identifier[text] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[else] : keyword[if] identifier[text] [ literal[int] ] keyword[in] identifier[string] . identifier[digits] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[text] = identifier[text] . identifier[lower] () identifier[words] = identifier[list] () identifier[word] = identifier[list] () keyword[for] identifier[char] keyword[in] identifier[text] : keyword[if] identifier[char] keyword[in] identifier[ALPHA_DIGITS] : identifier[word] . identifier[append] ( identifier[char] ) keyword[else] : keyword[if] identifier[len] ( identifier[word] ): identifier[words] . identifier[append] ( literal[string] . identifier[join] ( identifier[word] )) identifier[word] = identifier[list] () keyword[if] identifier[len] ( identifier[word] ): identifier[words] . identifier[append] ( literal[string] . identifier[join] ( identifier[word] )) keyword[return] literal[string] . identifier[join] ( identifier[words] )
def format_unix_var(text):
    """

    Example::

        this_is_very_good
    """
    text = text.strip()
    if len(text) == 0: # reject empty input
        raise ValueError('cannot be an empty string!') # depends on [control=['if'], data=[]]
    else:
        if text[0] in string.digits:
            raise ValueError('variable cannot start with digits!') # depends on [control=['if'], data=[]]
    text = text.lower() # normalize to lowercase before splitting into words
    words = list()
    word = list()
    for char in text:
        if char in ALPHA_DIGITS:
            word.append(char) # depends on [control=['if'], data=['char']]
        elif len(word):
            words.append(''.join(word))
            word = list() # depends on [control=['if'], data=[]]
    # depends on [control=['for'], data=['char']]
    if len(word):
        words.append(''.join(word)) # depends on [control=['if'], data=[]]
    return '_'.join(words)
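Assuming ALPHA_DIGITS covers lowercase ASCII letters and digits (the module-level constant is not shown here), punctuation splits words and a digit-leading name is rejected:

>>> format_unix_var('This is VERY good!')
'this_is_very_good'
>>> format_unix_var('  99 bottles ')
Traceback (most recent call last):
    ...
ValueError: variable cannot start with digits!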
def update_adjustments(self, adjustments, method): """ Merge ``adjustments`` with existing adjustments, handling index collisions according to ``method``. Parameters ---------- adjustments : dict[int -> list[Adjustment]] The mapping of row indices to lists of adjustments that should be appended to existing adjustments. method : {'append', 'prepend'} How to handle index collisions. If 'append', new adjustments will be applied after previously-existing adjustments. If 'prepend', new adjustments will be applied before previously-existing adjustments. """ try: merge_func = _merge_methods[method] except KeyError: raise ValueError( "Invalid merge method %s\n" "Valid methods are: %s" % (method, ', '.join(_merge_methods)) ) self.adjustments = merge_with( merge_func, self.adjustments, adjustments, )
def function[update_adjustments, parameter[self, adjustments, method]]: constant[ Merge ``adjustments`` with existing adjustments, handling index collisions according to ``method``. Parameters ---------- adjustments : dict[int -> list[Adjustment]] The mapping of row indices to lists of adjustments that should be appended to existing adjustments. method : {'append', 'prepend'} How to handle index collisions. If 'append', new adjustments will be applied after previously-existing adjustments. If 'prepend', new adjustments will be applied before previously-existing adjustments. ] <ast.Try object at 0x7da1b2043700> name[self].adjustments assign[=] call[name[merge_with], parameter[name[merge_func], name[self].adjustments, name[adjustments]]]
keyword[def] identifier[update_adjustments] ( identifier[self] , identifier[adjustments] , identifier[method] ): literal[string] keyword[try] : identifier[merge_func] = identifier[_merge_methods] [ identifier[method] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] %( identifier[method] , literal[string] . identifier[join] ( identifier[_merge_methods] )) ) identifier[self] . identifier[adjustments] = identifier[merge_with] ( identifier[merge_func] , identifier[self] . identifier[adjustments] , identifier[adjustments] , )
def update_adjustments(self, adjustments, method): """ Merge ``adjustments`` with existing adjustments, handling index collisions according to ``method``. Parameters ---------- adjustments : dict[int -> list[Adjustment]] The mapping of row indices to lists of adjustments that should be appended to existing adjustments. method : {'append', 'prepend'} How to handle index collisions. If 'append', new adjustments will be applied after previously-existing adjustments. If 'prepend', new adjustments will be applied before previously-existing adjustments. """ try: merge_func = _merge_methods[method] # depends on [control=['try'], data=[]] except KeyError: raise ValueError('Invalid merge method %s\nValid methods are: %s' % (method, ', '.join(_merge_methods))) # depends on [control=['except'], data=[]] self.adjustments = merge_with(merge_func, self.adjustments, adjustments)
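merge_with here is presumably toolz's: values that share a key are handed to the merge function as a list of lists, so 'append' and 'prepend' reduce to concatenation order. A standalone illustration of the append case, with strings standing in for Adjustment objects:

from toolz import merge_with

existing = {3: ['old_adj']}
incoming = {3: ['new_adj'], 5: ['other_adj']}

# 'append': previously-stored adjustments come first under each row index.
merged = merge_with(lambda lists: sum(lists, []), existing, incoming)
assert merged == {3: ['old_adj', 'new_adj'], 5: ['other_adj']}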
def start(self):
    '''
    Start up the Kafka consumer.
    '''
    log.debug('Creating the consumer using the bootstrap servers: %s and the group ID: %s',
              self.bootstrap_servers,
              self.group_id)
    try:
        self.consumer = kafka.KafkaConsumer(bootstrap_servers=self.bootstrap_servers,
                                            group_id=self.group_id)
    except kafka.errors.NoBrokersAvailable as err:
        log.error(err, exc_info=True)
        raise ListenerException(err)
    log.debug('Subscribing to the %s topic', self.topic)
    self.consumer.subscribe(topics=[self.topic])
def function[start, parameter[self]]:
    constant[
    Start up the Kafka consumer.
    ]
    call[name[log].debug, parameter[constant[Creating the consumer using the bootstrap servers: %s and the group ID: %s], name[self].bootstrap_servers, name[self].group_id]]
    <ast.Try object at 0x7da18bcca890>
    call[name[log].debug, parameter[constant[Subscribing to the %s topic], name[self].topic]]
    call[name[self].consumer.subscribe, parameter[]]
keyword[def] identifier[start] ( identifier[self] ): literal[string] identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[bootstrap_servers] , identifier[self] . identifier[group_id] ) keyword[try] : identifier[self] . identifier[consumer] = identifier[kafka] . identifier[KafkaConsumer] ( identifier[bootstrap_servers] = identifier[self] . identifier[bootstrap_servers] , identifier[group_id] = identifier[self] . identifier[group_id] ) keyword[except] identifier[kafka] . identifier[errors] . identifier[NoBrokersAvailable] keyword[as] identifier[err] : identifier[log] . identifier[error] ( identifier[err] , identifier[exc_info] = keyword[True] ) keyword[raise] identifier[ListenerException] ( identifier[err] ) identifier[log] . identifier[debug] ( literal[string] , identifier[self] . identifier[topic] ) identifier[self] . identifier[consumer] . identifier[subscribe] ( identifier[topics] =[ identifier[self] . identifier[topic] ])
def start(self): """ Start up the Kafka consumer. """ log.debug('Creating the consumer using the bootstrap servers: %s and the group ID: %s', self.bootstrap_servers, self.group_id) try: self.consumer = kafka.KafkaConsumer(bootstrap_servers=self.bootstrap_servers, group_id=self.group_id) # depends on [control=['try'], data=[]] except kafka.errors.NoBrokersAvailable as err: log.error(err, exc_info=True) raise ListenerException(err) # depends on [control=['except'], data=['err']] log.debug('Subscribing to the %s topic', self.topic) self.consumer.subscribe(topics=[self.topic])
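The same handshake in plain kafka-python, outside the listener class; server and topic names are placeholders:

import kafka

consumer = kafka.KafkaConsumer(bootstrap_servers='localhost:9092',
                               group_id='demo-group')
consumer.subscribe(topics=['demo-topic'])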
def check_num_slices(num_slices, img_shape=None, num_dims=3): """Ensures requested number of slices is valid. At least 1 and at most the image size, if available """ if not isinstance(num_slices, Iterable) or len(num_slices) == 1: num_slices = np.repeat(num_slices, num_dims) if img_shape is not None: if len(num_slices) != len(img_shape): raise ValueError('The number of dimensions requested is different from image.' ' Must be either 1 or equal to {}'.format(len(img_shape) + 1)) # upper bounding them to image shape num_slices = np.minimum(img_shape, num_slices) # lower bounding it to 1 return np.maximum(1, num_slices)
def function[check_num_slices, parameter[num_slices, img_shape, num_dims]]: constant[Ensures requested number of slices is valid. At least 1 and at most the image size, if available ] if <ast.BoolOp object at 0x7da1b2547a00> begin[:] variable[num_slices] assign[=] call[name[np].repeat, parameter[name[num_slices], name[num_dims]]] if compare[name[img_shape] is_not constant[None]] begin[:] if compare[call[name[len], parameter[name[num_slices]]] not_equal[!=] call[name[len], parameter[name[img_shape]]]] begin[:] <ast.Raise object at 0x7da18ede4a00> variable[num_slices] assign[=] call[name[np].minimum, parameter[name[img_shape], name[num_slices]]] return[call[name[np].maximum, parameter[constant[1], name[num_slices]]]]
keyword[def] identifier[check_num_slices] ( identifier[num_slices] , identifier[img_shape] = keyword[None] , identifier[num_dims] = literal[int] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[num_slices] , identifier[Iterable] ) keyword[or] identifier[len] ( identifier[num_slices] )== literal[int] : identifier[num_slices] = identifier[np] . identifier[repeat] ( identifier[num_slices] , identifier[num_dims] ) keyword[if] identifier[img_shape] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[len] ( identifier[num_slices] )!= identifier[len] ( identifier[img_shape] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[len] ( identifier[img_shape] )+ literal[int] )) identifier[num_slices] = identifier[np] . identifier[minimum] ( identifier[img_shape] , identifier[num_slices] ) keyword[return] identifier[np] . identifier[maximum] ( literal[int] , identifier[num_slices] )
def check_num_slices(num_slices, img_shape=None, num_dims=3): """Ensures requested number of slices is valid. At least 1 and at most the image size, if available """ if not isinstance(num_slices, Iterable) or len(num_slices) == 1: num_slices = np.repeat(num_slices, num_dims) # depends on [control=['if'], data=[]] if img_shape is not None: if len(num_slices) != len(img_shape): raise ValueError('The number of dimensions requested is different from image. Must be either 1 or equal to {}'.format(len(img_shape) + 1)) # depends on [control=['if'], data=[]] # upper bounding them to image shape num_slices = np.minimum(img_shape, num_slices) # depends on [control=['if'], data=['img_shape']] # lower bounding it to 1 return np.maximum(1, num_slices)
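Assuming check_num_slices as defined above is in scope (its module needs numpy as np and an Iterable import, e.g. from collections.abc), the clamping behaviour can be exercised directly:

import numpy as np
from collections.abc import Iterable  # name expected inside check_num_slices

print(check_num_slices(100, img_shape=(64, 64, 30)))  # -> [64 64 30], capped at the image size
print(check_num_slices(0, img_shape=(64, 64, 30)))    # -> [1 1 1], floored at one slice per dimension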
def updateLink(page, lnk): """ Update a link on the current page. """ CheckParent(page) annot = getLinkText(page, lnk) if annot == "": raise ValueError("link kind not supported") page.parent._updateObject(lnk["xref"], annot, page = page) return
def function[updateLink, parameter[page, lnk]]: constant[ Update a link on the current page. ] call[name[CheckParent], parameter[name[page]]] variable[annot] assign[=] call[name[getLinkText], parameter[name[page], name[lnk]]] if compare[name[annot] equal[==] constant[]] begin[:] <ast.Raise object at 0x7da1b186ef50> call[name[page].parent._updateObject, parameter[call[name[lnk]][constant[xref]], name[annot]]] return[None]
keyword[def] identifier[updateLink] ( identifier[page] , identifier[lnk] ): literal[string] identifier[CheckParent] ( identifier[page] ) identifier[annot] = identifier[getLinkText] ( identifier[page] , identifier[lnk] ) keyword[if] identifier[annot] == literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[page] . identifier[parent] . identifier[_updateObject] ( identifier[lnk] [ literal[string] ], identifier[annot] , identifier[page] = identifier[page] ) keyword[return]
def updateLink(page, lnk): """ Update a link on the current page. """ CheckParent(page) annot = getLinkText(page, lnk) if annot == '': raise ValueError('link kind not supported') # depends on [control=['if'], data=[]] page.parent._updateObject(lnk['xref'], annot, page=page) return
def fill_shifts_view(request, semester): """ Allows managers to quickly fill in the default workshifts for a few given workshift pools. """ page_name = "Fill Shifts" fill_regular_shifts_form = None fill_social_shifts_form = None fill_humor_shifts_form = None fill_bathroom_shifts_form = None fill_hi_shifts_form = None reset_all_shifts_form = None managers = Manager.objects.filter(incumbent__user=request.user) admin = utils.can_manage(request.user, semester=semester) if admin: fill_regular_shifts_form = FillRegularShiftsForm( data=request.POST, semester=semester, ) fill_humor_shifts_form = FillHumorShiftsForm( data=request.POST, semester=semester, ) fill_bathroom_shifts_form = FillBathroomShiftsForm( data=request.POST, semester=semester, ) reset_all_shifts_form = ResetAllShiftsForm( data=request.POST, semester=semester, ) # XXX: BAD! We should filter by pool owners? By Manager bool flags? By # arbitrary django permissions? if admin or managers.filter(title="Social Manager"): fill_social_shifts_form = FillSocialShiftsForm( data=request.POST, semester=semester, ) # XXX: See above if admin or managers.filter(title="Maintenance Manager"): fill_hi_shifts_form = FillHIShiftsForm( data=request.POST, semester=semester, ) fill_forms = [ fill_regular_shifts_form, fill_social_shifts_form, fill_humor_shifts_form, fill_bathroom_shifts_form, fill_hi_shifts_form, reset_all_shifts_form, ] fill_forms = [ form for form in fill_forms if form is not None ] for form in fill_forms: if form and form.is_valid(): count = form.save() messages.add_message( request, messages.INFO, "{} {} {}".format( form.message, count, p.plural("workshift", count), ), ) return HttpResponseRedirect(wurl( "workshift:fill_shifts", sem_url=semester.sem_url, )) return render_to_response("fill_shifts.html", { "page_name": page_name, "forms": fill_forms, }, context_instance=RequestContext(request))
def function[fill_shifts_view, parameter[request, semester]]: constant[ Allows managers to quickly fill in the default workshifts for a few given workshift pools. ] variable[page_name] assign[=] constant[Fill Shifts] variable[fill_regular_shifts_form] assign[=] constant[None] variable[fill_social_shifts_form] assign[=] constant[None] variable[fill_humor_shifts_form] assign[=] constant[None] variable[fill_bathroom_shifts_form] assign[=] constant[None] variable[fill_hi_shifts_form] assign[=] constant[None] variable[reset_all_shifts_form] assign[=] constant[None] variable[managers] assign[=] call[name[Manager].objects.filter, parameter[]] variable[admin] assign[=] call[name[utils].can_manage, parameter[name[request].user]] if name[admin] begin[:] variable[fill_regular_shifts_form] assign[=] call[name[FillRegularShiftsForm], parameter[]] variable[fill_humor_shifts_form] assign[=] call[name[FillHumorShiftsForm], parameter[]] variable[fill_bathroom_shifts_form] assign[=] call[name[FillBathroomShiftsForm], parameter[]] variable[reset_all_shifts_form] assign[=] call[name[ResetAllShiftsForm], parameter[]] if <ast.BoolOp object at 0x7da1b1494730> begin[:] variable[fill_social_shifts_form] assign[=] call[name[FillSocialShiftsForm], parameter[]] if <ast.BoolOp object at 0x7da1b1497a60> begin[:] variable[fill_hi_shifts_form] assign[=] call[name[FillHIShiftsForm], parameter[]] variable[fill_forms] assign[=] list[[<ast.Name object at 0x7da1b1495690>, <ast.Name object at 0x7da1b1495660>, <ast.Name object at 0x7da1b1495630>, <ast.Name object at 0x7da1b1495600>, <ast.Name object at 0x7da1b14955a0>, <ast.Name object at 0x7da18bc73850>]] variable[fill_forms] assign[=] <ast.ListComp object at 0x7da18bc72320> for taget[name[form]] in starred[name[fill_forms]] begin[:] if <ast.BoolOp object at 0x7da18bc73670> begin[:] variable[count] assign[=] call[name[form].save, parameter[]] call[name[messages].add_message, parameter[name[request], name[messages].INFO, call[constant[{} {} {}].format, parameter[name[form].message, name[count], call[name[p].plural, parameter[constant[workshift], name[count]]]]]]] return[call[name[HttpResponseRedirect], parameter[call[name[wurl], parameter[constant[workshift:fill_shifts]]]]]] return[call[name[render_to_response], parameter[constant[fill_shifts.html], dictionary[[<ast.Constant object at 0x7da18bc729e0>, <ast.Constant object at 0x7da18bc72ec0>], [<ast.Name object at 0x7da18bc73ca0>, <ast.Name object at 0x7da18bc73df0>]]]]]
keyword[def] identifier[fill_shifts_view] ( identifier[request] , identifier[semester] ): literal[string] identifier[page_name] = literal[string] identifier[fill_regular_shifts_form] = keyword[None] identifier[fill_social_shifts_form] = keyword[None] identifier[fill_humor_shifts_form] = keyword[None] identifier[fill_bathroom_shifts_form] = keyword[None] identifier[fill_hi_shifts_form] = keyword[None] identifier[reset_all_shifts_form] = keyword[None] identifier[managers] = identifier[Manager] . identifier[objects] . identifier[filter] ( identifier[incumbent__user] = identifier[request] . identifier[user] ) identifier[admin] = identifier[utils] . identifier[can_manage] ( identifier[request] . identifier[user] , identifier[semester] = identifier[semester] ) keyword[if] identifier[admin] : identifier[fill_regular_shifts_form] = identifier[FillRegularShiftsForm] ( identifier[data] = identifier[request] . identifier[POST] , identifier[semester] = identifier[semester] , ) identifier[fill_humor_shifts_form] = identifier[FillHumorShiftsForm] ( identifier[data] = identifier[request] . identifier[POST] , identifier[semester] = identifier[semester] , ) identifier[fill_bathroom_shifts_form] = identifier[FillBathroomShiftsForm] ( identifier[data] = identifier[request] . identifier[POST] , identifier[semester] = identifier[semester] , ) identifier[reset_all_shifts_form] = identifier[ResetAllShiftsForm] ( identifier[data] = identifier[request] . identifier[POST] , identifier[semester] = identifier[semester] , ) keyword[if] identifier[admin] keyword[or] identifier[managers] . identifier[filter] ( identifier[title] = literal[string] ): identifier[fill_social_shifts_form] = identifier[FillSocialShiftsForm] ( identifier[data] = identifier[request] . identifier[POST] , identifier[semester] = identifier[semester] , ) keyword[if] identifier[admin] keyword[or] identifier[managers] . identifier[filter] ( identifier[title] = literal[string] ): identifier[fill_hi_shifts_form] = identifier[FillHIShiftsForm] ( identifier[data] = identifier[request] . identifier[POST] , identifier[semester] = identifier[semester] , ) identifier[fill_forms] =[ identifier[fill_regular_shifts_form] , identifier[fill_social_shifts_form] , identifier[fill_humor_shifts_form] , identifier[fill_bathroom_shifts_form] , identifier[fill_hi_shifts_form] , identifier[reset_all_shifts_form] , ] identifier[fill_forms] =[ identifier[form] keyword[for] identifier[form] keyword[in] identifier[fill_forms] keyword[if] identifier[form] keyword[is] keyword[not] keyword[None] ] keyword[for] identifier[form] keyword[in] identifier[fill_forms] : keyword[if] identifier[form] keyword[and] identifier[form] . identifier[is_valid] (): identifier[count] = identifier[form] . identifier[save] () identifier[messages] . identifier[add_message] ( identifier[request] , identifier[messages] . identifier[INFO] , literal[string] . identifier[format] ( identifier[form] . identifier[message] , identifier[count] , identifier[p] . identifier[plural] ( literal[string] , identifier[count] ), ), ) keyword[return] identifier[HttpResponseRedirect] ( identifier[wurl] ( literal[string] , identifier[sem_url] = identifier[semester] . identifier[sem_url] , )) keyword[return] identifier[render_to_response] ( literal[string] ,{ literal[string] : identifier[page_name] , literal[string] : identifier[fill_forms] , }, identifier[context_instance] = identifier[RequestContext] ( identifier[request] ))
def fill_shifts_view(request, semester): """ Allows managers to quickly fill in the default workshifts for a few given workshift pools. """ page_name = 'Fill Shifts' fill_regular_shifts_form = None fill_social_shifts_form = None fill_humor_shifts_form = None fill_bathroom_shifts_form = None fill_hi_shifts_form = None reset_all_shifts_form = None managers = Manager.objects.filter(incumbent__user=request.user) admin = utils.can_manage(request.user, semester=semester) if admin: fill_regular_shifts_form = FillRegularShiftsForm(data=request.POST, semester=semester) fill_humor_shifts_form = FillHumorShiftsForm(data=request.POST, semester=semester) fill_bathroom_shifts_form = FillBathroomShiftsForm(data=request.POST, semester=semester) reset_all_shifts_form = ResetAllShiftsForm(data=request.POST, semester=semester) # depends on [control=['if'], data=[]] # XXX: BAD! We should filter by pool owners? By Manager bool flags? By # arbitrary django permissions? if admin or managers.filter(title='Social Manager'): fill_social_shifts_form = FillSocialShiftsForm(data=request.POST, semester=semester) # depends on [control=['if'], data=[]] # XXX: See above if admin or managers.filter(title='Maintenance Manager'): fill_hi_shifts_form = FillHIShiftsForm(data=request.POST, semester=semester) # depends on [control=['if'], data=[]] fill_forms = [fill_regular_shifts_form, fill_social_shifts_form, fill_humor_shifts_form, fill_bathroom_shifts_form, fill_hi_shifts_form, reset_all_shifts_form] fill_forms = [form for form in fill_forms if form is not None] for form in fill_forms: if form and form.is_valid(): count = form.save() messages.add_message(request, messages.INFO, '{} {} {}'.format(form.message, count, p.plural('workshift', count))) return HttpResponseRedirect(wurl('workshift:fill_shifts', sem_url=semester.sem_url)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['form']] return render_to_response('fill_shifts.html', {'page_name': page_name, 'forms': fill_forms}, context_instance=RequestContext(request))
def _fixNS(self, namespace): """Convert an input value into the internally used values of this object @param namespace: The string or constant to convert @type namespace: str or unicode or BARE_NS or OPENID_NS """ if namespace == OPENID_NS: if self._openid_ns_uri is None: raise UndefinedOpenIDNamespace('OpenID namespace not set') else: namespace = self._openid_ns_uri if namespace != BARE_NS and type(namespace) not in [str, unicode]: raise TypeError( "Namespace must be BARE_NS, OPENID_NS or a string. got %r" % (namespace,)) if namespace != BARE_NS and ':' not in namespace: fmt = 'OpenID 2.0 namespace identifiers SHOULD be URIs. Got %r' warnings.warn(fmt % (namespace,), DeprecationWarning) if namespace == 'sreg': fmt = 'Using %r instead of "sreg" as namespace' warnings.warn(fmt % (SREG_URI,), DeprecationWarning,) return SREG_URI return namespace
def function[_fixNS, parameter[self, namespace]]: constant[Convert an input value into the internally used values of this object @param namespace: The string or constant to convert @type namespace: str or unicode or BARE_NS or OPENID_NS ] if compare[name[namespace] equal[==] name[OPENID_NS]] begin[:] if compare[name[self]._openid_ns_uri is constant[None]] begin[:] <ast.Raise object at 0x7da18fe92020> if <ast.BoolOp object at 0x7da18fe90eb0> begin[:] <ast.Raise object at 0x7da18fe928c0> if <ast.BoolOp object at 0x7da18fe91030> begin[:] variable[fmt] assign[=] constant[OpenID 2.0 namespace identifiers SHOULD be URIs. Got %r] call[name[warnings].warn, parameter[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18fe91270>]]], name[DeprecationWarning]]] if compare[name[namespace] equal[==] constant[sreg]] begin[:] variable[fmt] assign[=] constant[Using %r instead of "sreg" as namespace] call[name[warnings].warn, parameter[binary_operation[name[fmt] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f9b8b0>]]], name[DeprecationWarning]]] return[name[SREG_URI]] return[name[namespace]]
keyword[def] identifier[_fixNS] ( identifier[self] , identifier[namespace] ): literal[string] keyword[if] identifier[namespace] == identifier[OPENID_NS] : keyword[if] identifier[self] . identifier[_openid_ns_uri] keyword[is] keyword[None] : keyword[raise] identifier[UndefinedOpenIDNamespace] ( literal[string] ) keyword[else] : identifier[namespace] = identifier[self] . identifier[_openid_ns_uri] keyword[if] identifier[namespace] != identifier[BARE_NS] keyword[and] identifier[type] ( identifier[namespace] ) keyword[not] keyword[in] [ identifier[str] , identifier[unicode] ]: keyword[raise] identifier[TypeError] ( literal[string] %( identifier[namespace] ,)) keyword[if] identifier[namespace] != identifier[BARE_NS] keyword[and] literal[string] keyword[not] keyword[in] identifier[namespace] : identifier[fmt] = literal[string] identifier[warnings] . identifier[warn] ( identifier[fmt] %( identifier[namespace] ,), identifier[DeprecationWarning] ) keyword[if] identifier[namespace] == literal[string] : identifier[fmt] = literal[string] identifier[warnings] . identifier[warn] ( identifier[fmt] %( identifier[SREG_URI] ,), identifier[DeprecationWarning] ,) keyword[return] identifier[SREG_URI] keyword[return] identifier[namespace]
def _fixNS(self, namespace): """Convert an input value into the internally used values of this object @param namespace: The string or constant to convert @type namespace: str or unicode or BARE_NS or OPENID_NS """ if namespace == OPENID_NS: if self._openid_ns_uri is None: raise UndefinedOpenIDNamespace('OpenID namespace not set') # depends on [control=['if'], data=[]] else: namespace = self._openid_ns_uri # depends on [control=['if'], data=['namespace']] if namespace != BARE_NS and type(namespace) not in [str, unicode]: raise TypeError('Namespace must be BARE_NS, OPENID_NS or a string. got %r' % (namespace,)) # depends on [control=['if'], data=[]] if namespace != BARE_NS and ':' not in namespace: fmt = 'OpenID 2.0 namespace identifiers SHOULD be URIs. Got %r' warnings.warn(fmt % (namespace,), DeprecationWarning) if namespace == 'sreg': fmt = 'Using %r instead of "sreg" as namespace' warnings.warn(fmt % (SREG_URI,), DeprecationWarning) return SREG_URI # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return namespace
def qname(self, stmt): """Return (prefixed) node name of `stmt`. The result is prefixed with the local prefix unless we are inside a global grouping. """ if self.gg_level: return stmt.arg return self.prefix_stack[-1] + ":" + stmt.arg
def function[qname, parameter[self, stmt]]: constant[Return (prefixed) node name of `stmt`. The result is prefixed with the local prefix unless we are inside a global grouping. ] if name[self].gg_level begin[:] return[name[stmt].arg] return[binary_operation[binary_operation[call[name[self].prefix_stack][<ast.UnaryOp object at 0x7da18c4cccd0>] + constant[:]] + name[stmt].arg]]
keyword[def] identifier[qname] ( identifier[self] , identifier[stmt] ): literal[string] keyword[if] identifier[self] . identifier[gg_level] : keyword[return] identifier[stmt] . identifier[arg] keyword[return] identifier[self] . identifier[prefix_stack] [- literal[int] ]+ literal[string] + identifier[stmt] . identifier[arg]
def qname(self, stmt): """Return (prefixed) node name of `stmt`. The result is prefixed with the local prefix unless we are inside a global grouping. """ if self.gg_level: return stmt.arg # depends on [control=['if'], data=[]] return self.prefix_stack[-1] + ':' + stmt.arg
def get_paths(rlz): """ :param rlz: a logic tree realization (composite or simple) :returns: a dict {'source_model_tree_path': string, 'gsim_tree_path': string} """ dic = {} if hasattr(rlz, 'sm_lt_path'): # composite realization dic['source_model_tree_path'] = '_'.join(rlz.sm_lt_path) dic['gsim_tree_path'] = '_'.join(rlz.gsim_lt_path) else: # simple GSIM realization dic['source_model_tree_path'] = '' dic['gsim_tree_path'] = '_'.join(rlz.lt_path) return dic
def function[get_paths, parameter[rlz]]: constant[ :param rlz: a logic tree realization (composite or simple) :returns: a dict {'source_model_tree_path': string, 'gsim_tree_path': string} ] variable[dic] assign[=] dictionary[[], []] if call[name[hasattr], parameter[name[rlz], constant[sm_lt_path]]] begin[:] call[name[dic]][constant[source_model_tree_path]] assign[=] call[constant[_].join, parameter[name[rlz].sm_lt_path]] call[name[dic]][constant[gsim_tree_path]] assign[=] call[constant[_].join, parameter[name[rlz].gsim_lt_path]] return[name[dic]]
keyword[def] identifier[get_paths] ( identifier[rlz] ): literal[string] identifier[dic] ={} keyword[if] identifier[hasattr] ( identifier[rlz] , literal[string] ): identifier[dic] [ literal[string] ]= literal[string] . identifier[join] ( identifier[rlz] . identifier[sm_lt_path] ) identifier[dic] [ literal[string] ]= literal[string] . identifier[join] ( identifier[rlz] . identifier[gsim_lt_path] ) keyword[else] : identifier[dic] [ literal[string] ]= literal[string] identifier[dic] [ literal[string] ]= literal[string] . identifier[join] ( identifier[rlz] . identifier[lt_path] ) keyword[return] identifier[dic]
def get_paths(rlz): """ :param rlz: a logic tree realization (composite or simple) :returns: a dict {'source_model_tree_path': string, 'gsim_tree_path': string} """ dic = {} if hasattr(rlz, 'sm_lt_path'): # composite realization dic['source_model_tree_path'] = '_'.join(rlz.sm_lt_path) dic['gsim_tree_path'] = '_'.join(rlz.gsim_lt_path) # depends on [control=['if'], data=[]] else: # simple GSIM realization dic['source_model_tree_path'] = '' dic['gsim_tree_path'] = '_'.join(rlz.lt_path) return dic
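A quick way to see the hasattr-based dispatch in get_paths is to feed it lightweight stand-ins (SimpleNamespace here is only a test double, assuming get_paths is defined as above):

from types import SimpleNamespace

composite = SimpleNamespace(sm_lt_path=('sm1', 'b2'), gsim_lt_path=('g1',))
simple = SimpleNamespace(lt_path=('g1',))

print(get_paths(composite))  # {'source_model_tree_path': 'sm1_b2', 'gsim_tree_path': 'g1'}
print(get_paths(simple))     # {'source_model_tree_path': '', 'gsim_tree_path': 'g1'}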
def max_speed(self): """ Returns the maximum value that is accepted by the `speed_sp` attribute. This may be slightly different than the maximum speed that a particular motor can reach - it's the maximum theoretical speed. """ (self._max_speed, value) = self.get_cached_attr_int(self._max_speed, 'max_speed') return value
def function[max_speed, parameter[self]]: constant[ Returns the maximum value that is accepted by the `speed_sp` attribute. This may be slightly different than the maximum speed that a particular motor can reach - it's the maximum theoretical speed. ] <ast.Tuple object at 0x7da1b1644dc0> assign[=] call[name[self].get_cached_attr_int, parameter[name[self]._max_speed, constant[max_speed]]] return[name[value]]
keyword[def] identifier[max_speed] ( identifier[self] ): literal[string] ( identifier[self] . identifier[_max_speed] , identifier[value] )= identifier[self] . identifier[get_cached_attr_int] ( identifier[self] . identifier[_max_speed] , literal[string] ) keyword[return] identifier[value]
def max_speed(self): """ Returns the maximum value that is accepted by the `speed_sp` attribute. This may be slightly different than the maximum speed that a particular motor can reach - it's the maximum theoretical speed. """ (self._max_speed, value) = self.get_cached_attr_int(self._max_speed, 'max_speed') return value
def process_input(self, question): """ takes a question and returns the best answer based on known skills """ ans = '' if self.status == 'EXIT': print('bye') sys.exit() if '?' in question: ans = self.info.find_answer(question) elif question.startswith(':LIST'): ans = 'List of Raw Input\n' for i in self.info.raw_input: ans += str(i) + '\n' else: # ans = "I don't know" ans = 'Adding info..' self.info.raw_input.append(question) self.lg.record_process('aggie.py', 'Question > ' + question) self.lg.record_process('aggie.py', 'Answer > ' + ans) return ans
def function[process_input, parameter[self, question]]: constant[ takes a question and returns the best answer based on known skills ] variable[ans] assign[=] constant[] if compare[name[self].status equal[==] constant[EXIT]] begin[:] call[name[print], parameter[constant[bye]]] call[name[sys].exit, parameter[]] if compare[constant[?] in name[question]] begin[:] variable[ans] assign[=] call[name[self].info.find_answer, parameter[name[question]]] call[name[self].lg.record_process, parameter[constant[aggie.py], binary_operation[constant[Question > ] + name[question]]]] call[name[self].lg.record_process, parameter[constant[aggie.py], binary_operation[constant[Answer > ] + name[ans]]]] return[name[ans]]
keyword[def] identifier[process_input] ( identifier[self] , identifier[question] ): literal[string] identifier[ans] = literal[string] keyword[if] identifier[self] . identifier[status] == literal[string] : identifier[print] ( literal[string] ) identifier[sys] . identifier[exit] () keyword[if] literal[string] keyword[in] identifier[question] : identifier[ans] = identifier[self] . identifier[info] . identifier[find_answer] ( identifier[question] ) keyword[elif] identifier[question] . identifier[startswith] ( literal[string] ): identifier[ans] = literal[string] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[info] . identifier[raw_input] : identifier[ans] += identifier[str] ( identifier[i] )+ literal[string] keyword[else] : identifier[ans] = literal[string] identifier[self] . identifier[info] . identifier[raw_input] . identifier[append] ( identifier[question] ) identifier[self] . identifier[lg] . identifier[record_process] ( literal[string] , literal[string] + identifier[question] ) identifier[self] . identifier[lg] . identifier[record_process] ( literal[string] , literal[string] + identifier[ans] ) keyword[return] identifier[ans]
def process_input(self, question): """ takes a question and returns the best answer based on known skills """ ans = '' if self.status == 'EXIT': print('bye') sys.exit() # depends on [control=['if'], data=[]] if '?' in question: ans = self.info.find_answer(question) # depends on [control=['if'], data=['question']] elif question.startswith(':LIST'): ans = 'List of Raw Input\n' for i in self.info.raw_input: ans += str(i) + '\n' # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] else: # ans = "I don't know" ans = 'Adding info..' self.info.raw_input.append(question) self.lg.record_process('aggie.py', 'Question > ' + question) self.lg.record_process('aggie.py', 'Answer > ' + ans) return ans
def _folder_item_duedate(self, analysis_brain, item): """Set the analysis' due date to the item passed in. :param analysis_brain: Brain that represents an analysis :param item: analysis' dictionary counterpart that represents a row """ # Note that if the analysis is a Reference Analysis, `getDueDate` # returns the date when the ReferenceSample expires. If the analysis is # a duplicate, `getDueDate` returns the due date of the source analysis due_date = analysis_brain.getDueDate if not due_date: return None due_date_str = self.ulocalized_time(due_date, long_format=0) item['DueDate'] = due_date_str # If the Analysis is late/overdue, display an icon capture_date = analysis_brain.getResultCaptureDate capture_date = capture_date or DateTime() if capture_date > due_date: # The analysis is late or overdue img = get_image('late.png', title=t(_("Late Analysis")), width='16px', height='16px') item['replace']['DueDate'] = '{} {}'.format(due_date_str, img)
def function[_folder_item_duedate, parameter[self, analysis_brain, item]]: constant[Set the analysis' due date to the item passed in. :param analysis_brain: Brain that represents an analysis :param item: analysis' dictionary counterpart that represents a row ] variable[due_date] assign[=] name[analysis_brain].getDueDate if <ast.UnaryOp object at 0x7da18f00e860> begin[:] return[constant[None]] variable[due_date_str] assign[=] call[name[self].ulocalized_time, parameter[name[due_date]]] call[name[item]][constant[DueDate]] assign[=] name[due_date_str] variable[capture_date] assign[=] name[analysis_brain].getResultCaptureDate variable[capture_date] assign[=] <ast.BoolOp object at 0x7da207f98f70> if compare[name[capture_date] greater[>] name[due_date]] begin[:] variable[img] assign[=] call[name[get_image], parameter[constant[late.png]]] call[call[name[item]][constant[replace]]][constant[DueDate]] assign[=] call[constant[{} {}].format, parameter[name[due_date_str], name[img]]]
keyword[def] identifier[_folder_item_duedate] ( identifier[self] , identifier[analysis_brain] , identifier[item] ): literal[string] identifier[due_date] = identifier[analysis_brain] . identifier[getDueDate] keyword[if] keyword[not] identifier[due_date] : keyword[return] keyword[None] identifier[due_date_str] = identifier[self] . identifier[ulocalized_time] ( identifier[due_date] , identifier[long_format] = literal[int] ) identifier[item] [ literal[string] ]= identifier[due_date_str] identifier[capture_date] = identifier[analysis_brain] . identifier[getResultCaptureDate] identifier[capture_date] = identifier[capture_date] keyword[or] identifier[DateTime] () keyword[if] identifier[capture_date] > identifier[due_date] : identifier[img] = identifier[get_image] ( literal[string] , identifier[title] = identifier[t] ( identifier[_] ( literal[string] )), identifier[width] = literal[string] , identifier[height] = literal[string] ) identifier[item] [ literal[string] ][ literal[string] ]= literal[string] . identifier[format] ( identifier[due_date_str] , identifier[img] )
def _folder_item_duedate(self, analysis_brain, item): """Set the analysis' due date to the item passed in. :param analysis_brain: Brain that represents an analysis :param item: analysis' dictionary counterpart that represents a row """ # Note that if the analysis is a Reference Analysis, `getDueDate` # returns the date when the ReferenceSample expires. If the analysis is # a duplicate, `getDueDate` returns the due date of the source analysis due_date = analysis_brain.getDueDate if not due_date: return None # depends on [control=['if'], data=[]] due_date_str = self.ulocalized_time(due_date, long_format=0) item['DueDate'] = due_date_str # If the Analysis is late/overdue, display an icon capture_date = analysis_brain.getResultCaptureDate capture_date = capture_date or DateTime() if capture_date > due_date: # The analysis is late or overdue img = get_image('late.png', title=t(_('Late Analysis')), width='16px', height='16px') item['replace']['DueDate'] = '{} {}'.format(due_date_str, img) # depends on [control=['if'], data=[]]
async def wait_until_ready( self, timeout: Optional[float] = None, no_raise: bool = False ) -> bool: """ Waits for the underlying node to become ready. If no_raise is set, returns false when a timeout occurs instead of propagating TimeoutError. A timeout of None means to wait indefinitely. """ if self.node.ready.is_set(): return True try: return await self.node.wait_until_ready(timeout=timeout) except asyncio.TimeoutError: if no_raise: return False else: raise
<ast.AsyncFunctionDef object at 0x7da20c76fb80>
keyword[async] keyword[def] identifier[wait_until_ready] ( identifier[self] , identifier[timeout] : identifier[Optional] [ identifier[float] ]= keyword[None] , identifier[no_raise] : identifier[bool] = keyword[False] )-> identifier[bool] : literal[string] keyword[if] identifier[self] . identifier[node] . identifier[ready] . identifier[is_set] (): keyword[return] keyword[True] keyword[try] : keyword[return] keyword[await] identifier[self] . identifier[node] . identifier[wait_until_ready] ( identifier[timeout] = identifier[timeout] ) keyword[except] identifier[asyncio] . identifier[TimeoutError] : keyword[if] identifier[no_raise] : keyword[return] keyword[False] keyword[else] : keyword[raise]
async def wait_until_ready(self, timeout: Optional[float]=None, no_raise: bool=False) -> bool: """ Waits for the underlying node to become ready. If no_raise is set, returns false when a timeout occurs instead of propagating TimeoutError. A timeout of None means to wait indefinitely. """ if self.node.ready.is_set(): return True # depends on [control=['if'], data=[]] try: return await self.node.wait_until_ready(timeout=timeout) # depends on [control=['try'], data=[]] except asyncio.TimeoutError: if no_raise: return False # depends on [control=['if'], data=[]] else: raise # depends on [control=['except'], data=[]]
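The ready-event-plus-optional-timeout pattern above can be reproduced standalone with asyncio; this sketch swaps the node object for a bare asyncio.Event, which is an assumption for illustration:

import asyncio
from typing import Optional

async def wait_until_ready(ready: asyncio.Event, timeout: Optional[float] = None, no_raise: bool = False) -> bool:
    if ready.is_set():  # fast path, mirrors the node.ready check
        return True
    try:
        await asyncio.wait_for(ready.wait(), timeout=timeout)  # timeout=None waits forever
        return True
    except asyncio.TimeoutError:
        if no_raise:
            return False
        raise

async def main():
    ready = asyncio.Event()
    asyncio.get_running_loop().call_later(0.1, ready.set)  # becomes ready after 100 ms
    print(await wait_until_ready(ready, timeout=1.0))  # True
    print(await wait_until_ready(asyncio.Event(), timeout=0.1, no_raise=True))  # False

asyncio.run(main())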
def create_port(self, context, network_id, port_id, **kwargs): """Create a port. :param context: neutron api request context. :param network_id: neutron network id. :param port_id: neutron port id. :param kwargs: required keys - device_id: neutron port device_id (instance_id) instance_node_id: nova hypervisor host id mac_address: neutron port mac address base_net_driver: the base network driver optional keys - addresses: list of allocated IPAddress models security_groups: list of associated security groups :raises IronicException: If the client is unable to create the downstream port for any reason, the exception will be logged and IronicException raised. """ LOG.info("create_port %s %s %s" % (context.tenant_id, network_id, port_id)) # sanity check if not kwargs.get('base_net_driver'): raise IronicException(msg='base_net_driver required.') base_net_driver = kwargs['base_net_driver'] if not kwargs.get('device_id'): raise IronicException(msg='device_id required.') device_id = kwargs['device_id'] if not kwargs.get('instance_node_id'): raise IronicException(msg='instance_node_id required.') instance_node_id = kwargs['instance_node_id'] if not kwargs.get('mac_address'): raise IronicException(msg='mac_address is required.') mac_address = str(netaddr.EUI(kwargs["mac_address"]["address"])) mac_address = mac_address.replace('-', ':') # TODO(morgabra): Change this when we enable security groups. if kwargs.get('security_groups'): msg = 'ironic driver does not support security group operations.' raise IronicException(msg=msg) # unroll the given address models into a fixed_ips list we can # pass downstream fixed_ips = [] addresses = kwargs.get('addresses') if not isinstance(addresses, list): addresses = [addresses] for address in addresses: fixed_ips.append(self._make_fixed_ip_dict(context, address)) body = { "id": port_id, "network_id": network_id, "device_id": device_id, "device_owner": kwargs.get('device_owner', ''), "tenant_id": context.tenant_id or "quark", "roles": context.roles, "mac_address": mac_address, "fixed_ips": fixed_ips, "switch:hardware_id": instance_node_id, "dynamic_network": not STRATEGY.is_provider_network(network_id) } net_info = self._get_base_network_info( context, network_id, base_net_driver) body.update(net_info) try: LOG.info("creating downstream port: %s" % (body)) port = self._create_port(context, body) LOG.info("created downstream port: %s" % (port)) return {"uuid": port['port']['id'], "vlan_id": port['port']['vlan_id']} except Exception as e: msg = "failed to create downstream port. Exception: %s" % (e) raise IronicException(msg=msg)
def function[create_port, parameter[self, context, network_id, port_id]]: constant[Create a port. :param context: neutron api request context. :param network_id: neutron network id. :param port_id: neutron port id. :param kwargs: required keys - device_id: neutron port device_id (instance_id) instance_node_id: nova hypervisor host id mac_address: neutron port mac address base_net_driver: the base network driver optional keys - addresses: list of allocated IPAddress models security_groups: list of associated security groups :raises IronicException: If the client is unable to create the downstream port for any reason, the exception will be logged and IronicException raised. ] call[name[LOG].info, parameter[binary_operation[constant[create_port %s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1affc0910>, <ast.Name object at 0x7da1affc2170>, <ast.Name object at 0x7da1affc2200>]]]]] if <ast.UnaryOp object at 0x7da1affc2140> begin[:] <ast.Raise object at 0x7da1affc3970> variable[base_net_driver] assign[=] call[name[kwargs]][constant[base_net_driver]] if <ast.UnaryOp object at 0x7da1affc0fa0> begin[:] <ast.Raise object at 0x7da1affc0370> variable[device_id] assign[=] call[name[kwargs]][constant[device_id]] if <ast.UnaryOp object at 0x7da1affc21d0> begin[:] <ast.Raise object at 0x7da1affc23b0> variable[instance_node_id] assign[=] call[name[kwargs]][constant[instance_node_id]] if <ast.UnaryOp object at 0x7da1affc0970> begin[:] <ast.Raise object at 0x7da1affc22c0> variable[mac_address] assign[=] call[name[str], parameter[call[name[netaddr].EUI, parameter[call[call[name[kwargs]][constant[mac_address]]][constant[address]]]]]] variable[mac_address] assign[=] call[name[mac_address].replace, parameter[constant[-], constant[:]]] if call[name[kwargs].get, parameter[constant[security_groups]]] begin[:] variable[msg] assign[=] constant[ironic driver does not support security group operations.] <ast.Raise object at 0x7da1affc18a0> variable[fixed_ips] assign[=] list[[]] variable[addresses] assign[=] call[name[kwargs].get, parameter[constant[addresses]]] if <ast.UnaryOp object at 0x7da1affc1600> begin[:] variable[addresses] assign[=] list[[<ast.Name object at 0x7da1affc0ca0>]] for taget[name[address]] in starred[name[addresses]] begin[:] call[name[fixed_ips].append, parameter[call[name[self]._make_fixed_ip_dict, parameter[name[context], name[address]]]]] variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1affc1330>, <ast.Constant object at 0x7da1affc27d0>, <ast.Constant object at 0x7da1affc1360>, <ast.Constant object at 0x7da1affc0040>, <ast.Constant object at 0x7da1affc00d0>, <ast.Constant object at 0x7da1affc00a0>, <ast.Constant object at 0x7da1affc2770>, <ast.Constant object at 0x7da1affc2860>, <ast.Constant object at 0x7da1affc0d00>, <ast.Constant object at 0x7da1affc2740>], [<ast.Name object at 0x7da1affc0f40>, <ast.Name object at 0x7da1affc2800>, <ast.Name object at 0x7da1affc01c0>, <ast.Call object at 0x7da1affc2fb0>, <ast.BoolOp object at 0x7da1affc03d0>, <ast.Attribute object at 0x7da1affc3dc0>, <ast.Name object at 0x7da1affc1120>, <ast.Name object at 0x7da1affc3d00>, <ast.Name object at 0x7da1affc1030>, <ast.UnaryOp object at 0x7da1affc0280>]] variable[net_info] assign[=] call[name[self]._get_base_network_info, parameter[name[context], name[network_id], name[base_net_driver]]] call[name[body].update, parameter[name[net_info]]] <ast.Try object at 0x7da1affc2f20>
keyword[def] identifier[create_port] ( identifier[self] , identifier[context] , identifier[network_id] , identifier[port_id] ,** identifier[kwargs] ): literal[string] identifier[LOG] . identifier[info] ( literal[string] %( identifier[context] . identifier[tenant_id] , identifier[network_id] , identifier[port_id] )) keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[raise] identifier[IronicException] ( identifier[msg] = literal[string] ) identifier[base_net_driver] = identifier[kwargs] [ literal[string] ] keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[raise] identifier[IronicException] ( identifier[msg] = literal[string] ) identifier[device_id] = identifier[kwargs] [ literal[string] ] keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[raise] identifier[IronicException] ( identifier[msg] = literal[string] ) identifier[instance_node_id] = identifier[kwargs] [ literal[string] ] keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[raise] identifier[IronicException] ( identifier[msg] = literal[string] ) identifier[mac_address] = identifier[str] ( identifier[netaddr] . identifier[EUI] ( identifier[kwargs] [ literal[string] ][ literal[string] ])) identifier[mac_address] = identifier[mac_address] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): identifier[msg] = literal[string] keyword[raise] identifier[IronicException] ( identifier[msg] = identifier[msg] ) identifier[fixed_ips] =[] identifier[addresses] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[addresses] , identifier[list] ): identifier[addresses] =[ identifier[addresses] ] keyword[for] identifier[address] keyword[in] identifier[addresses] : identifier[fixed_ips] . identifier[append] ( identifier[self] . identifier[_make_fixed_ip_dict] ( identifier[context] , identifier[address] )) identifier[body] ={ literal[string] : identifier[port_id] , literal[string] : identifier[network_id] , literal[string] : identifier[device_id] , literal[string] : identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ), literal[string] : identifier[context] . identifier[tenant_id] keyword[or] literal[string] , literal[string] : identifier[context] . identifier[roles] , literal[string] : identifier[mac_address] , literal[string] : identifier[fixed_ips] , literal[string] : identifier[instance_node_id] , literal[string] : keyword[not] identifier[STRATEGY] . identifier[is_provider_network] ( identifier[network_id] ) } identifier[net_info] = identifier[self] . identifier[_get_base_network_info] ( identifier[context] , identifier[network_id] , identifier[base_net_driver] ) identifier[body] . identifier[update] ( identifier[net_info] ) keyword[try] : identifier[LOG] . identifier[info] ( literal[string] %( identifier[body] )) identifier[port] = identifier[self] . identifier[_create_port] ( identifier[context] , identifier[body] ) identifier[LOG] . identifier[info] ( literal[string] %( identifier[port] )) keyword[return] { literal[string] : identifier[port] [ literal[string] ][ literal[string] ], literal[string] : identifier[port] [ literal[string] ][ literal[string] ]} keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[msg] = literal[string] %( identifier[e] ) keyword[raise] identifier[IronicException] ( identifier[msg] = identifier[msg] )
def create_port(self, context, network_id, port_id, **kwargs): """Create a port. :param context: neutron api request context. :param network_id: neutron network id. :param port_id: neutron port id. :param kwargs: required keys - device_id: neutron port device_id (instance_id) instance_node_id: nova hypervisor host id mac_address: neutron port mac address base_net_driver: the base network driver optional keys - addresses: list of allocated IPAddress models security_groups: list of associated security groups :raises IronicException: If the client is unable to create the downstream port for any reason, the exception will be logged and IronicException raised. """ LOG.info('create_port %s %s %s' % (context.tenant_id, network_id, port_id)) # sanity check if not kwargs.get('base_net_driver'): raise IronicException(msg='base_net_driver required.') # depends on [control=['if'], data=[]] base_net_driver = kwargs['base_net_driver'] if not kwargs.get('device_id'): raise IronicException(msg='device_id required.') # depends on [control=['if'], data=[]] device_id = kwargs['device_id'] if not kwargs.get('instance_node_id'): raise IronicException(msg='instance_node_id required.') # depends on [control=['if'], data=[]] instance_node_id = kwargs['instance_node_id'] if not kwargs.get('mac_address'): raise IronicException(msg='mac_address is required.') # depends on [control=['if'], data=[]] mac_address = str(netaddr.EUI(kwargs['mac_address']['address'])) mac_address = mac_address.replace('-', ':') # TODO(morgabra): Change this when we enable security groups. if kwargs.get('security_groups'): msg = 'ironic driver does not support security group operations.' raise IronicException(msg=msg) # depends on [control=['if'], data=[]] # unroll the given address models into a fixed_ips list we can # pass downstream fixed_ips = [] addresses = kwargs.get('addresses') if not isinstance(addresses, list): addresses = [addresses] # depends on [control=['if'], data=[]] for address in addresses: fixed_ips.append(self._make_fixed_ip_dict(context, address)) # depends on [control=['for'], data=['address']] body = {'id': port_id, 'network_id': network_id, 'device_id': device_id, 'device_owner': kwargs.get('device_owner', ''), 'tenant_id': context.tenant_id or 'quark', 'roles': context.roles, 'mac_address': mac_address, 'fixed_ips': fixed_ips, 'switch:hardware_id': instance_node_id, 'dynamic_network': not STRATEGY.is_provider_network(network_id)} net_info = self._get_base_network_info(context, network_id, base_net_driver) body.update(net_info) try: LOG.info('creating downstream port: %s' % body) port = self._create_port(context, body) LOG.info('created downstream port: %s' % port) return {'uuid': port['port']['id'], 'vlan_id': port['port']['vlan_id']} # depends on [control=['try'], data=[]] except Exception as e: msg = 'failed to create downstream port. Exception: %s' % e raise IronicException(msg=msg) # depends on [control=['except'], data=['e']]
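One detail of create_port worth isolating is the MAC normalisation: netaddr.EUI renders EUI-48 addresses dash-separated by default, which the code then rewrites to colons. A small sketch assuming only the netaddr package:

import netaddr

raw = {'address': 'aa:bb:cc:dd:ee:ff'}   # illustrative input, shaped like kwargs['mac_address']
mac = str(netaddr.EUI(raw['address']))   # 'AA-BB-CC-DD-EE-FF' under the default dialect
mac = mac.replace('-', ':')              # -> 'AA:BB:CC:DD:EE:FF'
print(mac)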
def do_set_log_file(self, args): """Set the log file. Usage: set_log_file filename Parameters: filename: log file name to write to THIS CAN ONLY BE CALLED ONCE AND MUST BE CALLED BEFORE ANY LOGGING STARTS. """ params = args.split() try: filename = params[0] logging.basicConfig(filename=filename) except IndexError: self.do_help('set_log_file')
def function[do_set_log_file, parameter[self, args]]: constant[Set the log file. Usage: set_log_file filename Parameters: filename: log file name to write to THIS CAN ONLY BE CALLED ONCE AND MUST BE CALLED BEFORE ANY LOGGING STARTS. ] variable[params] assign[=] call[name[args].split, parameter[]] <ast.Try object at 0x7da1b1a479d0>
keyword[def] identifier[do_set_log_file] ( identifier[self] , identifier[args] ): literal[string] identifier[params] = identifier[args] . identifier[split] () keyword[try] : identifier[filename] = identifier[params] [ literal[int] ] identifier[logging] . identifier[basicConfig] ( identifier[filename] = identifier[filename] ) keyword[except] identifier[IndexError] : identifier[self] . identifier[do_help] ( literal[string] )
def do_set_log_file(self, args): """Set the log file. Usage: set_log_file filename Parameters: filename: log file name to write to THIS CAN ONLY BE CALLED ONCE AND MUST BE CALLED BEFORE ANY LOGGING STARTS. """ params = args.split() try: filename = params[0] logging.basicConfig(filename=filename) # depends on [control=['try'], data=[]] except IndexError: self.do_help('set_log_file') # depends on [control=['except'], data=[]]
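The all-caps warning in the docstring reflects how logging.basicConfig works: it is a no-op once the root logger already has handlers. A standalone check (the file names are illustrative):

import logging

logging.basicConfig(filename='first.log')   # configures the root logger
logging.basicConfig(filename='second.log')  # ignored: handlers already exist (Python 3.8+ offers force=True)
logging.warning('this ends up in first.log, not second.log')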
def _create_widget_content(sender, instance, created=False, **kwargs): """ create a widget content entry when a WidgetContentModel instance is created """ if not issubclass(sender, WidgetContentModel): return # create a WidgetContent Entry if created: instance.create_widget_content_entry() return
def function[_create_widget_content, parameter[sender, instance, created]]: constant[ create a widget content entry when a WidgetContentModel instance is created ] if <ast.UnaryOp object at 0x7da20e957040> begin[:] return[None] if name[created] begin[:] call[name[instance].create_widget_content_entry, parameter[]] return[None]
keyword[def] identifier[_create_widget_content] ( identifier[sender] , identifier[instance] , identifier[created] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[issubclass] ( identifier[sender] , identifier[WidgetContentModel] ): keyword[return] keyword[if] identifier[created] : identifier[instance] . identifier[create_widget_content_entry] () keyword[return]
def _create_widget_content(sender, instance, created=False, **kwargs): """ create a widget content entry when a WidgetContentModel instance is created """ if not issubclass(sender, WidgetContentModel): return # depends on [control=['if'], data=[]] # create a WidgetContent Entry if created: instance.create_widget_content_entry() return # depends on [control=['if'], data=[]]
def get_current_future_chain(self, continuous_future, dt): """ Retrieves the future chain for the contract at the given `dt` according to the `continuous_future` specification. Returns ------- future_chain : list[Future] A list of active futures, where the first index is the current contract specified by the continuous future definition, the second is the next upcoming contract and so on. """ rf = self._roll_finders[continuous_future.roll_style] session = self.trading_calendar.minute_to_session_label(dt) contract_center = rf.get_contract_center( continuous_future.root_symbol, session, continuous_future.offset) oc = self.asset_finder.get_ordered_contracts( continuous_future.root_symbol) chain = oc.active_chain(contract_center, session.value) return self.asset_finder.retrieve_all(chain)
def function[get_current_future_chain, parameter[self, continuous_future, dt]]: constant[ Retrieves the future chain for the contract at the given `dt` according to the `continuous_future` specification. Returns ------- future_chain : list[Future] A list of active futures, where the first index is the current contract specified by the continuous future definition, the second is the next upcoming contract and so on. ] variable[rf] assign[=] call[name[self]._roll_finders][name[continuous_future].roll_style] variable[session] assign[=] call[name[self].trading_calendar.minute_to_session_label, parameter[name[dt]]] variable[contract_center] assign[=] call[name[rf].get_contract_center, parameter[name[continuous_future].root_symbol, name[session], name[continuous_future].offset]] variable[oc] assign[=] call[name[self].asset_finder.get_ordered_contracts, parameter[name[continuous_future].root_symbol]] variable[chain] assign[=] call[name[oc].active_chain, parameter[name[contract_center], name[session].value]] return[call[name[self].asset_finder.retrieve_all, parameter[name[chain]]]]
keyword[def] identifier[get_current_future_chain] ( identifier[self] , identifier[continuous_future] , identifier[dt] ): literal[string] identifier[rf] = identifier[self] . identifier[_roll_finders] [ identifier[continuous_future] . identifier[roll_style] ] identifier[session] = identifier[self] . identifier[trading_calendar] . identifier[minute_to_session_label] ( identifier[dt] ) identifier[contract_center] = identifier[rf] . identifier[get_contract_center] ( identifier[continuous_future] . identifier[root_symbol] , identifier[session] , identifier[continuous_future] . identifier[offset] ) identifier[oc] = identifier[self] . identifier[asset_finder] . identifier[get_ordered_contracts] ( identifier[continuous_future] . identifier[root_symbol] ) identifier[chain] = identifier[oc] . identifier[active_chain] ( identifier[contract_center] , identifier[session] . identifier[value] ) keyword[return] identifier[self] . identifier[asset_finder] . identifier[retrieve_all] ( identifier[chain] )
def get_current_future_chain(self, continuous_future, dt): """ Retrieves the future chain for the contract at the given `dt` according to the `continuous_future` specification. Returns ------- future_chain : list[Future] A list of active futures, where the first index is the current contract specified by the continuous future definition, the second is the next upcoming contract and so on. """ rf = self._roll_finders[continuous_future.roll_style] session = self.trading_calendar.minute_to_session_label(dt) contract_center = rf.get_contract_center(continuous_future.root_symbol, session, continuous_future.offset) oc = self.asset_finder.get_ordered_contracts(continuous_future.root_symbol) chain = oc.active_chain(contract_center, session.value) return self.asset_finder.retrieve_all(chain)
def connect_to_rackspace(region, access_key_id, secret_access_key): """ returns a connection object to Rackspace """ pyrax.set_setting('identity_type', 'rackspace') pyrax.set_default_region(region) pyrax.set_credentials(access_key_id, secret_access_key) nova = pyrax.connect_to_cloudservers(region=region) return nova
def function[connect_to_rackspace, parameter[region, access_key_id, secret_access_key]]: constant[ returns a connection object to Rackspace ] call[name[pyrax].set_setting, parameter[constant[identity_type], constant[rackspace]]] call[name[pyrax].set_default_region, parameter[name[region]]] call[name[pyrax].set_credentials, parameter[name[access_key_id], name[secret_access_key]]] variable[nova] assign[=] call[name[pyrax].connect_to_cloudservers, parameter[]] return[name[nova]]
keyword[def] identifier[connect_to_rackspace] ( identifier[region] , identifier[access_key_id] , identifier[secret_access_key] ): literal[string] identifier[pyrax] . identifier[set_setting] ( literal[string] , literal[string] ) identifier[pyrax] . identifier[set_default_region] ( identifier[region] ) identifier[pyrax] . identifier[set_credentials] ( identifier[access_key_id] , identifier[secret_access_key] ) identifier[nova] = identifier[pyrax] . identifier[connect_to_cloudservers] ( identifier[region] = identifier[region] ) keyword[return] identifier[nova]
def connect_to_rackspace(region, access_key_id, secret_access_key): """ returns a connection object to Rackspace """ pyrax.set_setting('identity_type', 'rackspace') pyrax.set_default_region(region) pyrax.set_credentials(access_key_id, secret_access_key) nova = pyrax.connect_to_cloudservers(region=region) return nova
def mimetype_icon(path, fallback=None): """ Tries to create an icon from theme using the file mimetype. E.g.:: return self.mimetype_icon( path, fallback=':/icons/text-x-python.png') :param path: file path for which the icon must be created :param fallback: fallback icon path (qrc or file system) :returns: QIcon or None if the file mimetype icon could not be found. """ mime = mimetypes.guess_type(path)[0] if mime: icon = mime.replace('/', '-') # if system.WINDOWS: # return icons.file() if QtGui.QIcon.hasThemeIcon(icon): icon = QtGui.QIcon.fromTheme(icon) if not icon.isNull(): return icon if fallback: return QtGui.QIcon(fallback) return QtGui.QIcon.fromTheme('text-x-generic')
def function[mimetype_icon, parameter[path, fallback]]: constant[ Tries to create an icon from theme using the file mimetype. E.g.:: return self.mimetype_icon( path, fallback=':/icons/text-x-python.png') :param path: file path for which the icon must be created :param fallback: fallback icon path (qrc or file system) :returns: QIcon or None if the file mimetype icon could not be found. ] variable[mime] assign[=] call[call[name[mimetypes].guess_type, parameter[name[path]]]][constant[0]] if name[mime] begin[:] variable[icon] assign[=] call[name[mime].replace, parameter[constant[/], constant[-]]] if call[name[QtGui].QIcon.hasThemeIcon, parameter[name[icon]]] begin[:] variable[icon] assign[=] call[name[QtGui].QIcon.fromTheme, parameter[name[icon]]] if <ast.UnaryOp object at 0x7da20c76f040> begin[:] return[name[icon]] if name[fallback] begin[:] return[call[name[QtGui].QIcon, parameter[name[fallback]]]] return[call[name[QtGui].QIcon.fromTheme, parameter[constant[text-x-generic]]]]
keyword[def] identifier[mimetype_icon] ( identifier[path] , identifier[fallback] = keyword[None] ): literal[string] identifier[mime] = identifier[mimetypes] . identifier[guess_type] ( identifier[path] )[ literal[int] ] keyword[if] identifier[mime] : identifier[icon] = identifier[mime] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[QtGui] . identifier[QIcon] . identifier[hasThemeIcon] ( identifier[icon] ): identifier[icon] = identifier[QtGui] . identifier[QIcon] . identifier[fromTheme] ( identifier[icon] ) keyword[if] keyword[not] identifier[icon] . identifier[isNull] (): keyword[return] identifier[icon] keyword[if] identifier[fallback] : keyword[return] identifier[QtGui] . identifier[QIcon] ( identifier[fallback] ) keyword[return] identifier[QtGui] . identifier[QIcon] . identifier[fromTheme] ( literal[string] )
def mimetype_icon(path, fallback=None): """ Tries to create an icon from theme using the file mimetype. E.g.:: return self.mimetype_icon( path, fallback=':/icons/text-x-python.png') :param path: file path for which the icon must be created :param fallback: fallback icon path (qrc or file system) :returns: QIcon or None if the file mimetype icon could not be found. """ mime = mimetypes.guess_type(path)[0] if mime: icon = mime.replace('/', '-') # if system.WINDOWS: # return icons.file() if QtGui.QIcon.hasThemeIcon(icon): icon = QtGui.QIcon.fromTheme(icon) if not icon.isNull(): return icon # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if fallback: return QtGui.QIcon(fallback) # depends on [control=['if'], data=[]] return QtGui.QIcon.fromTheme('text-x-generic')
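The mime-type-to-theme-icon mapping at the heart of mimetype_icon can be inspected without Qt, since freedesktop icon names are just the MIME type with '/' replaced by '-':

import mimetypes

for path in ('script.py', 'notes.txt', 'photo.png'):
    mime = mimetypes.guess_type(path)[0]
    icon_name = mime.replace('/', '-') if mime else 'text-x-generic'
    print(path, '->', mime, '->', icon_name)  # e.g. script.py -> text/x-python -> text-x-python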
def disallowed_table(*tables): """Returns True if any of the tables is in the blacklist or, if a whitelist is set, if any of the tables is not in the whitelist. False otherwise.""" # XXX: When using a black or white list, this has to be done EVERY query; # It'd be nice to make this as fast as possible. In general, queries # should have relatively few tables involved, and I don't imagine that # blacklists would grow very vast. The fastest i've been able to come # up with is to pre-create a blacklist set and use intersect. return not bool(settings.WHITELIST.issuperset(tables)) if settings.WHITELIST\ else bool(settings.BLACKLIST.intersection(tables))
def function[disallowed_table, parameter[]]: constant[Returns True if any of the tables is in the blacklist or, if a whitelist is set, if any of the tables is not in the whitelist. False otherwise.] return[<ast.IfExp object at 0x7da18f7200d0>]
keyword[def] identifier[disallowed_table] (* identifier[tables] ): literal[string] keyword[return] keyword[not] identifier[bool] ( identifier[settings] . identifier[WHITELIST] . identifier[issuperset] ( identifier[tables] )) keyword[if] identifier[settings] . identifier[WHITELIST] keyword[else] identifier[bool] ( identifier[settings] . identifier[BLACKLIST] . identifier[intersection] ( identifier[tables] ))
def disallowed_table(*tables): """Returns True if any of the tables is in the blacklist or, if a whitelist is set, if any of the tables is not in the whitelist. False otherwise.""" # XXX: When using a black or white list, this has to be done EVERY query; # It'd be nice to make this as fast as possible. In general, queries # should have relatively few tables involved, and I don't imagine that # blacklists would grow very vast. The fastest i've been able to come # up with is to pre-create a blacklist set and use intersect. return not bool(settings.WHITELIST.issuperset(tables)) if settings.WHITELIST else bool(settings.BLACKLIST.intersection(tables))
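The whitelist/blacklist set algebra can be checked in isolation; the settings object below is a stand-in for the real module, so the function is re-bound against it:

from types import SimpleNamespace

settings = SimpleNamespace(WHITELIST=frozenset(), BLACKLIST=frozenset({'secrets'}))

def disallowed_table(*tables):
    return (not settings.WHITELIST.issuperset(tables)) if settings.WHITELIST else bool(settings.BLACKLIST.intersection(tables))

print(disallowed_table('users', 'secrets'))  # True: one table is blacklisted
settings.WHITELIST = frozenset({'users'})
print(disallowed_table('users'))             # False: every table is whitelisted
print(disallowed_table('users', 'orders'))   # True: 'orders' is not in the whitelist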
def schema_from_json(self, file_or_path):
    """Takes a file object or file path that contains JSON describing
    a table schema.

    Returns:
        List of schema field objects.
    """
    if isinstance(file_or_path, io.IOBase):
        return self._schema_from_json_file_object(file_or_path)

    with open(file_or_path) as file_obj:
        return self._schema_from_json_file_object(file_obj)
def function[schema_from_json, parameter[self, file_or_path]]:
    constant[Takes a file object or file path that contains JSON describing a table schema.

        Returns:
            List of schema field objects.
        ]
    if call[name[isinstance], parameter[name[file_or_path], name[io].IOBase]] begin[:]
        return[call[name[self]._schema_from_json_file_object, parameter[name[file_or_path]]]]
    with call[name[open], parameter[name[file_or_path]]] begin[:]
        return[call[name[self]._schema_from_json_file_object, parameter[name[file_obj]]]]
keyword[def] identifier[schema_from_json] ( identifier[self] , identifier[file_or_path] ): literal[string] keyword[if] identifier[isinstance] ( identifier[file_or_path] , identifier[io] . identifier[IOBase] ): keyword[return] identifier[self] . identifier[_schema_from_json_file_object] ( identifier[file_or_path] ) keyword[with] identifier[open] ( identifier[file_or_path] ) keyword[as] identifier[file_obj] : keyword[return] identifier[self] . identifier[_schema_from_json_file_object] ( identifier[file_obj] )
def schema_from_json(self, file_or_path): """Takes a file object or file path that contains JSON describing a table schema. Returns: List of schema field objects. """ if isinstance(file_or_path, io.IOBase): return self._schema_from_json_file_object(file_or_path) # depends on [control=['if'], data=[]] with open(file_or_path) as file_obj: return self._schema_from_json_file_object(file_obj) # depends on [control=['with'], data=['file_obj']]
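Hedged usage sketch for schema_from_json as exposed on the google-cloud-bigquery Client. It assumes the library is installed, default credentials are configured, and a schema.json with the contents shown in the comment exists on disk.

from google.cloud import bigquery

# schema.json (assumed contents):
# [{"name": "full_name", "type": "STRING", "mode": "REQUIRED"},
#  {"name": "age", "type": "INTEGER", "mode": "REQUIRED"}]
client = bigquery.Client()
schema = client.schema_from_json('schema.json')
for field in schema:
    print(field.name, field.field_type, field.mode)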
def deserialize_block(value):
    """
    Deserialize a byte string into a BlockWrapper

    Args:
        value (bytes): the byte string to deserialize

    Returns:
        BlockWrapper: a block wrapper instance
    """
    # Block id strings are stored under batch/txn ids for reference.
    # Only Blocks, not ids or Nones, should be returned by _get_block.
    block = Block()
    block.ParseFromString(value)
    return BlockWrapper(
        block=block)
def function[deserialize_block, parameter[value]]:
    constant[ Deserialize a byte string into a BlockWrapper Args: value (bytes): the byte string to deserialize Returns: BlockWrapper: a block wrapper instance ]
    variable[block] assign[=] call[name[Block], parameter[]]
    call[name[block].ParseFromString, parameter[name[value]]]
    return[call[name[BlockWrapper], parameter[]]]
keyword[def] identifier[deserialize_block] ( identifier[value] ): literal[string] identifier[block] = identifier[Block] () identifier[block] . identifier[ParseFromString] ( identifier[value] ) keyword[return] identifier[BlockWrapper] ( identifier[block] = identifier[block] )
def deserialize_block(value): """ Deserialize a byte string into a BlockWrapper Args: value (bytes): the byte string to deserialize Returns: BlockWrapper: a block wrapper instance """ # Block id strings are stored under batch/txn ids for reference. # Only Blocks, not ids or Nones, should be returned by _get_block. block = Block() block.ParseFromString(value) return BlockWrapper(block=block)
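A round-trip sketch under two assumptions: the protobuf Block has a header_signature string field (as in Sawtooth's block.proto), and BlockWrapper exposes the wrapped message as .block.

original = Block(header_signature='abc123')   # assumed field name
value = original.SerializeToString()          # the byte string a store would hold

wrapper = deserialize_block(value)
assert wrapper.block.header_signature == 'abc123'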
def decimate_percentile(self, a, maxpoints, **kwargs): """Return data *a* percentile-decimated on *maxpoints*. Histograms each column into *maxpoints* bins and calculates the percentile *per* in each bin as the decimated data, using :func:`numkit.timeseries.percentile_histogrammed_function`. The coarse grained time in the first column contains the centers of the histogram time. If *a* contains <= *maxpoints* then *a* is simply returned; otherwise a new array of the same dimensions but with a reduced number of *maxpoints* points is returned. .. Note:: Assumes that the first column is time. :Keywords: *per* percentile as a percentage, e.g. 75 is the value that splits the data into the lower 75% and upper 25%; 50 is the median [50.0] .. SeeAlso:: :func:`numkit.timeseries.regularized_function` with :func:`scipy.stats.scoreatpercentile` """ return self._decimate(numkit.timeseries.percentile_histogrammed_function, a, maxpoints, **kwargs)
def function[decimate_percentile, parameter[self, a, maxpoints]]: constant[Return data *a* percentile-decimated on *maxpoints*. Histograms each column into *maxpoints* bins and calculates the percentile *per* in each bin as the decimated data, using :func:`numkit.timeseries.percentile_histogrammed_function`. The coarse grained time in the first column contains the centers of the histogram time. If *a* contains <= *maxpoints* then *a* is simply returned; otherwise a new array of the same dimensions but with a reduced number of *maxpoints* points is returned. .. Note:: Assumes that the first column is time. :Keywords: *per* percentile as a percentage, e.g. 75 is the value that splits the data into the lower 75% and upper 25%; 50 is the median [50.0] .. SeeAlso:: :func:`numkit.timeseries.regularized_function` with :func:`scipy.stats.scoreatpercentile` ] return[call[name[self]._decimate, parameter[name[numkit].timeseries.percentile_histogrammed_function, name[a], name[maxpoints]]]]
keyword[def] identifier[decimate_percentile] ( identifier[self] , identifier[a] , identifier[maxpoints] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[_decimate] ( identifier[numkit] . identifier[timeseries] . identifier[percentile_histogrammed_function] , identifier[a] , identifier[maxpoints] ,** identifier[kwargs] )
def decimate_percentile(self, a, maxpoints, **kwargs): """Return data *a* percentile-decimated on *maxpoints*. Histograms each column into *maxpoints* bins and calculates the percentile *per* in each bin as the decimated data, using :func:`numkit.timeseries.percentile_histogrammed_function`. The coarse grained time in the first column contains the centers of the histogram time. If *a* contains <= *maxpoints* then *a* is simply returned; otherwise a new array of the same dimensions but with a reduced number of *maxpoints* points is returned. .. Note:: Assumes that the first column is time. :Keywords: *per* percentile as a percentage, e.g. 75 is the value that splits the data into the lower 75% and upper 25%; 50 is the median [50.0] .. SeeAlso:: :func:`numkit.timeseries.regularized_function` with :func:`scipy.stats.scoreatpercentile` """ return self._decimate(numkit.timeseries.percentile_histogrammed_function, a, maxpoints, **kwargs)
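The per-bin percentile coarse-graining this method delegates to can be illustrated directly with NumPy. This is a minimal sketch of the idea, not the numkit call itself; percentile_decimate is a hypothetical helper written here for illustration.

import numpy as np

def percentile_decimate(t, y, maxpoints, per=50.0):
    # Bin t into maxpoints equal-width bins and take one percentile per bin.
    edges = np.linspace(t.min(), t.max(), maxpoints + 1)
    centers = 0.5 * (edges[:-1] + edges[1:])
    idx = np.clip(np.digitize(t, edges) - 1, 0, maxpoints - 1)
    coarse = np.array([np.percentile(y[idx == i], per) if np.any(idx == i) else np.nan
                       for i in range(maxpoints)])
    return centers, coarse

t = np.linspace(0, 10, 10000)
y = np.sin(t) + 0.1 * np.random.randn(t.size)
tc, yc = percentile_decimate(t, y, maxpoints=100, per=75.0)  # 75th percentile per bin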
def _read_callback(connection_id, data_buffer, data_length_pointer):
    """
    Callback called by Secure Transport to actually read the socket

    :param connection_id:
        An integer identifying the connection

    :param data_buffer:
        A char pointer FFI type to write the data to

    :param data_length_pointer:
        A size_t pointer FFI type of the amount of data to read. Will be
        overwritten with the amount of data read on return.

    :return:
        An integer status code of the result - 0 for success
    """

    self = None
    try:
        self = _connection_refs.get(connection_id)
        if not self:
            socket = _socket_refs.get(connection_id)
        else:
            socket = self._socket
        if not self and not socket:
            return 0

        bytes_requested = deref(data_length_pointer)

        timeout = socket.gettimeout()
        error = None
        data = b''
        try:
            while len(data) < bytes_requested:
                # Python 2 on Travis CI seems to have issues with blocking on
                # recv() for longer than the socket timeout value, so we select
                if timeout is not None and timeout > 0.0:
                    read_ready, _, _ = select.select([socket], [], [], timeout)
                    if len(read_ready) == 0:
                        raise socket_.error(errno.EAGAIN, 'timed out')
                chunk = socket.recv(bytes_requested - len(data))
                data += chunk
                if chunk == b'':
                    if len(data) == 0:
                        if timeout is None:
                            return SecurityConst.errSSLClosedNoNotify
                        return SecurityConst.errSSLClosedAbort
                    break
        except (socket_.error) as e:
            error = e.errno

        if error is not None and error != errno.EAGAIN:
            if error == errno.ECONNRESET or error == errno.EPIPE:
                return SecurityConst.errSSLClosedNoNotify
            return SecurityConst.errSSLClosedAbort

        if self and not self._done_handshake:
            # SecureTransport doesn't bother to check if the TLS record header
            # is valid before asking to read more data, which can result in
            # connection hangs. Here we do basic checks to get around the issue.
            if len(data) >= 3 and len(self._server_hello) == 0:
                # Check to ensure it is an alert or handshake first
                valid_record_type = data[0:1] in set([b'\x15', b'\x16'])

                # Check if the protocol version is SSL 3.0 or TLS 1.0-1.3
                valid_protocol_version = data[1:3] in set([
                    b'\x03\x00',
                    b'\x03\x01',
                    b'\x03\x02',
                    b'\x03\x03',
                    b'\x03\x04'
                ])

                if not valid_record_type or not valid_protocol_version:
                    self._server_hello += data + _read_remaining(socket)
                    return SecurityConst.errSSLProtocol
            self._server_hello += data

        write_to_buffer(data_buffer, data)
        pointer_set(data_length_pointer, len(data))
        if len(data) != bytes_requested:
            return SecurityConst.errSSLWouldBlock

        return 0

    except (KeyboardInterrupt) as e:
        if self:
            self._exception = e
        return SecurityConst.errSSLClosedAbort
def function[_read_callback, parameter[connection_id, data_buffer, data_length_pointer]]:
    constant[ Callback called by Secure Transport to actually read the socket :param connection_id: An integer identifying the connection :param data_buffer: A char pointer FFI type to write the data to :param data_length_pointer: A size_t pointer FFI type of the amount of data to read. Will be overwritten with the amount of data read on return. :return: An integer status code of the result - 0 for success ]
    variable[self] assign[=] constant[None]
    <ast.Try object at 0x7da1aff63d90>
keyword[def] identifier[_read_callback] ( identifier[connection_id] , identifier[data_buffer] , identifier[data_length_pointer] ): literal[string] identifier[self] = keyword[None] keyword[try] : identifier[self] = identifier[_connection_refs] . identifier[get] ( identifier[connection_id] ) keyword[if] keyword[not] identifier[self] : identifier[socket] = identifier[_socket_refs] . identifier[get] ( identifier[connection_id] ) keyword[else] : identifier[socket] = identifier[self] . identifier[_socket] keyword[if] keyword[not] identifier[self] keyword[and] keyword[not] identifier[socket] : keyword[return] literal[int] identifier[bytes_requested] = identifier[deref] ( identifier[data_length_pointer] ) identifier[timeout] = identifier[socket] . identifier[gettimeout] () identifier[error] = keyword[None] identifier[data] = literal[string] keyword[try] : keyword[while] identifier[len] ( identifier[data] )< identifier[bytes_requested] : keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] keyword[and] identifier[timeout] > literal[int] : identifier[read_ready] , identifier[_] , identifier[_] = identifier[select] . identifier[select] ([ identifier[socket] ],[],[], identifier[timeout] ) keyword[if] identifier[len] ( identifier[read_ready] )== literal[int] : keyword[raise] identifier[socket_] . identifier[error] ( identifier[errno] . identifier[EAGAIN] , literal[string] ) identifier[chunk] = identifier[socket] . identifier[recv] ( identifier[bytes_requested] - identifier[len] ( identifier[data] )) identifier[data] += identifier[chunk] keyword[if] identifier[chunk] == literal[string] : keyword[if] identifier[len] ( identifier[data] )== literal[int] : keyword[if] identifier[timeout] keyword[is] keyword[None] : keyword[return] identifier[SecurityConst] . identifier[errSSLClosedNoNotify] keyword[return] identifier[SecurityConst] . identifier[errSSLClosedAbort] keyword[break] keyword[except] ( identifier[socket_] . identifier[error] ) keyword[as] identifier[e] : identifier[error] = identifier[e] . identifier[errno] keyword[if] identifier[error] keyword[is] keyword[not] keyword[None] keyword[and] identifier[error] != identifier[errno] . identifier[EAGAIN] : keyword[if] identifier[error] == identifier[errno] . identifier[ECONNRESET] keyword[or] identifier[error] == identifier[errno] . identifier[EPIPE] : keyword[return] identifier[SecurityConst] . identifier[errSSLClosedNoNotify] keyword[return] identifier[SecurityConst] . identifier[errSSLClosedAbort] keyword[if] identifier[self] keyword[and] keyword[not] identifier[self] . identifier[_done_handshake] : keyword[if] identifier[len] ( identifier[data] )>= literal[int] keyword[and] identifier[len] ( identifier[self] . identifier[_server_hello] )== literal[int] : identifier[valid_record_type] = identifier[data] [ literal[int] : literal[int] ] keyword[in] identifier[set] ([ literal[string] , literal[string] ]) identifier[valid_protocol_version] = identifier[data] [ literal[int] : literal[int] ] keyword[in] identifier[set] ([ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]) keyword[if] keyword[not] identifier[valid_record_type] keyword[or] keyword[not] identifier[valid_protocol_version] : identifier[self] . identifier[_server_hello] += identifier[data] + identifier[_read_remaining] ( identifier[socket] ) keyword[return] identifier[SecurityConst] . identifier[errSSLProtocol] identifier[self] . identifier[_server_hello] += identifier[data] identifier[write_to_buffer] ( identifier[data_buffer] , identifier[data] ) identifier[pointer_set] ( identifier[data_length_pointer] , identifier[len] ( identifier[data] )) keyword[if] identifier[len] ( identifier[data] )!= identifier[bytes_requested] : keyword[return] identifier[SecurityConst] . identifier[errSSLWouldBlock] keyword[return] literal[int] keyword[except] ( identifier[KeyboardInterrupt] ) keyword[as] identifier[e] : keyword[if] identifier[self] : identifier[self] . identifier[_exception] = identifier[e] keyword[return] identifier[SecurityConst] . identifier[errSSLClosedAbort]
def _read_callback(connection_id, data_buffer, data_length_pointer):
    """
    Callback called by Secure Transport to actually read the socket

    :param connection_id:
        An integer identifying the connection

    :param data_buffer:
        A char pointer FFI type to write the data to

    :param data_length_pointer:
        A size_t pointer FFI type of the amount of data to read. Will be
        overwritten with the amount of data read on return.

    :return:
        An integer status code of the result - 0 for success
    """
    self = None
    try:
        self = _connection_refs.get(connection_id)
        if not self:
            socket = _socket_refs.get(connection_id) # depends on [control=['if'], data=[]]
        else:
            socket = self._socket
        if not self and (not socket):
            return 0 # depends on [control=['if'], data=[]]
        bytes_requested = deref(data_length_pointer)
        timeout = socket.gettimeout()
        error = None
        data = b''
        try:
            while len(data) < bytes_requested:
                # Python 2 on Travis CI seems to have issues with blocking on
                # recv() for longer than the socket timeout value, so we select
                if timeout is not None and timeout > 0.0:
                    (read_ready, _, _) = select.select([socket], [], [], timeout)
                    if len(read_ready) == 0:
                        raise socket_.error(errno.EAGAIN, 'timed out') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
                chunk = socket.recv(bytes_requested - len(data))
                data += chunk
                if chunk == b'':
                    if len(data) == 0:
                        if timeout is None:
                            return SecurityConst.errSSLClosedNoNotify # depends on [control=['if'], data=[]]
                        return SecurityConst.errSSLClosedAbort # depends on [control=['if'], data=[]]
                    break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['bytes_requested']] # depends on [control=['try'], data=[]]
        except socket_.error as e:
            error = e.errno # depends on [control=['except'], data=['e']]
        if error is not None and error != errno.EAGAIN:
            if error == errno.ECONNRESET or error == errno.EPIPE:
                return SecurityConst.errSSLClosedNoNotify # depends on [control=['if'], data=[]]
            return SecurityConst.errSSLClosedAbort # depends on [control=['if'], data=[]]
        if self and (not self._done_handshake):
            # SecureTransport doesn't bother to check if the TLS record header
            # is valid before asking to read more data, which can result in
            # connection hangs. Here we do basic checks to get around the issue.
            if len(data) >= 3 and len(self._server_hello) == 0:
                # Check to ensure it is an alert or handshake first
                valid_record_type = data[0:1] in set([b'\x15', b'\x16'])
                # Check if the protocol version is SSL 3.0 or TLS 1.0-1.3
                valid_protocol_version = data[1:3] in set([b'\x03\x00', b'\x03\x01', b'\x03\x02', b'\x03\x03', b'\x03\x04'])
                if not valid_record_type or not valid_protocol_version:
                    self._server_hello += data + _read_remaining(socket)
                    return SecurityConst.errSSLProtocol # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
            self._server_hello += data # depends on [control=['if'], data=[]]
        write_to_buffer(data_buffer, data)
        pointer_set(data_length_pointer, len(data))
        if len(data) != bytes_requested:
            return SecurityConst.errSSLWouldBlock # depends on [control=['if'], data=[]]
        return 0 # depends on [control=['try'], data=[]]
    except KeyboardInterrupt as e:
        if self:
            self._exception = e # depends on [control=['if'], data=[]]
        return SecurityConst.errSSLClosedAbort # depends on [control=['except'], data=['e']]
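The TLS record sanity check in the handshake branch above stands alone well; here is the same byte-level test as a self-contained sketch (looks_like_tls_record is a hypothetical name for illustration).

def looks_like_tls_record(data):
    # Byte 0 must be alert (0x15) or handshake (0x16); bytes 1-2 must be
    # an SSL 3.0 / TLS 1.0-1.3 protocol version.
    return (len(data) >= 3
            and data[0:1] in (b'\x15', b'\x16')
            and data[1:3] in (b'\x03\x00', b'\x03\x01', b'\x03\x02',
                              b'\x03\x03', b'\x03\x04'))

print(looks_like_tls_record(b'\x16\x03\x03\x00\x2a'))  # True: TLS 1.2 handshake record
print(looks_like_tls_record(b'HTTP/1.1 400 Bad'))      # False: plaintext, not TLS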
def removeUserGroups(self, users=None):
    """Removes users' groups.

    Args:
        users (str): A comma delimited list of user names.
            Defaults to ``None``.

    Warning:
        When ``users`` is not provided (``None``), all users
        in the organization will have their groups deleted!

    """
    admin = None
    userCommunity = None
    portal = None
    groupAdmin = None
    user = None
    userCommData = None
    group = None

    try:
        admin = arcrest.manageorg.Administration(securityHandler=self._securityHandler)
        if users is None:
            print ("You have selected to remove all users' groups, you must modify the code to do this")
            usersObj = []
            commUsers = admin.portals.portalSelf.users(start=1, num=100)
            usersObj = commUsers['users']
            return
        else:
            usersObj = []
            userStr = users.split(',')
            for user in userStr:
                try:
                    user = admin.community.users.user(str(user).strip())
                    usersObj.append(user)
                except:
                    print ("%s does not exist" % str(user).strip())

        if usersObj:
            for userCommData in usersObj:
                print ("Loading groups for user: %s" % userCommData.username)
                if userCommData.groups:
                    for group in userCommData.groups:
                        groupObj = admin.community.groups.group(groupId=group['id'])
                        if groupObj.owner == userCommData.username:
                            print (groupObj.delete())
                else:
                    print ("No Groups Found")
    except:
        line, filename, synerror = trace()
        raise common.ArcRestHelperError({
            "function": "removeUserGroups",
            "line": line,
            "filename": filename,
            "synerror": synerror,
        }
        )
    finally:
        admin = None
        userCommunity = None
        portal = None
        groupAdmin = None
        user = None
        userCommData = None
        group = None

        del admin
        del userCommunity
        del portal
        del groupAdmin
        del user
        del userCommData
        del group

        gc.collect()
def function[removeUserGroups, parameter[self, users]]: constant[Removes users' groups. Args: users (str): A comma delimited list of user names. Defaults to ``None``. Warning: When ``users`` is not provided (``None``), all users in the organization will have their groups deleted! ] variable[admin] assign[=] constant[None] variable[userCommunity] assign[=] constant[None] variable[portal] assign[=] constant[None] variable[groupAdmin] assign[=] constant[None] variable[user] assign[=] constant[None] variable[userCommData] assign[=] constant[None] variable[group] assign[=] constant[None] <ast.Try object at 0x7da1b128a0e0>
keyword[def] identifier[removeUserGroups] ( identifier[self] , identifier[users] = keyword[None] ): literal[string] identifier[admin] = keyword[None] identifier[userCommunity] = keyword[None] identifier[portal] = keyword[None] identifier[groupAdmin] = keyword[None] identifier[user] = keyword[None] identifier[userCommData] = keyword[None] identifier[group] = keyword[None] keyword[try] : identifier[admin] = identifier[arcrest] . identifier[manageorg] . identifier[Administration] ( identifier[securityHandler] = identifier[self] . identifier[_securityHandler] ) keyword[if] identifier[users] keyword[is] keyword[None] : identifier[print] ( literal[string] ) identifier[usersObj] =[] identifier[commUsers] = identifier[admin] . identifier[portals] . identifier[portalSelf] . identifier[users] ( identifier[start] = literal[int] , identifier[num] = literal[int] ) identifier[usersObj] = identifier[commUsers] [ literal[string] ] keyword[return] keyword[else] : identifier[usersObj] =[] identifier[userStr] = identifier[users] . identifier[split] ( literal[string] ) keyword[for] identifier[user] keyword[in] identifier[userStr] : keyword[try] : identifier[user] = identifier[admin] . identifier[community] . identifier[users] . identifier[user] ( identifier[str] ( identifier[user] ). identifier[strip] ()) identifier[usersObj] . identifier[append] ( identifier[user] ) keyword[except] : identifier[print] ( literal[string] % identifier[str] ( identifier[user] ). identifier[strip] ()) keyword[if] identifier[usersObj] : keyword[for] identifier[userCommData] keyword[in] identifier[usersObj] : identifier[print] ( literal[string] % identifier[userCommData] . identifier[username] ) keyword[if] identifier[userCommData] . identifier[groups] : keyword[for] identifier[group] keyword[in] identifier[userCommData] . identifier[groups] : identifier[groupObj] = identifier[admin] . identifier[community] . identifier[groups] . identifier[group] ( identifier[groupId] = identifier[group] [ literal[string] ]) keyword[if] identifier[groupObj] . identifier[owner] == identifier[userCommData] . identifier[username] : identifier[print] ( identifier[groupObj] . identifier[delete] ()) keyword[else] : identifier[print] ( literal[string] ) keyword[except] : identifier[line] , identifier[filename] , identifier[synerror] = identifier[trace] () keyword[raise] identifier[common] . identifier[ArcRestHelperError] ({ literal[string] : literal[string] , literal[string] : identifier[line] , literal[string] : identifier[filename] , literal[string] : identifier[synerror] , } ) keyword[finally] : identifier[admin] = keyword[None] identifier[userCommunity] = keyword[None] identifier[portal] = keyword[None] identifier[groupAdmin] = keyword[None] identifier[user] = keyword[None] identifier[userCommData] = keyword[None] identifier[group] = keyword[None] keyword[del] identifier[admin] keyword[del] identifier[userCommunity] keyword[del] identifier[portal] keyword[del] identifier[groupAdmin] keyword[del] identifier[user] keyword[del] identifier[userCommData] keyword[del] identifier[group] identifier[gc] . identifier[collect] ()
def removeUserGroups(self, users=None):
    """Removes users' groups.

    Args:
        users (str): A comma delimited list of user names.
            Defaults to ``None``.

    Warning:
        When ``users`` is not provided (``None``), all users
        in the organization will have their groups deleted!

    """
    admin = None
    userCommunity = None
    portal = None
    groupAdmin = None
    user = None
    userCommData = None
    group = None
    try:
        admin = arcrest.manageorg.Administration(securityHandler=self._securityHandler)
        if users is None:
            print("You have selected to remove all users' groups, you must modify the code to do this")
            usersObj = []
            commUsers = admin.portals.portalSelf.users(start=1, num=100)
            usersObj = commUsers['users']
            return # depends on [control=['if'], data=[]]
        else:
            usersObj = []
            userStr = users.split(',')
            for user in userStr:
                try:
                    user = admin.community.users.user(str(user).strip())
                    usersObj.append(user) # depends on [control=['try'], data=[]]
                except:
                    print('%s does not exist' % str(user).strip()) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['user']]
        if usersObj:
            for userCommData in usersObj:
                print('Loading groups for user: %s' % userCommData.username)
                if userCommData.groups:
                    for group in userCommData.groups:
                        groupObj = admin.community.groups.group(groupId=group['id'])
                        if groupObj.owner == userCommData.username:
                            print(groupObj.delete()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['group']] # depends on [control=['if'], data=[]]
                else:
                    print('No Groups Found') # depends on [control=['for'], data=['userCommData']] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
    except:
        (line, filename, synerror) = trace()
        raise common.ArcRestHelperError({'function': 'removeUserGroups', 'line': line, 'filename': filename, 'synerror': synerror}) # depends on [control=['except'], data=[]]
    finally:
        admin = None
        userCommunity = None
        portal = None
        groupAdmin = None
        user = None
        userCommData = None
        group = None
        del admin
        del userCommunity
        del portal
        del groupAdmin
        del user
        del userCommData
        del group
        gc.collect()
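Hedged usage sketch; helper is a hypothetical instance of the surrounding helper class, already constructed with a valid security handler. Only groups owned by the named users are deleted.

helper.removeUserGroups(users='jdoe, asmith')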
def update_initiators(self, iqns=None, wwns=None): """Primarily for puppet-unity use. Update the iSCSI and FC initiators if needed. """ # First get current iqns iqns = set(iqns) if iqns else set() current_iqns = set() if self.iscsi_host_initiators: current_iqns = {initiator.initiator_id for initiator in self.iscsi_host_initiators} # Then get current wwns wwns = set(wwns) if wwns else set() current_wwns = set() if self.fc_host_initiators: current_wwns = {initiator.initiator_id for initiator in self.fc_host_initiators} updater = UnityHostInitiatorUpdater( self, current_iqns | current_wwns, iqns | wwns) return updater.update()
def function[update_initiators, parameter[self, iqns, wwns]]: constant[Primarily for puppet-unity use. Update the iSCSI and FC initiators if needed. ] variable[iqns] assign[=] <ast.IfExp object at 0x7da20c6c5a20> variable[current_iqns] assign[=] call[name[set], parameter[]] if name[self].iscsi_host_initiators begin[:] variable[current_iqns] assign[=] <ast.SetComp object at 0x7da20c6c5510> variable[wwns] assign[=] <ast.IfExp object at 0x7da20c6c54e0> variable[current_wwns] assign[=] call[name[set], parameter[]] if name[self].fc_host_initiators begin[:] variable[current_wwns] assign[=] <ast.SetComp object at 0x7da18f58e740> variable[updater] assign[=] call[name[UnityHostInitiatorUpdater], parameter[name[self], binary_operation[name[current_iqns] <ast.BitOr object at 0x7da2590d6aa0> name[current_wwns]], binary_operation[name[iqns] <ast.BitOr object at 0x7da2590d6aa0> name[wwns]]]] return[call[name[updater].update, parameter[]]]
keyword[def] identifier[update_initiators] ( identifier[self] , identifier[iqns] = keyword[None] , identifier[wwns] = keyword[None] ): literal[string] identifier[iqns] = identifier[set] ( identifier[iqns] ) keyword[if] identifier[iqns] keyword[else] identifier[set] () identifier[current_iqns] = identifier[set] () keyword[if] identifier[self] . identifier[iscsi_host_initiators] : identifier[current_iqns] ={ identifier[initiator] . identifier[initiator_id] keyword[for] identifier[initiator] keyword[in] identifier[self] . identifier[iscsi_host_initiators] } identifier[wwns] = identifier[set] ( identifier[wwns] ) keyword[if] identifier[wwns] keyword[else] identifier[set] () identifier[current_wwns] = identifier[set] () keyword[if] identifier[self] . identifier[fc_host_initiators] : identifier[current_wwns] ={ identifier[initiator] . identifier[initiator_id] keyword[for] identifier[initiator] keyword[in] identifier[self] . identifier[fc_host_initiators] } identifier[updater] = identifier[UnityHostInitiatorUpdater] ( identifier[self] , identifier[current_iqns] | identifier[current_wwns] , identifier[iqns] | identifier[wwns] ) keyword[return] identifier[updater] . identifier[update] ()
def update_initiators(self, iqns=None, wwns=None): """Primarily for puppet-unity use. Update the iSCSI and FC initiators if needed. """ # First get current iqns iqns = set(iqns) if iqns else set() current_iqns = set() if self.iscsi_host_initiators: current_iqns = {initiator.initiator_id for initiator in self.iscsi_host_initiators} # depends on [control=['if'], data=[]] # Then get current wwns wwns = set(wwns) if wwns else set() current_wwns = set() if self.fc_host_initiators: current_wwns = {initiator.initiator_id for initiator in self.fc_host_initiators} # depends on [control=['if'], data=[]] updater = UnityHostInitiatorUpdater(self, current_iqns | current_wwns, iqns | wwns) return updater.update()
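Hedged usage sketch; host is a hypothetical UnityHost instance and the initiator strings are placeholder values. Initiators missing from the lists are removed, new ones are added.

host.update_initiators(
    iqns=['iqn.1993-08.org.debian:01:host-a'],
    wwns=['50:01:43:80:18:6B:3F:65'],
)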
def flattencopy(lst):
    """flatten and return a copy of the list; inefficient on large lists"""
    # modified from
    # http://stackoverflow.com/questions/2158395/flatten-an-irregular-list-of-lists-in-python
    thelist = copy.deepcopy(lst)
    list_is_nested = True
    while list_is_nested: # outer loop
        keepchecking = False
        atemp = []
        for element in thelist: # inner loop
            if isinstance(element, list):
                atemp.extend(element)
                keepchecking = True
            else:
                atemp.append(element)
        list_is_nested = keepchecking # determine if outer loop exits
        thelist = atemp[:]
    return thelist
def function[flattencopy, parameter[lst]]:
    constant[flatten and return a copy of the list; inefficient on large lists]
    variable[thelist] assign[=] call[name[copy].deepcopy, parameter[name[lst]]]
    variable[list_is_nested] assign[=] constant[True]
    while name[list_is_nested] begin[:]
        variable[keepchecking] assign[=] constant[False]
        variable[atemp] assign[=] list[[]]
        for taget[name[element]] in starred[name[thelist]] begin[:]
            if call[name[isinstance], parameter[name[element], name[list]]] begin[:]
                call[name[atemp].extend, parameter[name[element]]]
                variable[keepchecking] assign[=] constant[True]
        variable[list_is_nested] assign[=] name[keepchecking]
        variable[thelist] assign[=] call[name[atemp]][<ast.Slice object at 0x7da20c6c47f0>]
    return[name[thelist]]
keyword[def] identifier[flattencopy] ( identifier[lst] ): literal[string] identifier[thelist] = identifier[copy] . identifier[deepcopy] ( identifier[lst] ) identifier[list_is_nested] = keyword[True] keyword[while] identifier[list_is_nested] : identifier[keepchecking] = keyword[False] identifier[atemp] =[] keyword[for] identifier[element] keyword[in] identifier[thelist] : keyword[if] identifier[isinstance] ( identifier[element] , identifier[list] ): identifier[atemp] . identifier[extend] ( identifier[element] ) identifier[keepchecking] = keyword[True] keyword[else] : identifier[atemp] . identifier[append] ( identifier[element] ) identifier[list_is_nested] = identifier[keepchecking] identifier[thelist] = identifier[atemp] [:] keyword[return] identifier[thelist]
def flattencopy(lst): """flatten and return a copy of the list; inefficient on large lists""" # modified from # http://stackoverflow.com/questions/2158395/flatten-an-irregular-list-of-lists-in-python thelist = copy.deepcopy(lst) list_is_nested = True while list_is_nested: # outer loop keepchecking = False atemp = [] for element in thelist: # inner loop if isinstance(element, list): atemp.extend(element) keepchecking = True # depends on [control=['if'], data=[]] else: atemp.append(element) # depends on [control=['for'], data=['element']] list_is_nested = keepchecking # determine if outer loop exits thelist = atemp[:] # depends on [control=['while'], data=[]] return thelist
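A quick demonstration, assuming flattencopy and its copy import are in scope. The deepcopy up front is what keeps the input list untouched.

nested = [1, [2, [3, 4]], [5]]
print(flattencopy(nested))  # [1, 2, 3, 4, 5]
print(nested)               # [1, [2, [3, 4]], [5]] -- unchanged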
def neighbor_atoms(self, atom_symbol=None): """Access neighbor atoms. :param str atom_symbol: Atom symbol. :return: List of neighbor atoms. :rtype: :py:class:`list`. """ if not atom_symbol: return self.neighbors else: return [atom for atom in self.neighbors if atom['atom_symbol'] == atom_symbol]
def function[neighbor_atoms, parameter[self, atom_symbol]]: constant[Access neighbor atoms. :param str atom_symbol: Atom symbol. :return: List of neighbor atoms. :rtype: :py:class:`list`. ] if <ast.UnaryOp object at 0x7da1b2462260> begin[:] return[name[self].neighbors]
keyword[def] identifier[neighbor_atoms] ( identifier[self] , identifier[atom_symbol] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[atom_symbol] : keyword[return] identifier[self] . identifier[neighbors] keyword[else] : keyword[return] [ identifier[atom] keyword[for] identifier[atom] keyword[in] identifier[self] . identifier[neighbors] keyword[if] identifier[atom] [ literal[string] ]== identifier[atom_symbol] ]
def neighbor_atoms(self, atom_symbol=None): """Access neighbor atoms. :param str atom_symbol: Atom symbol. :return: List of neighbor atoms. :rtype: :py:class:`list`. """ if not atom_symbol: return self.neighbors # depends on [control=['if'], data=[]] else: return [atom for atom in self.neighbors if atom['atom_symbol'] == atom_symbol]
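Hedged usage sketch; atom is a hypothetical instance whose neighbors attribute is a list of dicts carrying an 'atom_symbol' key, which is the only shape the filter above relies on.

carbons = atom.neighbor_atoms('C')  # only the carbon neighbors
everything = atom.neighbor_atoms()  # falsy symbol falls through to self.neighbors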
def update_elements(self, line, column, charcount, docdelta=0):
    """Updates all the element instances that are children of this module
    to have new start and end charindex values based on an operation that
    was performed on the module source code.

    :arg line: the line number of the *start* of the operation.
    :arg column: the column number of the start of the operation.
    :arg charcount: the total character length change from the operation.
    :arg docdelta: the character length of changes made to types/execs
      that are children of the module whose docstrings external to their
      definitions were changed.
    """
    target = self.charindex(line, column) + charcount
    #We are looking for all the instances whose *start* attribute lies
    #after this target. Then we update them all by that amount.
    #However, we need to be careful because of docdelta. If an element's
    #docstring contains the target, we don't want to update it.
    if line < self.contains_index:
        for t in self.types:
            if self._update_char_check(self.types[t], target, docdelta):
                self._element_charfix(self.types[t], charcount)
        for m in self.members:
            if self.members[m].start > target:
                self.members[m].start += charcount
                self.members[m].end += charcount
        self._contains_index = None
    else:
        for iexec in self.executables:
            if self._update_char_check(self.executables[iexec], target, docdelta):
                self._element_charfix(self.executables[iexec], charcount)
def function[update_elements, parameter[self, line, column, charcount, docdelta]]: constant[Updates all the element instances that are children of this module to have new start and end charindex values based on an operation that was performed on the module source code. :arg line: the line number of the *start* of the operation. :arg column: the column number of the start of the operation. :arg charcount: the total character length change from the operation. :arg docdelta: the character length of changes made to types/execs that are children of the module whose docstrings external to their definitions were changed. ] variable[target] assign[=] binary_operation[call[name[self].charindex, parameter[name[line], name[column]]] + name[charcount]] if compare[name[line] less[<] name[self].contains_index] begin[:] for taget[name[t]] in starred[name[self].types] begin[:] if call[name[self]._update_char_check, parameter[call[name[self].types][name[t]], name[target], name[docdelta]]] begin[:] call[name[self]._element_charfix, parameter[call[name[self].types][name[t]], name[charcount]]] for taget[name[m]] in starred[name[self].members] begin[:] if compare[call[name[self].members][name[m]].start greater[>] name[target]] begin[:] <ast.AugAssign object at 0x7da1b26596c0> <ast.AugAssign object at 0x7da1b265ab30> name[self]._contains_index assign[=] constant[None]
keyword[def] identifier[update_elements] ( identifier[self] , identifier[line] , identifier[column] , identifier[charcount] , identifier[docdelta] = literal[int] ): literal[string] identifier[target] = identifier[self] . identifier[charindex] ( identifier[line] , identifier[column] )+ identifier[charcount] keyword[if] identifier[line] < identifier[self] . identifier[contains_index] : keyword[for] identifier[t] keyword[in] identifier[self] . identifier[types] : keyword[if] identifier[self] . identifier[_update_char_check] ( identifier[self] . identifier[types] [ identifier[t] ], identifier[target] , identifier[docdelta] ): identifier[self] . identifier[_element_charfix] ( identifier[self] . identifier[types] [ identifier[t] ], identifier[charcount] ) keyword[for] identifier[m] keyword[in] identifier[self] . identifier[members] : keyword[if] identifier[self] . identifier[members] [ identifier[m] ]. identifier[start] > identifier[target] : identifier[self] . identifier[members] [ identifier[m] ]. identifier[start] += identifier[charcount] identifier[self] . identifier[members] [ identifier[m] ]. identifier[end] += identifier[charcount] identifier[self] . identifier[_contains_index] = keyword[None] keyword[else] : keyword[for] identifier[iexec] keyword[in] identifier[self] . identifier[executables] : keyword[if] identifier[self] . identifier[_update_char_check] ( identifier[self] . identifier[executables] [ identifier[iexec] ], identifier[target] , identifier[docdelta] ): identifier[self] . identifier[_element_charfix] ( identifier[self] . identifier[executables] [ identifier[iexec] ], identifier[charcount] )
def update_elements(self, line, column, charcount, docdelta=0):
    """Updates all the element instances that are children of this module
    to have new start and end charindex values based on an operation that
    was performed on the module source code.

    :arg line: the line number of the *start* of the operation.
    :arg column: the column number of the start of the operation.
    :arg charcount: the total character length change from the operation.
    :arg docdelta: the character length of changes made to types/execs
      that are children of the module whose docstrings external to their
      definitions were changed.
    """
    target = self.charindex(line, column) + charcount
    #We are looking for all the instances whose *start* attribute lies
    #after this target. Then we update them all by that amount.
    #However, we need to be careful because of docdelta. If an element's
    #docstring contains the target, we don't want to update it.
    if line < self.contains_index:
        for t in self.types:
            if self._update_char_check(self.types[t], target, docdelta):
                self._element_charfix(self.types[t], charcount) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['t']]
        for m in self.members:
            if self.members[m].start > target:
                self.members[m].start += charcount
                self.members[m].end += charcount # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
        self._contains_index = None # depends on [control=['if'], data=[]]
    else:
        for iexec in self.executables:
            if self._update_char_check(self.executables[iexec], target, docdelta):
                self._element_charfix(self.executables[iexec], charcount) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['iexec']]
def delete_dependency(self, from_task_name, to_task_name): """ Delete a dependency between two tasks. """ logger.debug('Deleting dependency from {0} to {1}'.format(from_task_name, to_task_name)) if not self.state.allow_change_graph: raise DagobahError("job's graph is immutable in its current state: %s" % self.state.status) self.delete_edge(from_task_name, to_task_name) self.commit()
def function[delete_dependency, parameter[self, from_task_name, to_task_name]]: constant[ Delete a dependency between two tasks. ] call[name[logger].debug, parameter[call[constant[Deleting dependency from {0} to {1}].format, parameter[name[from_task_name], name[to_task_name]]]]] if <ast.UnaryOp object at 0x7da1b0cf68f0> begin[:] <ast.Raise object at 0x7da1b0cf6200> call[name[self].delete_edge, parameter[name[from_task_name], name[to_task_name]]] call[name[self].commit, parameter[]]
keyword[def] identifier[delete_dependency] ( identifier[self] , identifier[from_task_name] , identifier[to_task_name] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[from_task_name] , identifier[to_task_name] )) keyword[if] keyword[not] identifier[self] . identifier[state] . identifier[allow_change_graph] : keyword[raise] identifier[DagobahError] ( literal[string] % identifier[self] . identifier[state] . identifier[status] ) identifier[self] . identifier[delete_edge] ( identifier[from_task_name] , identifier[to_task_name] ) identifier[self] . identifier[commit] ()
def delete_dependency(self, from_task_name, to_task_name): """ Delete a dependency between two tasks. """ logger.debug('Deleting dependency from {0} to {1}'.format(from_task_name, to_task_name)) if not self.state.allow_change_graph: raise DagobahError("job's graph is immutable in its current state: %s" % self.state.status) # depends on [control=['if'], data=[]] self.delete_edge(from_task_name, to_task_name) self.commit()
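Hedged usage sketch; job is a hypothetical job instance with both tasks already added, and the add_dependency counterpart is assumed to exist.

job.add_dependency('extract', 'load')
job.delete_dependency('extract', 'load')  # DagobahError if the graph is immutable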
def initialize(self, configfile=None): """ Initialize the module """ method = "initialize" A = None metadata = {method: configfile} send_array(self.socket, A, metadata) A, metadata = recv_array( self.socket, poll=self.poll, poll_timeout=self.poll_timeout, flags=self.zmq_flags)
def function[initialize, parameter[self, configfile]]: constant[ Initialize the module ] variable[method] assign[=] constant[initialize] variable[A] assign[=] constant[None] variable[metadata] assign[=] dictionary[[<ast.Name object at 0x7da20c794a90>], [<ast.Name object at 0x7da20c7959f0>]] call[name[send_array], parameter[name[self].socket, name[A], name[metadata]]] <ast.Tuple object at 0x7da20c794430> assign[=] call[name[recv_array], parameter[name[self].socket]]
keyword[def] identifier[initialize] ( identifier[self] , identifier[configfile] = keyword[None] ): literal[string] identifier[method] = literal[string] identifier[A] = keyword[None] identifier[metadata] ={ identifier[method] : identifier[configfile] } identifier[send_array] ( identifier[self] . identifier[socket] , identifier[A] , identifier[metadata] ) identifier[A] , identifier[metadata] = identifier[recv_array] ( identifier[self] . identifier[socket] , identifier[poll] = identifier[self] . identifier[poll] , identifier[poll_timeout] = identifier[self] . identifier[poll_timeout] , identifier[flags] = identifier[self] . identifier[zmq_flags] )
def initialize(self, configfile=None): """ Initialize the module """ method = 'initialize' A = None metadata = {method: configfile} send_array(self.socket, A, metadata) (A, metadata) = recv_array(self.socket, poll=self.poll, poll_timeout=self.poll_timeout, flags=self.zmq_flags)
def get_activity_ids_by_objective_bank(self, objective_bank_id): """Gets the list of ``Activity`` ``Ids`` associated with an ``ObjectiveBank``. arg: objective_bank_id (osid.id.Id): ``Id`` of the ``ObjectiveBank`` return: (osid.id.IdList) - list of related activity ``Ids`` raise: NotFound - ``objective_bank_id`` is not found raise: NullArgument - ``objective_bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceBinSession.get_resource_ids_by_bin id_list = [] for activity in self.get_activities_by_objective_bank(objective_bank_id): id_list.append(activity.get_id()) return IdList(id_list)
def function[get_activity_ids_by_objective_bank, parameter[self, objective_bank_id]]: constant[Gets the list of ``Activity`` ``Ids`` associated with an ``ObjectiveBank``. arg: objective_bank_id (osid.id.Id): ``Id`` of the ``ObjectiveBank`` return: (osid.id.IdList) - list of related activity ``Ids`` raise: NotFound - ``objective_bank_id`` is not found raise: NullArgument - ``objective_bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* ] variable[id_list] assign[=] list[[]] for taget[name[activity]] in starred[call[name[self].get_activities_by_objective_bank, parameter[name[objective_bank_id]]]] begin[:] call[name[id_list].append, parameter[call[name[activity].get_id, parameter[]]]] return[call[name[IdList], parameter[name[id_list]]]]
keyword[def] identifier[get_activity_ids_by_objective_bank] ( identifier[self] , identifier[objective_bank_id] ): literal[string] identifier[id_list] =[] keyword[for] identifier[activity] keyword[in] identifier[self] . identifier[get_activities_by_objective_bank] ( identifier[objective_bank_id] ): identifier[id_list] . identifier[append] ( identifier[activity] . identifier[get_id] ()) keyword[return] identifier[IdList] ( identifier[id_list] )
def get_activity_ids_by_objective_bank(self, objective_bank_id): """Gets the list of ``Activity`` ``Ids`` associated with an ``ObjectiveBank``. arg: objective_bank_id (osid.id.Id): ``Id`` of the ``ObjectiveBank`` return: (osid.id.IdList) - list of related activity ``Ids`` raise: NotFound - ``objective_bank_id`` is not found raise: NullArgument - ``objective_bank_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceBinSession.get_resource_ids_by_bin id_list = [] for activity in self.get_activities_by_objective_bank(objective_bank_id): id_list.append(activity.get_id()) # depends on [control=['for'], data=['activity']] return IdList(id_list)
def fromcsv(source=None, encoding=None, errors='strict', header=None, **csvargs): """ Extract a table from a delimited file. E.g.:: >>> import petl as etl >>> import csv >>> # set up a CSV file to demonstrate with ... table1 = [['foo', 'bar'], ... ['a', 1], ... ['b', 2], ... ['c', 2]] >>> with open('example.csv', 'w') as f: ... writer = csv.writer(f) ... writer.writerows(table1) ... >>> # now demonstrate the use of fromcsv() ... table2 = etl.fromcsv('example.csv') >>> table2 +-----+-----+ | foo | bar | +=====+=====+ | 'a' | '1' | +-----+-----+ | 'b' | '2' | +-----+-----+ | 'c' | '2' | +-----+-----+ The `source` argument is the path of the delimited file, all other keyword arguments are passed to :func:`csv.reader`. So, e.g., to override the delimiter from the default CSV dialect, provide the `delimiter` keyword argument. Note that all data values are strings, and any intended numeric values will need to be converted, see also :func:`petl.transform.conversions.convert`. """ source = read_source_from_arg(source) csvargs.setdefault('dialect', 'excel') return fromcsv_impl(source=source, encoding=encoding, errors=errors, header=header, **csvargs)
def function[fromcsv, parameter[source, encoding, errors, header]]: constant[ Extract a table from a delimited file. E.g.:: >>> import petl as etl >>> import csv >>> # set up a CSV file to demonstrate with ... table1 = [['foo', 'bar'], ... ['a', 1], ... ['b', 2], ... ['c', 2]] >>> with open('example.csv', 'w') as f: ... writer = csv.writer(f) ... writer.writerows(table1) ... >>> # now demonstrate the use of fromcsv() ... table2 = etl.fromcsv('example.csv') >>> table2 +-----+-----+ | foo | bar | +=====+=====+ | 'a' | '1' | +-----+-----+ | 'b' | '2' | +-----+-----+ | 'c' | '2' | +-----+-----+ The `source` argument is the path of the delimited file, all other keyword arguments are passed to :func:`csv.reader`. So, e.g., to override the delimiter from the default CSV dialect, provide the `delimiter` keyword argument. Note that all data values are strings, and any intended numeric values will need to be converted, see also :func:`petl.transform.conversions.convert`. ] variable[source] assign[=] call[name[read_source_from_arg], parameter[name[source]]] call[name[csvargs].setdefault, parameter[constant[dialect], constant[excel]]] return[call[name[fromcsv_impl], parameter[]]]
keyword[def] identifier[fromcsv] ( identifier[source] = keyword[None] , identifier[encoding] = keyword[None] , identifier[errors] = literal[string] , identifier[header] = keyword[None] , ** identifier[csvargs] ): literal[string] identifier[source] = identifier[read_source_from_arg] ( identifier[source] ) identifier[csvargs] . identifier[setdefault] ( literal[string] , literal[string] ) keyword[return] identifier[fromcsv_impl] ( identifier[source] = identifier[source] , identifier[encoding] = identifier[encoding] , identifier[errors] = identifier[errors] , identifier[header] = identifier[header] ,** identifier[csvargs] )
def fromcsv(source=None, encoding=None, errors='strict', header=None, **csvargs): """ Extract a table from a delimited file. E.g.:: >>> import petl as etl >>> import csv >>> # set up a CSV file to demonstrate with ... table1 = [['foo', 'bar'], ... ['a', 1], ... ['b', 2], ... ['c', 2]] >>> with open('example.csv', 'w') as f: ... writer = csv.writer(f) ... writer.writerows(table1) ... >>> # now demonstrate the use of fromcsv() ... table2 = etl.fromcsv('example.csv') >>> table2 +-----+-----+ | foo | bar | +=====+=====+ | 'a' | '1' | +-----+-----+ | 'b' | '2' | +-----+-----+ | 'c' | '2' | +-----+-----+ The `source` argument is the path of the delimited file, all other keyword arguments are passed to :func:`csv.reader`. So, e.g., to override the delimiter from the default CSV dialect, provide the `delimiter` keyword argument. Note that all data values are strings, and any intended numeric values will need to be converted, see also :func:`petl.transform.conversions.convert`. """ source = read_source_from_arg(source) csvargs.setdefault('dialect', 'excel') return fromcsv_impl(source=source, encoding=encoding, errors=errors, header=header, **csvargs)
def set_placeholder(self, text): """Set the placeholder text that will be displayed when the text is empty and the widget is out of focus :param text: The text for the placeholder :type text: str :raises: None """ if self._placeholder != text: self._placeholder = text if not self.hasFocus(): self.update()
def function[set_placeholder, parameter[self, text]]: constant[Set the placeholder text that will be displayed when the text is empty and the widget is out of focus :param text: The text for the placeholder :type text: str :raises: None ] if compare[name[self]._placeholder not_equal[!=] name[text]] begin[:] name[self]._placeholder assign[=] name[text] if <ast.UnaryOp object at 0x7da20e9544f0> begin[:] call[name[self].update, parameter[]]
keyword[def] identifier[set_placeholder] ( identifier[self] , identifier[text] ): literal[string] keyword[if] identifier[self] . identifier[_placeholder] != identifier[text] : identifier[self] . identifier[_placeholder] = identifier[text] keyword[if] keyword[not] identifier[self] . identifier[hasFocus] (): identifier[self] . identifier[update] ()
def set_placeholder(self, text): """Set the placeholder text that will be displayed when the text is empty and the widget is out of focus :param text: The text for the placeholder :type text: str :raises: None """ if self._placeholder != text: self._placeholder = text if not self.hasFocus(): self.update() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['text']]
def get_lldp_neighbors_detail(self, interface=""): """Detailed view of the LLDP neighbors.""" lldp_neighbors = defaultdict(list) lldp_table = junos_views.junos_lldp_neighbors_detail_table(self.device) if not interface: try: lldp_table.get() except RpcError as rpcerr: # this assumes the library runs in an environment # able to handle logs # otherwise, the user just won't see this happening log.error("Unable to retrieve the LLDP neighbors information:") log.error(py23_compat.text_type(rpcerr)) return {} interfaces = lldp_table.get().keys() else: interfaces = [interface] if self.device.facts.get("switch_style") == "VLAN": lldp_table.GET_RPC = "get-lldp-interface-neighbors-information" interface_variable = "interface_name" alt_rpc = "get-lldp-interface-neighbors" alt_interface_variable = "interface_device" else: lldp_table.GET_RPC = "get-lldp-interface-neighbors" interface_variable = "interface_device" alt_rpc = "get-lldp-interface-neighbors-information" alt_interface_variable = "interface_name" for interface in interfaces: try: interface_args = {interface_variable: interface} lldp_table.get(**interface_args) except RpcError as e: if "syntax error" in e.message: # Looks like we need to call a different RPC on this device # Switch to the alternate style lldp_table.GET_RPC = alt_rpc interface_variable = alt_interface_variable # Retry interface_args = {interface_variable: interface} lldp_table.get(**interface_args) for item in lldp_table: lldp_neighbors[interface].append( { "parent_interface": item.parent_interface, "remote_port": item.remote_port or "", "remote_chassis_id": napalm.base.helpers.convert( napalm.base.helpers.mac, item.remote_chassis_id, item.remote_chassis_id, ), "remote_port_description": napalm.base.helpers.convert( py23_compat.text_type, item.remote_port_description ), "remote_system_name": item.remote_system_name, "remote_system_description": item.remote_system_description, "remote_system_capab": self._transform_lldp_capab( item.remote_system_capab ), "remote_system_enable_capab": self._transform_lldp_capab( item.remote_system_enable_capab ), } ) return lldp_neighbors
def function[get_lldp_neighbors_detail, parameter[self, interface]]: constant[Detailed view of the LLDP neighbors.] variable[lldp_neighbors] assign[=] call[name[defaultdict], parameter[name[list]]] variable[lldp_table] assign[=] call[name[junos_views].junos_lldp_neighbors_detail_table, parameter[name[self].device]] if <ast.UnaryOp object at 0x7da1b1cc2950> begin[:] <ast.Try object at 0x7da1b1cc0640> variable[interfaces] assign[=] call[call[name[lldp_table].get, parameter[]].keys, parameter[]] if compare[call[name[self].device.facts.get, parameter[constant[switch_style]]] equal[==] constant[VLAN]] begin[:] name[lldp_table].GET_RPC assign[=] constant[get-lldp-interface-neighbors-information] variable[interface_variable] assign[=] constant[interface_name] variable[alt_rpc] assign[=] constant[get-lldp-interface-neighbors] variable[alt_interface_variable] assign[=] constant[interface_device] for taget[name[interface]] in starred[name[interfaces]] begin[:] <ast.Try object at 0x7da1b1cc3f10> for taget[name[item]] in starred[name[lldp_table]] begin[:] call[call[name[lldp_neighbors]][name[interface]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b1cc1630>, <ast.Constant object at 0x7da1b1cc2110>, <ast.Constant object at 0x7da1b1cc07f0>, <ast.Constant object at 0x7da1b1cc3d30>, <ast.Constant object at 0x7da1b1cc1960>, <ast.Constant object at 0x7da1b1cc37f0>, <ast.Constant object at 0x7da1b1cc2200>, <ast.Constant object at 0x7da1b1cc0670>], [<ast.Attribute object at 0x7da1b1cc38e0>, <ast.BoolOp object at 0x7da1b1cc02b0>, <ast.Call object at 0x7da1b1cc3940>, <ast.Call object at 0x7da1b1cc2800>, <ast.Attribute object at 0x7da1b1cc0be0>, <ast.Attribute object at 0x7da1b1cc28c0>, <ast.Call object at 0x7da1b1ceff70>, <ast.Call object at 0x7da1b1ceccd0>]]]] return[name[lldp_neighbors]]
keyword[def] identifier[get_lldp_neighbors_detail] ( identifier[self] , identifier[interface] = literal[string] ): literal[string] identifier[lldp_neighbors] = identifier[defaultdict] ( identifier[list] ) identifier[lldp_table] = identifier[junos_views] . identifier[junos_lldp_neighbors_detail_table] ( identifier[self] . identifier[device] ) keyword[if] keyword[not] identifier[interface] : keyword[try] : identifier[lldp_table] . identifier[get] () keyword[except] identifier[RpcError] keyword[as] identifier[rpcerr] : identifier[log] . identifier[error] ( literal[string] ) identifier[log] . identifier[error] ( identifier[py23_compat] . identifier[text_type] ( identifier[rpcerr] )) keyword[return] {} identifier[interfaces] = identifier[lldp_table] . identifier[get] (). identifier[keys] () keyword[else] : identifier[interfaces] =[ identifier[interface] ] keyword[if] identifier[self] . identifier[device] . identifier[facts] . identifier[get] ( literal[string] )== literal[string] : identifier[lldp_table] . identifier[GET_RPC] = literal[string] identifier[interface_variable] = literal[string] identifier[alt_rpc] = literal[string] identifier[alt_interface_variable] = literal[string] keyword[else] : identifier[lldp_table] . identifier[GET_RPC] = literal[string] identifier[interface_variable] = literal[string] identifier[alt_rpc] = literal[string] identifier[alt_interface_variable] = literal[string] keyword[for] identifier[interface] keyword[in] identifier[interfaces] : keyword[try] : identifier[interface_args] ={ identifier[interface_variable] : identifier[interface] } identifier[lldp_table] . identifier[get] (** identifier[interface_args] ) keyword[except] identifier[RpcError] keyword[as] identifier[e] : keyword[if] literal[string] keyword[in] identifier[e] . identifier[message] : identifier[lldp_table] . identifier[GET_RPC] = identifier[alt_rpc] identifier[interface_variable] = identifier[alt_interface_variable] identifier[interface_args] ={ identifier[interface_variable] : identifier[interface] } identifier[lldp_table] . identifier[get] (** identifier[interface_args] ) keyword[for] identifier[item] keyword[in] identifier[lldp_table] : identifier[lldp_neighbors] [ identifier[interface] ]. identifier[append] ( { literal[string] : identifier[item] . identifier[parent_interface] , literal[string] : identifier[item] . identifier[remote_port] keyword[or] literal[string] , literal[string] : identifier[napalm] . identifier[base] . identifier[helpers] . identifier[convert] ( identifier[napalm] . identifier[base] . identifier[helpers] . identifier[mac] , identifier[item] . identifier[remote_chassis_id] , identifier[item] . identifier[remote_chassis_id] , ), literal[string] : identifier[napalm] . identifier[base] . identifier[helpers] . identifier[convert] ( identifier[py23_compat] . identifier[text_type] , identifier[item] . identifier[remote_port_description] ), literal[string] : identifier[item] . identifier[remote_system_name] , literal[string] : identifier[item] . identifier[remote_system_description] , literal[string] : identifier[self] . identifier[_transform_lldp_capab] ( identifier[item] . identifier[remote_system_capab] ), literal[string] : identifier[self] . identifier[_transform_lldp_capab] ( identifier[item] . identifier[remote_system_enable_capab] ), } ) keyword[return] identifier[lldp_neighbors]
def get_lldp_neighbors_detail(self, interface=''): """Detailed view of the LLDP neighbors.""" lldp_neighbors = defaultdict(list) lldp_table = junos_views.junos_lldp_neighbors_detail_table(self.device) if not interface: try: lldp_table.get() # depends on [control=['try'], data=[]] except RpcError as rpcerr: # this assumes the library runs in an environment # able to handle logs # otherwise, the user just won't see this happening log.error('Unable to retrieve the LLDP neighbors information:') log.error(py23_compat.text_type(rpcerr)) return {} # depends on [control=['except'], data=['rpcerr']] interfaces = lldp_table.get().keys() # depends on [control=['if'], data=[]] else: interfaces = [interface] if self.device.facts.get('switch_style') == 'VLAN': lldp_table.GET_RPC = 'get-lldp-interface-neighbors-information' interface_variable = 'interface_name' alt_rpc = 'get-lldp-interface-neighbors' alt_interface_variable = 'interface_device' # depends on [control=['if'], data=[]] else: lldp_table.GET_RPC = 'get-lldp-interface-neighbors' interface_variable = 'interface_device' alt_rpc = 'get-lldp-interface-neighbors-information' alt_interface_variable = 'interface_name' for interface in interfaces: try: interface_args = {interface_variable: interface} lldp_table.get(**interface_args) # depends on [control=['try'], data=[]] except RpcError as e: if 'syntax error' in e.message: # Looks like we need to call a different RPC on this device # Switch to the alternate style lldp_table.GET_RPC = alt_rpc interface_variable = alt_interface_variable # Retry interface_args = {interface_variable: interface} lldp_table.get(**interface_args) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] for item in lldp_table: lldp_neighbors[interface].append({'parent_interface': item.parent_interface, 'remote_port': item.remote_port or '', 'remote_chassis_id': napalm.base.helpers.convert(napalm.base.helpers.mac, item.remote_chassis_id, item.remote_chassis_id), 'remote_port_description': napalm.base.helpers.convert(py23_compat.text_type, item.remote_port_description), 'remote_system_name': item.remote_system_name, 'remote_system_description': item.remote_system_description, 'remote_system_capab': self._transform_lldp_capab(item.remote_system_capab), 'remote_system_enable_capab': self._transform_lldp_capab(item.remote_system_enable_capab)}) # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['interface']] return lldp_neighbors
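A brief usage sketch for the LLDP method above, assuming a NAPALM JunOS driver instance; the hostname and credentials below are hypothetical:

from napalm import get_network_driver

driver = get_network_driver("junos")
with driver("192.0.2.1", "admin", "secret") as device:
    # An empty interface argument means "query every interface".
    details = device.get_lldp_neighbors_detail()
    for iface, neighbors in details.items():
        for neighbor in neighbors:
            print(iface, "->", neighbor["remote_system_name"])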
def loss(logits, labels, batch_size=None):
  """Adds all losses for the model.

  Note the final loss is not returned. Instead, the list of losses is collected
  by slim.losses. The losses are accumulated in tower_loss() and summed to
  calculate the total loss.

  Args:
    logits: List of logits from inference(). Each entry is a 2-D float Tensor.
    labels: Labels from distorted_inputs or inputs(). 1-D tensor
      of shape [batch_size]
    batch_size: integer
  """
  if not batch_size:
    batch_size = FLAGS.batch_size

  # Reshape the labels into a dense Tensor of
  # shape [FLAGS.batch_size, num_classes].
  sparse_labels = tf.reshape(labels, [batch_size, 1])
  indices = tf.reshape(tf.range(batch_size), [batch_size, 1])
  concated = tf.concat(axis=1, values=[indices, sparse_labels])
  num_classes = logits[0].get_shape()[-1].value
  dense_labels = tf.sparse_to_dense(concated,
                                    [batch_size, num_classes],
                                    1.0, 0.0)

  # Cross entropy loss for the main softmax prediction.
  slim.losses.cross_entropy_loss(logits[0],
                                 dense_labels,
                                 label_smoothing=0.1,
                                 weight=1.0)

  # Cross entropy loss for the auxiliary softmax head.
  slim.losses.cross_entropy_loss(logits[1],
                                 dense_labels,
                                 label_smoothing=0.1,
                                 weight=0.4,
                                 scope='aux_loss')
def function[loss, parameter[logits, labels, batch_size]]: constant[Adds all losses for the model. Note the final loss is not returned. Instead, the list of losses are collected by slim.losses. The losses are accumulated in tower_loss() and summed to calculate the total loss. Args: logits: List of logits from inference(). Each entry is a 2-D float Tensor. labels: Labels from distorted_inputs or inputs(). 1-D tensor of shape [batch_size] batch_size: integer ] if <ast.UnaryOp object at 0x7da18f00f250> begin[:] variable[batch_size] assign[=] name[FLAGS].batch_size variable[sparse_labels] assign[=] call[name[tf].reshape, parameter[name[labels], list[[<ast.Name object at 0x7da18f00fa90>, <ast.Constant object at 0x7da18f00ded0>]]]] variable[indices] assign[=] call[name[tf].reshape, parameter[call[name[tf].range, parameter[name[batch_size]]], list[[<ast.Name object at 0x7da18f00f040>, <ast.Constant object at 0x7da18f00f5e0>]]]] variable[concated] assign[=] call[name[tf].concat, parameter[]] variable[num_classes] assign[=] call[call[call[name[logits]][constant[0]].get_shape, parameter[]]][<ast.UnaryOp object at 0x7da18f00d2d0>].value variable[dense_labels] assign[=] call[name[tf].sparse_to_dense, parameter[name[concated], list[[<ast.Name object at 0x7da18f00f940>, <ast.Name object at 0x7da18f00f400>]], constant[1.0], constant[0.0]]] call[name[slim].losses.cross_entropy_loss, parameter[call[name[logits]][constant[0]], name[dense_labels]]] call[name[slim].losses.cross_entropy_loss, parameter[call[name[logits]][constant[1]], name[dense_labels]]]
keyword[def] identifier[loss] ( identifier[logits] , identifier[labels] , identifier[batch_size] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[batch_size] : identifier[batch_size] = identifier[FLAGS] . identifier[batch_size] identifier[sparse_labels] = identifier[tf] . identifier[reshape] ( identifier[labels] ,[ identifier[batch_size] , literal[int] ]) identifier[indices] = identifier[tf] . identifier[reshape] ( identifier[tf] . identifier[range] ( identifier[batch_size] ),[ identifier[batch_size] , literal[int] ]) identifier[concated] = identifier[tf] . identifier[concat] ( identifier[axis] = literal[int] , identifier[values] =[ identifier[indices] , identifier[sparse_labels] ]) identifier[num_classes] = identifier[logits] [ literal[int] ]. identifier[get_shape] ()[- literal[int] ]. identifier[value] identifier[dense_labels] = identifier[tf] . identifier[sparse_to_dense] ( identifier[concated] , [ identifier[batch_size] , identifier[num_classes] ], literal[int] , literal[int] ) identifier[slim] . identifier[losses] . identifier[cross_entropy_loss] ( identifier[logits] [ literal[int] ], identifier[dense_labels] , identifier[label_smoothing] = literal[int] , identifier[weight] = literal[int] ) identifier[slim] . identifier[losses] . identifier[cross_entropy_loss] ( identifier[logits] [ literal[int] ], identifier[dense_labels] , identifier[label_smoothing] = literal[int] , identifier[weight] = literal[int] , identifier[scope] = literal[string] )
def loss(logits, labels, batch_size=None):
    """Adds all losses for the model.

  Note the final loss is not returned. Instead, the list of losses is collected
  by slim.losses. The losses are accumulated in tower_loss() and summed to
  calculate the total loss.

  Args:
    logits: List of logits from inference(). Each entry is a 2-D float Tensor.
    labels: Labels from distorted_inputs or inputs(). 1-D tensor
      of shape [batch_size]
    batch_size: integer
  """
    if not batch_size:
        batch_size = FLAGS.batch_size # depends on [control=['if'], data=[]]
    # Reshape the labels into a dense Tensor of
    # shape [FLAGS.batch_size, num_classes].
    sparse_labels = tf.reshape(labels, [batch_size, 1])
    indices = tf.reshape(tf.range(batch_size), [batch_size, 1])
    concated = tf.concat(axis=1, values=[indices, sparse_labels])
    num_classes = logits[0].get_shape()[-1].value
    dense_labels = tf.sparse_to_dense(concated, [batch_size, num_classes], 1.0, 0.0)
    # Cross entropy loss for the main softmax prediction.
    slim.losses.cross_entropy_loss(logits[0], dense_labels, label_smoothing=0.1, weight=1.0)
    # Cross entropy loss for the auxiliary softmax head.
    slim.losses.cross_entropy_loss(logits[1], dense_labels, label_smoothing=0.1, weight=0.4, scope='aux_loss')
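The sparse-to-dense step above is a one-hot construction; a minimal numpy sketch of the same idea (numpy stands in for the TensorFlow ops, and the shapes are illustrative):

import numpy as np

batch_size, num_classes = 4, 3
labels = np.array([0, 2, 1, 2])             # 1-D labels, as in the function
# Equivalent of tf.sparse_to_dense over (row, label) index pairs:
dense_labels = np.zeros((batch_size, num_classes))
dense_labels[np.arange(batch_size), labels] = 1.0
print(dense_labels)                         # one-hot rows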
def pip_command_output(pip_args):
    """
    Get output (as a string) from pip command

    :param pip_args: list of pip switches to pass
    :return: string with results
    """
    import sys
    import pip
    from io import StringIO
    # as pip will write to stdout we use some nasty hacks
    # to substitute system stdout with our own
    old_stdout = sys.stdout
    sys.stdout = mystdout = StringIO()
    pip.main(pip_args)
    output = mystdout.getvalue()
    mystdout.truncate(0)
    sys.stdout = old_stdout
    return output
def function[pip_command_output, parameter[pip_args]]: constant[ Get output (as a string) from pip command :param pip_args: list o pip switches to pass :return: string with results ] import module[sys] import module[pip] from relative_module[io] import module[StringIO] variable[old_stdout] assign[=] name[sys].stdout name[sys].stdout assign[=] call[name[StringIO], parameter[]] call[name[pip].main, parameter[name[pip_args]]] variable[output] assign[=] call[name[mystdout].getvalue, parameter[]] call[name[mystdout].truncate, parameter[constant[0]]] name[sys].stdout assign[=] name[old_stdout] return[name[output]]
keyword[def] identifier[pip_command_output] ( identifier[pip_args] ): literal[string] keyword[import] identifier[sys] keyword[import] identifier[pip] keyword[from] identifier[io] keyword[import] identifier[StringIO] identifier[old_stdout] = identifier[sys] . identifier[stdout] identifier[sys] . identifier[stdout] = identifier[mystdout] = identifier[StringIO] () identifier[pip] . identifier[main] ( identifier[pip_args] ) identifier[output] = identifier[mystdout] . identifier[getvalue] () identifier[mystdout] . identifier[truncate] ( literal[int] ) identifier[sys] . identifier[stdout] = identifier[old_stdout] keyword[return] identifier[output]
def pip_command_output(pip_args):
    """
    Get output (as a string) from pip command

    :param pip_args: list of pip switches to pass
    :return: string with results
    """
    import sys
    import pip
    from io import StringIO
    # as pip will write to stdout we use some nasty hacks
    # to substitute system stdout with our own
    old_stdout = sys.stdout
    sys.stdout = mystdout = StringIO()
    pip.main(pip_args)
    output = mystdout.getvalue()
    mystdout.truncate(0)
    sys.stdout = old_stdout
    return output
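A short usage sketch; note this relies on pip.main, which only old pip releases expose (modern pip removed the in-process API):

# List installed packages; pip_args mirrors the pip CLI arguments.
listing = pip_command_output(['list'])
for line in listing.splitlines()[:5]:
    print(line)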
def _AddEnumValues(descriptor, cls): """Sets class-level attributes for all enum fields defined in this message. Also exporting a class-level object that can name enum values. Args: descriptor: Descriptor object for this message type. cls: Class we're constructing for this message type. """ for enum_type in descriptor.enum_types: setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) for enum_value in enum_type.values: setattr(cls, enum_value.name, enum_value.number)
def function[_AddEnumValues, parameter[descriptor, cls]]: constant[Sets class-level attributes for all enum fields defined in this message. Also exporting a class-level object that can name enum values. Args: descriptor: Descriptor object for this message type. cls: Class we're constructing for this message type. ] for taget[name[enum_type]] in starred[name[descriptor].enum_types] begin[:] call[name[setattr], parameter[name[cls], name[enum_type].name, call[name[enum_type_wrapper].EnumTypeWrapper, parameter[name[enum_type]]]]] for taget[name[enum_value]] in starred[name[enum_type].values] begin[:] call[name[setattr], parameter[name[cls], name[enum_value].name, name[enum_value].number]]
keyword[def] identifier[_AddEnumValues] ( identifier[descriptor] , identifier[cls] ): literal[string] keyword[for] identifier[enum_type] keyword[in] identifier[descriptor] . identifier[enum_types] : identifier[setattr] ( identifier[cls] , identifier[enum_type] . identifier[name] , identifier[enum_type_wrapper] . identifier[EnumTypeWrapper] ( identifier[enum_type] )) keyword[for] identifier[enum_value] keyword[in] identifier[enum_type] . identifier[values] : identifier[setattr] ( identifier[cls] , identifier[enum_value] . identifier[name] , identifier[enum_value] . identifier[number] )
def _AddEnumValues(descriptor, cls): """Sets class-level attributes for all enum fields defined in this message. Also exporting a class-level object that can name enum values. Args: descriptor: Descriptor object for this message type. cls: Class we're constructing for this message type. """ for enum_type in descriptor.enum_types: setattr(cls, enum_type.name, enum_type_wrapper.EnumTypeWrapper(enum_type)) for enum_value in enum_type.values: setattr(cls, enum_value.name, enum_value.number) # depends on [control=['for'], data=['enum_value']] # depends on [control=['for'], data=['enum_type']]
def format(self, exclude_class=False): """Format this exception as a string including class name. Args: exclude_class (bool): Whether to exclude the exception class name when formatting this exception Returns: string: a multiline string with the message, class name and key value parameters passed to create the exception. """ if exclude_class: msg = self.msg else: msg = "%s: %s" % (self.__class__.__name__, self.msg) if len(self.params) != 0: paramstring = "\n".join([str(key) + ": " + str(val) for key, val in self.params.items()]) msg += "\nAdditional Information:\n" + paramstring return msg
def function[format, parameter[self, exclude_class]]: constant[Format this exception as a string including class name. Args: exclude_class (bool): Whether to exclude the exception class name when formatting this exception Returns: string: a multiline string with the message, class name and key value parameters passed to create the exception. ] if name[exclude_class] begin[:] variable[msg] assign[=] name[self].msg if compare[call[name[len], parameter[name[self].params]] not_equal[!=] constant[0]] begin[:] variable[paramstring] assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da1b0349240>]] <ast.AugAssign object at 0x7da1b026cd90> return[name[msg]]
keyword[def] identifier[format] ( identifier[self] , identifier[exclude_class] = keyword[False] ): literal[string] keyword[if] identifier[exclude_class] : identifier[msg] = identifier[self] . identifier[msg] keyword[else] : identifier[msg] = literal[string] %( identifier[self] . identifier[__class__] . identifier[__name__] , identifier[self] . identifier[msg] ) keyword[if] identifier[len] ( identifier[self] . identifier[params] )!= literal[int] : identifier[paramstring] = literal[string] . identifier[join] ([ identifier[str] ( identifier[key] )+ literal[string] + identifier[str] ( identifier[val] ) keyword[for] identifier[key] , identifier[val] keyword[in] identifier[self] . identifier[params] . identifier[items] ()]) identifier[msg] += literal[string] + identifier[paramstring] keyword[return] identifier[msg]
def format(self, exclude_class=False): """Format this exception as a string including class name. Args: exclude_class (bool): Whether to exclude the exception class name when formatting this exception Returns: string: a multiline string with the message, class name and key value parameters passed to create the exception. """ if exclude_class: msg = self.msg # depends on [control=['if'], data=[]] else: msg = '%s: %s' % (self.__class__.__name__, self.msg) if len(self.params) != 0: paramstring = '\n'.join([str(key) + ': ' + str(val) for (key, val) in self.params.items()]) msg += '\nAdditional Information:\n' + paramstring # depends on [control=['if'], data=[]] return msg
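A runnable sketch of the formatter above; HardwareError is a hypothetical stand-in that stores msg and params the way the method expects:

class HardwareError(Exception):
    """Hypothetical exception following the msg/params convention."""
    format = format  # reuse the module-level format() defined above as a method

    def __init__(self, msg, **params):
        super().__init__(msg)
        self.msg = msg
        self.params = params

err = HardwareError("Device not responding", port="/dev/ttyUSB0", retries=3)
print(err.format())
# HardwareError: Device not responding
# Additional Information:
# port: /dev/ttyUSB0
# retries: 3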
def _create_path(self):
    """Create the path to hold the database, if one was specified."""

    if self.driver == 'sqlite' and 'memory' not in self.dsn and self.dsn != 'sqlite://':
        dir_ = os.path.dirname(self.path)

        if dir_ and not os.path.exists(dir_):
            try:
                # Multiple processes may try to create it, so it could
                # already exist
                os.makedirs(dir_)
            except Exception:
                pass

            if not os.path.exists(dir_):
                raise Exception("Couldn't create directory " + dir_)
def function[_create_path, parameter[self]]: constant[Create the path to hold the database, if one wwas specified.] if <ast.BoolOp object at 0x7da20c795330> begin[:] variable[dir_] assign[=] call[name[os].path.dirname, parameter[name[self].path]] if <ast.BoolOp object at 0x7da20c795c90> begin[:] <ast.Try object at 0x7da20c794ee0> if <ast.UnaryOp object at 0x7da20c796320> begin[:] <ast.Raise object at 0x7da20c7956c0>
keyword[def] identifier[_create_path] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[driver] == literal[string] keyword[and] literal[string] keyword[not] keyword[in] identifier[self] . identifier[dsn] keyword[and] identifier[self] . identifier[dsn] != literal[string] : identifier[dir_] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[self] . identifier[path] ) keyword[if] identifier[dir_] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dir_] ): keyword[try] : identifier[os] . identifier[makedirs] ( identifier[dir_] ) keyword[except] identifier[Exception] : keyword[pass] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[dir_] ): keyword[raise] identifier[Exception] ( literal[string] + identifier[dir_] )
def _create_path(self):
    """Create the path to hold the database, if one was specified.""" 
    if self.driver == 'sqlite' and 'memory' not in self.dsn and (self.dsn != 'sqlite://'):
        dir_ = os.path.dirname(self.path)
        if dir_ and (not os.path.exists(dir_)):
            try:
                # Multiple processes may try to create it, so it could
                # already exist
                os.makedirs(dir_) # depends on [control=['try'], data=[]]
            except Exception:
                pass # depends on [control=['except'], data=[]]
            if not os.path.exists(dir_):
                raise Exception("Couldn't create directory " + dir_) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def process_event(self, event_name: str, data: dict) -> None:
    """
    Process event after epoch

    Args:
        event_name: whether the event is sent after an epoch or a batch.
            Set of values: ``"after_epoch", "after_batch"``
        data: event data (dictionary)

    Returns:
        None
    """
    if event_name == "after_epoch":
        self.epochs_done = data["epochs_done"]
        self.batches_seen = data["batches_seen"]
        self.train_examples_seen = data["train_examples_seen"]
    return
def function[process_event, parameter[self, event_name, data]]: constant[ Process event after epoch Args: event_name: whether event is send after epoch or batch. Set of values: ``"after_epoch", "after_batch"`` data: event data (dictionary) Returns: None ] if compare[name[event_name] equal[==] constant[after_epoch]] begin[:] name[self].epochs_done assign[=] call[name[data]][constant[epochs_done]] name[self].batches_seen assign[=] call[name[data]][constant[batches_seen]] name[self].train_examples_seen assign[=] call[name[data]][constant[train_examples_seen]] return[None]
keyword[def] identifier[process_event] ( identifier[self] , identifier[event_name] : identifier[str] , identifier[data] : identifier[dict] )-> keyword[None] : literal[string] keyword[if] identifier[event_name] == literal[string] : identifier[self] . identifier[epochs_done] = identifier[data] [ literal[string] ] identifier[self] . identifier[batches_seen] = identifier[data] [ literal[string] ] identifier[self] . identifier[train_examples_seen] = identifier[data] [ literal[string] ] keyword[return]
def process_event(self, event_name: str, data: dict) -> None:
    """
    Process event after epoch

    Args:
        event_name: whether the event is sent after an epoch or a batch.
            Set of values: ``"after_epoch", "after_batch"``
        data: event data (dictionary)

    Returns:
        None
    """
    if event_name == 'after_epoch':
        self.epochs_done = data['epochs_done']
        self.batches_seen = data['batches_seen']
        self.train_examples_seen = data['train_examples_seen'] # depends on [control=['if'], data=[]]
    return
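Driving the hook by hand, with a hypothetical component class that simply mixes in the method above:

class TrainerComponent:
    process_event = process_event  # reuse the method defined above

c = TrainerComponent()
c.process_event("after_epoch", {"epochs_done": 3,
                                "batches_seen": 1200,
                                "train_examples_seen": 38400})
print(c.epochs_done, c.batches_seen, c.train_examples_seen)  # 3 1200 38400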
def obfn_fvar(self): """Variable to be evaluated in computing regularisation term, depending on 'fEvalX' option value. """ if self.opt['fEvalX']: return self.X else: return self.cnst_c() - self.cnst_B(self.Y)
def function[obfn_fvar, parameter[self]]: constant[Variable to be evaluated in computing regularisation term, depending on 'fEvalX' option value. ] if call[name[self].opt][constant[fEvalX]] begin[:] return[name[self].X]
keyword[def] identifier[obfn_fvar] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[opt] [ literal[string] ]: keyword[return] identifier[self] . identifier[X] keyword[else] : keyword[return] identifier[self] . identifier[cnst_c] ()- identifier[self] . identifier[cnst_B] ( identifier[self] . identifier[Y] )
def obfn_fvar(self): """Variable to be evaluated in computing regularisation term, depending on 'fEvalX' option value. """ if self.opt['fEvalX']: return self.X # depends on [control=['if'], data=[]] else: return self.cnst_c() - self.cnst_B(self.Y)
def find_path_package_name(thepath):
    """
    Takes a file system path and returns the name of the Python package
    the path belongs to. If the package cannot be determined, it returns
    None.
    """
    module_found = False
    last_module_found = None
    continue_ = True
    while continue_:
        module_found = is_path_python_module(thepath)
        next_path = path.dirname(thepath)
        if next_path == thepath:
            continue_ = False

        if module_found:
            init_names = ['__init__%s' % suffix.lower()
                          for suffix in _py_suffixes]
            if path.basename(thepath).lower() in init_names:
                last_module_found = path.basename(path.dirname(thepath))
            else:
                last_module_found = path.basename(thepath)

        if last_module_found and not module_found:
            continue_ = False

        thepath = next_path

    return last_module_found
def function[find_path_package_name, parameter[thepath]]: constant[ Takes a file system path and returns the name of the python package the said path belongs to. If the said path can not be determined, it returns None. ] variable[module_found] assign[=] constant[False] variable[last_module_found] assign[=] constant[None] variable[continue_] assign[=] constant[True] while name[continue_] begin[:] variable[module_found] assign[=] call[name[is_path_python_module], parameter[name[thepath]]] variable[next_path] assign[=] call[name[path].dirname, parameter[name[thepath]]] if compare[name[next_path] equal[==] name[thepath]] begin[:] variable[continue_] assign[=] constant[False] if name[module_found] begin[:] variable[init_names] assign[=] <ast.ListComp object at 0x7da20e9541f0> if compare[call[call[name[path].basename, parameter[name[thepath]]].lower, parameter[]] in name[init_names]] begin[:] variable[last_module_found] assign[=] call[name[path].basename, parameter[call[name[path].dirname, parameter[name[thepath]]]]] if <ast.BoolOp object at 0x7da20e957c10> begin[:] variable[continue_] assign[=] constant[False] variable[thepath] assign[=] name[next_path] return[name[last_module_found]]
keyword[def] identifier[find_path_package_name] ( identifier[thepath] ): literal[string] identifier[module_found] = keyword[False] identifier[last_module_found] = keyword[None] identifier[continue_] = keyword[True] keyword[while] identifier[continue_] : identifier[module_found] = identifier[is_path_python_module] ( identifier[thepath] ) identifier[next_path] = identifier[path] . identifier[dirname] ( identifier[thepath] ) keyword[if] identifier[next_path] == identifier[thepath] : identifier[continue_] = keyword[False] keyword[if] identifier[module_found] : identifier[init_names] =[ literal[string] % identifier[suffix] . identifier[lower] () keyword[for] identifier[suffix] keyword[in] identifier[_py_suffixes] ] keyword[if] identifier[path] . identifier[basename] ( identifier[thepath] ). identifier[lower] () keyword[in] identifier[init_names] : identifier[last_module_found] = identifier[path] . identifier[basename] ( identifier[path] . identifier[dirname] ( identifier[thepath] )) keyword[else] : identifier[last_module_found] = identifier[path] . identifier[basename] ( identifier[thepath] ) keyword[if] identifier[last_module_found] keyword[and] keyword[not] identifier[module_found] : identifier[continue_] = keyword[False] identifier[thepath] = identifier[next_path] keyword[return] identifier[last_module_found]
def find_path_package_name(thepath):
    """
    Takes a file system path and returns the name of the Python package
    the path belongs to. If the package cannot be determined, it returns
    None.
    """
    module_found = False
    last_module_found = None
    continue_ = True
    while continue_:
        module_found = is_path_python_module(thepath)
        next_path = path.dirname(thepath)
        if next_path == thepath:
            continue_ = False # depends on [control=['if'], data=[]]
        if module_found:
            init_names = ['__init__%s' % suffix.lower() for suffix in _py_suffixes]
            if path.basename(thepath).lower() in init_names:
                last_module_found = path.basename(path.dirname(thepath)) # depends on [control=['if'], data=[]]
            else:
                last_module_found = path.basename(thepath) # depends on [control=['if'], data=[]]
        if last_module_found and (not module_found):
            continue_ = False # depends on [control=['if'], data=[]]
        thepath = next_path # depends on [control=['while'], data=[]]
    return last_module_found
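A usage sketch that builds a throwaway package layout on disk; it assumes is_path_python_module and _py_suffixes from the same module recognize .py files and directories containing __init__.py:

import os
import tempfile
from os import path

root = tempfile.mkdtemp()
pkg = path.join(root, 'mypkg')
os.makedirs(pkg)
open(path.join(pkg, '__init__.py'), 'w').close()
mod = path.join(pkg, 'sub.py')
open(mod, 'w').close()

print(find_path_package_name(mod))  # expected: 'mypkg'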
def main(**kwargs): """ Entry point for dx-build-app(let). Don't call this function as a subroutine in your program! It is liable to sys.exit your program when it detects certain error conditions, so you can't recover from those as you could if it raised exceptions. Instead, call dx_build_app.build_and_upload_locally which provides the real implementation for dx-build-app(let) but is easier to use in your program. """ if len(sys.argv) > 0: if sys.argv[0].endswith('dx-build-app'): logging.warn('Warning: dx-build-app has been replaced with "dx build --create-app". Please update your scripts.') elif sys.argv[0].endswith('dx-build-applet'): logging.warn('Warning: dx-build-applet has been replaced with "dx build". Please update your scripts.') exit(0)
def function[main, parameter[]]: constant[ Entry point for dx-build-app(let). Don't call this function as a subroutine in your program! It is liable to sys.exit your program when it detects certain error conditions, so you can't recover from those as you could if it raised exceptions. Instead, call dx_build_app.build_and_upload_locally which provides the real implementation for dx-build-app(let) but is easier to use in your program. ] if compare[call[name[len], parameter[name[sys].argv]] greater[>] constant[0]] begin[:] if call[call[name[sys].argv][constant[0]].endswith, parameter[constant[dx-build-app]]] begin[:] call[name[logging].warn, parameter[constant[Warning: dx-build-app has been replaced with "dx build --create-app". Please update your scripts.]]] call[name[exit], parameter[constant[0]]]
keyword[def] identifier[main] (** identifier[kwargs] ): literal[string] keyword[if] identifier[len] ( identifier[sys] . identifier[argv] )> literal[int] : keyword[if] identifier[sys] . identifier[argv] [ literal[int] ]. identifier[endswith] ( literal[string] ): identifier[logging] . identifier[warn] ( literal[string] ) keyword[elif] identifier[sys] . identifier[argv] [ literal[int] ]. identifier[endswith] ( literal[string] ): identifier[logging] . identifier[warn] ( literal[string] ) identifier[exit] ( literal[int] )
def main(**kwargs): """ Entry point for dx-build-app(let). Don't call this function as a subroutine in your program! It is liable to sys.exit your program when it detects certain error conditions, so you can't recover from those as you could if it raised exceptions. Instead, call dx_build_app.build_and_upload_locally which provides the real implementation for dx-build-app(let) but is easier to use in your program. """ if len(sys.argv) > 0: if sys.argv[0].endswith('dx-build-app'): logging.warn('Warning: dx-build-app has been replaced with "dx build --create-app". Please update your scripts.') # depends on [control=['if'], data=[]] elif sys.argv[0].endswith('dx-build-applet'): logging.warn('Warning: dx-build-applet has been replaced with "dx build". Please update your scripts.') # depends on [control=['if'], data=[]] exit(0) # depends on [control=['if'], data=[]]
def _einsum_equation(input_shapes, output_shape): """Turn shapes into an einsum equation. e.g. "ij,jk->ik" Args: input_shapes: a list of Shapes output_shape: a Shape Returns: a string """ ret = [] next_letter = ord("a") dim_to_letter = {} for shape_num, shape in enumerate(input_shapes + [output_shape]): if shape_num == len(input_shapes): ret.append("->") elif shape_num > 0: ret.append(",") for d in shape.dims: if d not in dim_to_letter: dim_to_letter[d] = chr(next_letter) next_letter += 1 ret.append(dim_to_letter[d]) return "".join(ret)
def function[_einsum_equation, parameter[input_shapes, output_shape]]: constant[Turn shapes into an einsum equation. e.g. "ij,jk->ik" Args: input_shapes: a list of Shapes output_shape: a Shape Returns: a string ] variable[ret] assign[=] list[[]] variable[next_letter] assign[=] call[name[ord], parameter[constant[a]]] variable[dim_to_letter] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da20c6c5240>, <ast.Name object at 0x7da20c6c5f00>]]] in starred[call[name[enumerate], parameter[binary_operation[name[input_shapes] + list[[<ast.Name object at 0x7da20c6c4460>]]]]]] begin[:] if compare[name[shape_num] equal[==] call[name[len], parameter[name[input_shapes]]]] begin[:] call[name[ret].append, parameter[constant[->]]] for taget[name[d]] in starred[name[shape].dims] begin[:] if compare[name[d] <ast.NotIn object at 0x7da2590d7190> name[dim_to_letter]] begin[:] call[name[dim_to_letter]][name[d]] assign[=] call[name[chr], parameter[name[next_letter]]] <ast.AugAssign object at 0x7da20c6c6f20> call[name[ret].append, parameter[call[name[dim_to_letter]][name[d]]]] return[call[constant[].join, parameter[name[ret]]]]
keyword[def] identifier[_einsum_equation] ( identifier[input_shapes] , identifier[output_shape] ): literal[string] identifier[ret] =[] identifier[next_letter] = identifier[ord] ( literal[string] ) identifier[dim_to_letter] ={} keyword[for] identifier[shape_num] , identifier[shape] keyword[in] identifier[enumerate] ( identifier[input_shapes] +[ identifier[output_shape] ]): keyword[if] identifier[shape_num] == identifier[len] ( identifier[input_shapes] ): identifier[ret] . identifier[append] ( literal[string] ) keyword[elif] identifier[shape_num] > literal[int] : identifier[ret] . identifier[append] ( literal[string] ) keyword[for] identifier[d] keyword[in] identifier[shape] . identifier[dims] : keyword[if] identifier[d] keyword[not] keyword[in] identifier[dim_to_letter] : identifier[dim_to_letter] [ identifier[d] ]= identifier[chr] ( identifier[next_letter] ) identifier[next_letter] += literal[int] identifier[ret] . identifier[append] ( identifier[dim_to_letter] [ identifier[d] ]) keyword[return] literal[string] . identifier[join] ( identifier[ret] )
def _einsum_equation(input_shapes, output_shape): """Turn shapes into an einsum equation. e.g. "ij,jk->ik" Args: input_shapes: a list of Shapes output_shape: a Shape Returns: a string """ ret = [] next_letter = ord('a') dim_to_letter = {} for (shape_num, shape) in enumerate(input_shapes + [output_shape]): if shape_num == len(input_shapes): ret.append('->') # depends on [control=['if'], data=[]] elif shape_num > 0: ret.append(',') # depends on [control=['if'], data=[]] for d in shape.dims: if d not in dim_to_letter: dim_to_letter[d] = chr(next_letter) next_letter += 1 # depends on [control=['if'], data=['d', 'dim_to_letter']] ret.append(dim_to_letter[d]) # depends on [control=['for'], data=['d']] # depends on [control=['for'], data=[]] return ''.join(ret)
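The helper only needs shapes whose .dims entries are hashable, so a tiny stand-in is enough to see the lettering scheme (the shape() factory below is a mock, not the real mesh Shape class):

from types import SimpleNamespace

def shape(*dims):
    # Mock shape: dims can be any hashable objects, plain strings here.
    return SimpleNamespace(dims=list(dims))

# Matrix multiply: [i, j] x [j, k] -> [i, k]
print(_einsum_equation([shape('i', 'j'), shape('j', 'k')], shape('i', 'k')))
# ab,bc->ac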
def benchmark_gops(N, gates, reps): """Return benchmark performance in GOPS (Gate operations per second)""" t = timeit.timeit(lambda: benchmark(N, gates), number=reps) gops = (GATES*REPS)/t gops = int((gops * 100) + 0.5) / 100.0 return gops
def function[benchmark_gops, parameter[N, gates, reps]]: constant[Return benchmark performance in GOPS (Gate operations per second)] variable[t] assign[=] call[name[timeit].timeit, parameter[<ast.Lambda object at 0x7da18f7214e0>]] variable[gops] assign[=] binary_operation[binary_operation[name[GATES] * name[REPS]] / name[t]] variable[gops] assign[=] binary_operation[call[name[int], parameter[binary_operation[binary_operation[name[gops] * constant[100]] + constant[0.5]]]] / constant[100.0]] return[name[gops]]
keyword[def] identifier[benchmark_gops] ( identifier[N] , identifier[gates] , identifier[reps] ): literal[string] identifier[t] = identifier[timeit] . identifier[timeit] ( keyword[lambda] : identifier[benchmark] ( identifier[N] , identifier[gates] ), identifier[number] = identifier[reps] ) identifier[gops] =( identifier[GATES] * identifier[REPS] )/ identifier[t] identifier[gops] = identifier[int] (( identifier[gops] * literal[int] )+ literal[int] )/ literal[int] keyword[return] identifier[gops]
def benchmark_gops(N, gates, reps): """Return benchmark performance in GOPS (Gate operations per second)""" t = timeit.timeit(lambda : benchmark(N, gates), number=reps) gops = GATES * REPS / t gops = int(gops * 100 + 0.5) / 100.0 return gops
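A hedged usage sketch: benchmark, GATES and REPS are module-level names the function depends on (note the rate is computed from the GATES and REPS constants, not from the gates/reps arguments):

# Assumes the module defines benchmark(N, gates) plus GATES and REPS constants.
for n_qubits in (4, 8, 12):
    gops = benchmark_gops(n_qubits, GATES, REPS)
    print(n_qubits, "qubits:", gops, "GOPS")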
def _generic_definefont_parser(self, obj): """A generic parser for several DefineFontX.""" obj.FontID = unpack_ui16(self._src) bc = BitConsumer(self._src) obj.FontFlagsHasLayout = bc.u_get(1) obj.FontFlagsShiftJIS = bc.u_get(1) obj.FontFlagsSmallText = bc.u_get(1) obj.FontFlagsANSI = bc.u_get(1) obj.FontFlagsWideOffsets = bc.u_get(1) obj.FontFlagsWideCodes = bc.u_get(1) obj.FontFlagsItalic = bc.u_get(1) obj.FontFlagsBold = bc.u_get(1) obj.LanguageCode = self._get_struct_langcode() obj.FontNameLen = unpack_ui8(self._src) obj.FontName = "".join(chr(unpack_ui8(self._src)) for i in range(obj.FontNameLen)) if obj.FontName[-1] == '\x00': # most probably ends in null, clean it obj.FontName = obj.FontName[:-1] obj.NumGlyphs = num_glyphs = unpack_ui16(self._src) self._last_defined_glyphs_quantity = num_glyphs getter_wide = unpack_ui32 if obj.FontFlagsWideOffsets else unpack_ui16 obj.OffsetTable = [getter_wide(self._src) for _ in range(num_glyphs)] obj.CodeTableOffset = getter_wide(self._src) obj.GlyphShapeTable = [self._get_struct_shape() for _ in range(num_glyphs)] obj.CodeTable = [unpack_ui16(self._src) for _ in range(num_glyphs)] if obj.FontFlagsHasLayout: obj.FontAscent = unpack_ui16(self._src) obj.FontDecent = unpack_ui16(self._src) obj.FontLeading = unpack_ui16(self._src) obj.FontAdvanceTable = [unpack_si16(self._src) for _ in range(num_glyphs)] obj.FontBoundsTable = [self._get_struct_rect() for _ in range(num_glyphs)] obj.KerningCount = unpack_ui16(self._src) obj.FontKerningTable = [ self._get_struct_kerningrecord(obj.FontFlagsWideCodes) for _ in range(obj.KerningCount)]
def function[_generic_definefont_parser, parameter[self, obj]]: constant[A generic parser for several DefineFontX.] name[obj].FontID assign[=] call[name[unpack_ui16], parameter[name[self]._src]] variable[bc] assign[=] call[name[BitConsumer], parameter[name[self]._src]] name[obj].FontFlagsHasLayout assign[=] call[name[bc].u_get, parameter[constant[1]]] name[obj].FontFlagsShiftJIS assign[=] call[name[bc].u_get, parameter[constant[1]]] name[obj].FontFlagsSmallText assign[=] call[name[bc].u_get, parameter[constant[1]]] name[obj].FontFlagsANSI assign[=] call[name[bc].u_get, parameter[constant[1]]] name[obj].FontFlagsWideOffsets assign[=] call[name[bc].u_get, parameter[constant[1]]] name[obj].FontFlagsWideCodes assign[=] call[name[bc].u_get, parameter[constant[1]]] name[obj].FontFlagsItalic assign[=] call[name[bc].u_get, parameter[constant[1]]] name[obj].FontFlagsBold assign[=] call[name[bc].u_get, parameter[constant[1]]] name[obj].LanguageCode assign[=] call[name[self]._get_struct_langcode, parameter[]] name[obj].FontNameLen assign[=] call[name[unpack_ui8], parameter[name[self]._src]] name[obj].FontName assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da20c6a8e50>]] if compare[call[name[obj].FontName][<ast.UnaryOp object at 0x7da20c6aaf50>] equal[==] constant[]] begin[:] name[obj].FontName assign[=] call[name[obj].FontName][<ast.Slice object at 0x7da20c6ab850>] name[obj].NumGlyphs assign[=] call[name[unpack_ui16], parameter[name[self]._src]] name[self]._last_defined_glyphs_quantity assign[=] name[num_glyphs] variable[getter_wide] assign[=] <ast.IfExp object at 0x7da20c6aa3b0> name[obj].OffsetTable assign[=] <ast.ListComp object at 0x7da20c6a8460> name[obj].CodeTableOffset assign[=] call[name[getter_wide], parameter[name[self]._src]] name[obj].GlyphShapeTable assign[=] <ast.ListComp object at 0x7da20c6aafe0> name[obj].CodeTable assign[=] <ast.ListComp object at 0x7da20c6ab3a0> if name[obj].FontFlagsHasLayout begin[:] name[obj].FontAscent assign[=] call[name[unpack_ui16], parameter[name[self]._src]] name[obj].FontDecent assign[=] call[name[unpack_ui16], parameter[name[self]._src]] name[obj].FontLeading assign[=] call[name[unpack_ui16], parameter[name[self]._src]] name[obj].FontAdvanceTable assign[=] <ast.ListComp object at 0x7da20c6aa560> name[obj].FontBoundsTable assign[=] <ast.ListComp object at 0x7da20c6a9420> name[obj].KerningCount assign[=] call[name[unpack_ui16], parameter[name[self]._src]] name[obj].FontKerningTable assign[=] <ast.ListComp object at 0x7da20c6a9ab0>
keyword[def] identifier[_generic_definefont_parser] ( identifier[self] , identifier[obj] ): literal[string] identifier[obj] . identifier[FontID] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] ) identifier[bc] = identifier[BitConsumer] ( identifier[self] . identifier[_src] ) identifier[obj] . identifier[FontFlagsHasLayout] = identifier[bc] . identifier[u_get] ( literal[int] ) identifier[obj] . identifier[FontFlagsShiftJIS] = identifier[bc] . identifier[u_get] ( literal[int] ) identifier[obj] . identifier[FontFlagsSmallText] = identifier[bc] . identifier[u_get] ( literal[int] ) identifier[obj] . identifier[FontFlagsANSI] = identifier[bc] . identifier[u_get] ( literal[int] ) identifier[obj] . identifier[FontFlagsWideOffsets] = identifier[bc] . identifier[u_get] ( literal[int] ) identifier[obj] . identifier[FontFlagsWideCodes] = identifier[bc] . identifier[u_get] ( literal[int] ) identifier[obj] . identifier[FontFlagsItalic] = identifier[bc] . identifier[u_get] ( literal[int] ) identifier[obj] . identifier[FontFlagsBold] = identifier[bc] . identifier[u_get] ( literal[int] ) identifier[obj] . identifier[LanguageCode] = identifier[self] . identifier[_get_struct_langcode] () identifier[obj] . identifier[FontNameLen] = identifier[unpack_ui8] ( identifier[self] . identifier[_src] ) identifier[obj] . identifier[FontName] = literal[string] . identifier[join] ( identifier[chr] ( identifier[unpack_ui8] ( identifier[self] . identifier[_src] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[obj] . identifier[FontNameLen] )) keyword[if] identifier[obj] . identifier[FontName] [- literal[int] ]== literal[string] : identifier[obj] . identifier[FontName] = identifier[obj] . identifier[FontName] [:- literal[int] ] identifier[obj] . identifier[NumGlyphs] = identifier[num_glyphs] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] ) identifier[self] . identifier[_last_defined_glyphs_quantity] = identifier[num_glyphs] identifier[getter_wide] = identifier[unpack_ui32] keyword[if] identifier[obj] . identifier[FontFlagsWideOffsets] keyword[else] identifier[unpack_ui16] identifier[obj] . identifier[OffsetTable] =[ identifier[getter_wide] ( identifier[self] . identifier[_src] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_glyphs] )] identifier[obj] . identifier[CodeTableOffset] = identifier[getter_wide] ( identifier[self] . identifier[_src] ) identifier[obj] . identifier[GlyphShapeTable] =[ identifier[self] . identifier[_get_struct_shape] () keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_glyphs] )] identifier[obj] . identifier[CodeTable] =[ identifier[unpack_ui16] ( identifier[self] . identifier[_src] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_glyphs] )] keyword[if] identifier[obj] . identifier[FontFlagsHasLayout] : identifier[obj] . identifier[FontAscent] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] ) identifier[obj] . identifier[FontDecent] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] ) identifier[obj] . identifier[FontLeading] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] ) identifier[obj] . identifier[FontAdvanceTable] =[ identifier[unpack_si16] ( identifier[self] . identifier[_src] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_glyphs] )] identifier[obj] . identifier[FontBoundsTable] =[ identifier[self] . identifier[_get_struct_rect] () keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_glyphs] )] identifier[obj] . identifier[KerningCount] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] ) identifier[obj] . identifier[FontKerningTable] =[ identifier[self] . identifier[_get_struct_kerningrecord] ( identifier[obj] . identifier[FontFlagsWideCodes] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[obj] . identifier[KerningCount] )]
def _generic_definefont_parser(self, obj): """A generic parser for several DefineFontX.""" obj.FontID = unpack_ui16(self._src) bc = BitConsumer(self._src) obj.FontFlagsHasLayout = bc.u_get(1) obj.FontFlagsShiftJIS = bc.u_get(1) obj.FontFlagsSmallText = bc.u_get(1) obj.FontFlagsANSI = bc.u_get(1) obj.FontFlagsWideOffsets = bc.u_get(1) obj.FontFlagsWideCodes = bc.u_get(1) obj.FontFlagsItalic = bc.u_get(1) obj.FontFlagsBold = bc.u_get(1) obj.LanguageCode = self._get_struct_langcode() obj.FontNameLen = unpack_ui8(self._src) obj.FontName = ''.join((chr(unpack_ui8(self._src)) for i in range(obj.FontNameLen))) if obj.FontName[-1] == '\x00': # most probably ends in null, clean it obj.FontName = obj.FontName[:-1] # depends on [control=['if'], data=[]] obj.NumGlyphs = num_glyphs = unpack_ui16(self._src) self._last_defined_glyphs_quantity = num_glyphs getter_wide = unpack_ui32 if obj.FontFlagsWideOffsets else unpack_ui16 obj.OffsetTable = [getter_wide(self._src) for _ in range(num_glyphs)] obj.CodeTableOffset = getter_wide(self._src) obj.GlyphShapeTable = [self._get_struct_shape() for _ in range(num_glyphs)] obj.CodeTable = [unpack_ui16(self._src) for _ in range(num_glyphs)] if obj.FontFlagsHasLayout: obj.FontAscent = unpack_ui16(self._src) obj.FontDecent = unpack_ui16(self._src) obj.FontLeading = unpack_ui16(self._src) obj.FontAdvanceTable = [unpack_si16(self._src) for _ in range(num_glyphs)] obj.FontBoundsTable = [self._get_struct_rect() for _ in range(num_glyphs)] obj.KerningCount = unpack_ui16(self._src) obj.FontKerningTable = [self._get_struct_kerningrecord(obj.FontFlagsWideCodes) for _ in range(obj.KerningCount)] # depends on [control=['if'], data=[]]
def order_replicant_volume(self, volume_id, snapshot_schedule, location, tier=None, os_type=None): """Places an order for a replicant block volume. :param volume_id: The ID of the primary volume to be replicated :param snapshot_schedule: The primary volume's snapshot schedule to use for replication :param location: The location for the ordered replicant volume :param tier: The tier (IOPS per GB) of the primary volume :param os_type: The OS type of the primary volume :return: Returns a SoftLayer_Container_Product_Order_Receipt """ block_mask = 'billingItem[activeChildren,hourlyFlag],'\ 'storageTierLevel,osType,staasVersion,'\ 'hasEncryptionAtRest,snapshotCapacityGb,schedules,'\ 'intervalSchedule,hourlySchedule,dailySchedule,'\ 'weeklySchedule,storageType[keyName],provisionedIops' block_volume = self.get_block_volume_details(volume_id, mask=block_mask) if os_type is None: if isinstance(utils.lookup(block_volume, 'osType', 'keyName'), str): os_type = block_volume['osType']['keyName'] else: raise exceptions.SoftLayerError( "Cannot find primary volume's os-type " "automatically; must specify manually") order = storage_utils.prepare_replicant_order_object( self, snapshot_schedule, location, tier, block_volume, 'block' ) order['osFormatType'] = {'keyName': os_type} return self.client.call('Product_Order', 'placeOrder', order)
def function[order_replicant_volume, parameter[self, volume_id, snapshot_schedule, location, tier, os_type]]: constant[Places an order for a replicant block volume. :param volume_id: The ID of the primary volume to be replicated :param snapshot_schedule: The primary volume's snapshot schedule to use for replication :param location: The location for the ordered replicant volume :param tier: The tier (IOPS per GB) of the primary volume :param os_type: The OS type of the primary volume :return: Returns a SoftLayer_Container_Product_Order_Receipt ] variable[block_mask] assign[=] constant[billingItem[activeChildren,hourlyFlag],storageTierLevel,osType,staasVersion,hasEncryptionAtRest,snapshotCapacityGb,schedules,intervalSchedule,hourlySchedule,dailySchedule,weeklySchedule,storageType[keyName],provisionedIops] variable[block_volume] assign[=] call[name[self].get_block_volume_details, parameter[name[volume_id]]] if compare[name[os_type] is constant[None]] begin[:] if call[name[isinstance], parameter[call[name[utils].lookup, parameter[name[block_volume], constant[osType], constant[keyName]]], name[str]]] begin[:] variable[os_type] assign[=] call[call[name[block_volume]][constant[osType]]][constant[keyName]] variable[order] assign[=] call[name[storage_utils].prepare_replicant_order_object, parameter[name[self], name[snapshot_schedule], name[location], name[tier], name[block_volume], constant[block]]] call[name[order]][constant[osFormatType]] assign[=] dictionary[[<ast.Constant object at 0x7da20c7c9090>], [<ast.Name object at 0x7da20c7ca140>]] return[call[name[self].client.call, parameter[constant[Product_Order], constant[placeOrder], name[order]]]]
keyword[def] identifier[order_replicant_volume] ( identifier[self] , identifier[volume_id] , identifier[snapshot_schedule] , identifier[location] , identifier[tier] = keyword[None] , identifier[os_type] = keyword[None] ): literal[string] identifier[block_mask] = literal[string] literal[string] literal[string] literal[string] literal[string] identifier[block_volume] = identifier[self] . identifier[get_block_volume_details] ( identifier[volume_id] , identifier[mask] = identifier[block_mask] ) keyword[if] identifier[os_type] keyword[is] keyword[None] : keyword[if] identifier[isinstance] ( identifier[utils] . identifier[lookup] ( identifier[block_volume] , literal[string] , literal[string] ), identifier[str] ): identifier[os_type] = identifier[block_volume] [ literal[string] ][ literal[string] ] keyword[else] : keyword[raise] identifier[exceptions] . identifier[SoftLayerError] ( literal[string] literal[string] ) identifier[order] = identifier[storage_utils] . identifier[prepare_replicant_order_object] ( identifier[self] , identifier[snapshot_schedule] , identifier[location] , identifier[tier] , identifier[block_volume] , literal[string] ) identifier[order] [ literal[string] ]={ literal[string] : identifier[os_type] } keyword[return] identifier[self] . identifier[client] . identifier[call] ( literal[string] , literal[string] , identifier[order] )
def order_replicant_volume(self, volume_id, snapshot_schedule, location, tier=None, os_type=None): """Places an order for a replicant block volume. :param volume_id: The ID of the primary volume to be replicated :param snapshot_schedule: The primary volume's snapshot schedule to use for replication :param location: The location for the ordered replicant volume :param tier: The tier (IOPS per GB) of the primary volume :param os_type: The OS type of the primary volume :return: Returns a SoftLayer_Container_Product_Order_Receipt """ block_mask = 'billingItem[activeChildren,hourlyFlag],storageTierLevel,osType,staasVersion,hasEncryptionAtRest,snapshotCapacityGb,schedules,intervalSchedule,hourlySchedule,dailySchedule,weeklySchedule,storageType[keyName],provisionedIops' block_volume = self.get_block_volume_details(volume_id, mask=block_mask) if os_type is None: if isinstance(utils.lookup(block_volume, 'osType', 'keyName'), str): os_type = block_volume['osType']['keyName'] # depends on [control=['if'], data=[]] else: raise exceptions.SoftLayerError("Cannot find primary volume's os-type automatically; must specify manually") # depends on [control=['if'], data=['os_type']] order = storage_utils.prepare_replicant_order_object(self, snapshot_schedule, location, tier, block_volume, 'block') order['osFormatType'] = {'keyName': os_type} return self.client.call('Product_Order', 'placeOrder', order)
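A usage sketch through the SoftLayer manager that owns this method; the volume id, schedule and datacenter below are hypothetical:

import SoftLayer

client = SoftLayer.create_client_from_env()
block_manager = SoftLayer.BlockStorageManager(client)
receipt = block_manager.order_replicant_volume(
    12345678,            # primary volume id (hypothetical)
    'WEEKLY',            # snapshot schedule to replicate on
    'dal09',             # target datacenter
)
print(receipt.get('orderId'))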
def merge(bedfiles):
    """
    given a BED file or list of BED files, merge them and return a bedtools
    object
    """
    if isinstance(bedfiles, list):
        catted = concat(bedfiles)
    else:
        catted = concat([bedfiles])
    if catted:
        return concat(bedfiles).sort().merge()
    else:
        return catted
def function[merge, parameter[bedfiles]]: constant[ given a BED file or list of BED files merge them an return a bedtools object ] if call[name[isinstance], parameter[name[bedfiles], name[list]]] begin[:] variable[catted] assign[=] call[name[concat], parameter[name[bedfiles]]] if name[catted] begin[:] return[call[call[call[name[concat], parameter[name[bedfiles]]].sort, parameter[]].merge, parameter[]]]
keyword[def] identifier[merge] ( identifier[bedfiles] ): literal[string] keyword[if] identifier[isinstance] ( identifier[bedfiles] , identifier[list] ): identifier[catted] = identifier[concat] ( identifier[bedfiles] ) keyword[else] : identifier[catted] = identifier[concat] ([ identifier[bedfiles] ]) keyword[if] identifier[catted] : keyword[return] identifier[concat] ( identifier[bedfiles] ). identifier[sort] (). identifier[merge] () keyword[else] : keyword[return] identifier[catted]
def merge(bedfiles):
    """
    given a BED file or list of BED files, merge them and return a bedtools
    object
    """
    if isinstance(bedfiles, list):
        catted = concat(bedfiles) # depends on [control=['if'], data=[]]
    else:
        catted = concat([bedfiles])
    if catted:
        return concat(bedfiles).sort().merge() # depends on [control=['if'], data=[]]
    else:
        return catted
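A usage sketch, assuming concat from the same module wraps pybedtools (the file names are hypothetical):

merged = merge(['sample1.bed', 'sample2.bed'])
for interval in merged:
    print(interval.chrom, interval.start, interval.end)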
def load_conf(cfg_path): """ Try to load the given conf file. """ global config try: cfg = open(cfg_path, 'r') except Exception as ex: if verbose: print("Unable to open {0}".format(cfg_path)) print(str(ex)) return False # Read the entire contents of the conf file cfg_json = cfg.read() cfg.close() # print(cfg_json) # Try to parse the conf file into a Python structure try: config = json.loads(cfg_json) except Exception as ex: print("Unable to parse configuration file as JSON") print(str(ex)) return False # This config was successfully loaded return True
def function[load_conf, parameter[cfg_path]]: constant[ Try to load the given conf file. ] <ast.Global object at 0x7da20c7c8370> <ast.Try object at 0x7da20c7ca290> variable[cfg_json] assign[=] call[name[cfg].read, parameter[]] call[name[cfg].close, parameter[]] <ast.Try object at 0x7da20c7cb0a0> return[constant[True]]
keyword[def] identifier[load_conf] ( identifier[cfg_path] ): literal[string] keyword[global] identifier[config] keyword[try] : identifier[cfg] = identifier[open] ( identifier[cfg_path] , literal[string] ) keyword[except] identifier[Exception] keyword[as] identifier[ex] : keyword[if] identifier[verbose] : identifier[print] ( literal[string] . identifier[format] ( identifier[cfg_path] )) identifier[print] ( identifier[str] ( identifier[ex] )) keyword[return] keyword[False] identifier[cfg_json] = identifier[cfg] . identifier[read] () identifier[cfg] . identifier[close] () keyword[try] : identifier[config] = identifier[json] . identifier[loads] ( identifier[cfg_json] ) keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[print] ( literal[string] ) identifier[print] ( identifier[str] ( identifier[ex] )) keyword[return] keyword[False] keyword[return] keyword[True]
def load_conf(cfg_path): """ Try to load the given conf file. """ global config try: cfg = open(cfg_path, 'r') # depends on [control=['try'], data=[]] except Exception as ex: if verbose: print('Unable to open {0}'.format(cfg_path)) print(str(ex)) # depends on [control=['if'], data=[]] return False # depends on [control=['except'], data=['ex']] # Read the entire contents of the conf file cfg_json = cfg.read() cfg.close() # print(cfg_json) # Try to parse the conf file into a Python structure try: config = json.loads(cfg_json) # depends on [control=['try'], data=[]] except Exception as ex: print('Unable to parse configuration file as JSON') print(str(ex)) return False # depends on [control=['except'], data=['ex']] # This config was successfully loaded return True
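A short usage sketch; config and verbose are module-level globals this function reads and writes (the path below is hypothetical):

if load_conf('/etc/myapp/conf.json'):
    print('loaded keys:', sorted(config))
else:
    print('config unavailable, using defaults')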
def update_time_range(form_data): """Move since and until to time_range.""" if 'since' in form_data or 'until' in form_data: form_data['time_range'] = '{} : {}'.format( form_data.pop('since', '') or '', form_data.pop('until', '') or '', )
def function[update_time_range, parameter[form_data]]: constant[Move since and until to time_range.] if <ast.BoolOp object at 0x7da1b1e13820> begin[:] call[name[form_data]][constant[time_range]] assign[=] call[constant[{} : {}].format, parameter[<ast.BoolOp object at 0x7da1b20f9120>, <ast.BoolOp object at 0x7da1b20f82b0>]]
keyword[def] identifier[update_time_range] ( identifier[form_data] ): literal[string] keyword[if] literal[string] keyword[in] identifier[form_data] keyword[or] literal[string] keyword[in] identifier[form_data] : identifier[form_data] [ literal[string] ]= literal[string] . identifier[format] ( identifier[form_data] . identifier[pop] ( literal[string] , literal[string] ) keyword[or] literal[string] , identifier[form_data] . identifier[pop] ( literal[string] , literal[string] ) keyword[or] literal[string] , )
def update_time_range(form_data): """Move since and until to time_range.""" if 'since' in form_data or 'until' in form_data: form_data['time_range'] = '{} : {}'.format(form_data.pop('since', '') or '', form_data.pop('until', '') or '') # depends on [control=['if'], data=[]]
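The rewrite is purely in place on the dict, for example:

form_data = {'since': '7 days ago', 'until': '', 'metric': 'count'}
update_time_range(form_data)
print(form_data)
# {'metric': 'count', 'time_range': '7 days ago : '}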
def setHighQualityOverlay(self, ulOverlayHandle):
    """
    Specify which overlay should use the high quality render path. This overlay
    will be composited in during the distortion pass which results in it drawing
    on top of everything else, but also at a higher quality as it samples the
    source texture directly rather than rasterizing into each eye's render
    texture first. Because of this, only one of these is supported at any given
    time. It is most useful for overlays that are expected to take up most of
    the user's view (e.g. streaming video). This mode does not support mouse
    input to your overlay.
    """

    fn = self.function_table.setHighQualityOverlay
    result = fn(ulOverlayHandle)
    return result
def function[setHighQualityOverlay, parameter[self, ulOverlayHandle]]: constant[ Specify which overlay to use the high quality render path. This overlay will be composited in during the distortion pass which results in it drawing on top of everything else, but also at a higher quality as it samples the source texture directly rather than rasterizing into each eye's render texture first. Because if this, only one of these is supported at any given time. It is most useful for overlays that are expected to take up most of the user's view (e.g. streaming video). This mode does not support mouse input to your overlay. ] variable[fn] assign[=] name[self].function_table.setHighQualityOverlay variable[result] assign[=] call[name[fn], parameter[name[ulOverlayHandle]]] return[name[result]]
keyword[def] identifier[setHighQualityOverlay] ( identifier[self] , identifier[ulOverlayHandle] ): literal[string] identifier[fn] = identifier[self] . identifier[function_table] . identifier[setHighQualityOverlay] identifier[result] = identifier[fn] ( identifier[ulOverlayHandle] ) keyword[return] identifier[result]
def setHighQualityOverlay(self, ulOverlayHandle):
    """
    Specify which overlay should use the high quality render path. This overlay
    will be composited in during the distortion pass which results in it drawing
    on top of everything else, but also at a higher quality as it samples the
    source texture directly rather than rasterizing into each eye's render
    texture first. Because of this, only one of these is supported at any given
    time. It is most useful for overlays that are expected to take up most of
    the user's view (e.g. streaming video). This mode does not support mouse
    input to your overlay.
    """
    fn = self.function_table.setHighQualityOverlay
    result = fn(ulOverlayHandle)
    return result
def status(self, status_id, raise_exception_on_failure=False): """Return the status of the generation job.""" query = {"output": "json", "user_credentials": self.api_key} resp = requests.get( "%sstatus/%s" % (self._url, status_id), params=query, timeout=self._timeout ) if raise_exception_on_failure and resp.status_code != 200: raise DocumentStatusFailure(resp.content, resp.status_code) if resp.status_code == 200: as_json = json.loads(resp.content) if as_json["status"] == "completed": as_json["download_key"] = _get_download_key(as_json["download_url"]) return as_json return resp
def function[status, parameter[self, status_id, raise_exception_on_failure]]: constant[Return the status of the generation job.] variable[query] assign[=] dictionary[[<ast.Constant object at 0x7da20c76d3c0>, <ast.Constant object at 0x7da20c76cb80>], [<ast.Constant object at 0x7da20c76f460>, <ast.Attribute object at 0x7da20c76ef50>]] variable[resp] assign[=] call[name[requests].get, parameter[binary_operation[constant[%sstatus/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c76f010>, <ast.Name object at 0x7da20c76c3a0>]]]]] if <ast.BoolOp object at 0x7da20c76fd90> begin[:] <ast.Raise object at 0x7da20c76ead0> if compare[name[resp].status_code equal[==] constant[200]] begin[:] variable[as_json] assign[=] call[name[json].loads, parameter[name[resp].content]] if compare[call[name[as_json]][constant[status]] equal[==] constant[completed]] begin[:] call[name[as_json]][constant[download_key]] assign[=] call[name[_get_download_key], parameter[call[name[as_json]][constant[download_url]]]] return[name[as_json]] return[name[resp]]
keyword[def] identifier[status] ( identifier[self] , identifier[status_id] , identifier[raise_exception_on_failure] = keyword[False] ): literal[string] identifier[query] ={ literal[string] : literal[string] , literal[string] : identifier[self] . identifier[api_key] } identifier[resp] = identifier[requests] . identifier[get] ( literal[string] %( identifier[self] . identifier[_url] , identifier[status_id] ), identifier[params] = identifier[query] , identifier[timeout] = identifier[self] . identifier[_timeout] ) keyword[if] identifier[raise_exception_on_failure] keyword[and] identifier[resp] . identifier[status_code] != literal[int] : keyword[raise] identifier[DocumentStatusFailure] ( identifier[resp] . identifier[content] , identifier[resp] . identifier[status_code] ) keyword[if] identifier[resp] . identifier[status_code] == literal[int] : identifier[as_json] = identifier[json] . identifier[loads] ( identifier[resp] . identifier[content] ) keyword[if] identifier[as_json] [ literal[string] ]== literal[string] : identifier[as_json] [ literal[string] ]= identifier[_get_download_key] ( identifier[as_json] [ literal[string] ]) keyword[return] identifier[as_json] keyword[return] identifier[resp]
def status(self, status_id, raise_exception_on_failure=False): """Return the status of the generation job.""" query = {'output': 'json', 'user_credentials': self.api_key} resp = requests.get('%sstatus/%s' % (self._url, status_id), params=query, timeout=self._timeout) if raise_exception_on_failure and resp.status_code != 200: raise DocumentStatusFailure(resp.content, resp.status_code) # depends on [control=['if'], data=[]] if resp.status_code == 200: as_json = json.loads(resp.content) if as_json['status'] == 'completed': as_json['download_key'] = _get_download_key(as_json['download_url']) # depends on [control=['if'], data=[]] return as_json # depends on [control=['if'], data=[]] return resp
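Since status() only adds a download_key once the job reports "completed", callers typically poll it. A hedged usage sketch, assuming client is an instance of the class above and status_id came from an earlier render call:

import time

def wait_for_document(client, status_id, poll_seconds=2, max_tries=30):
    # poll until the job completes, then hand back the extracted download key
    for _ in range(max_tries):
        result = client.status(status_id, raise_exception_on_failure=True)
        if result["status"] == "completed":
            return result["download_key"]  # filled in by the method above
        time.sleep(poll_seconds)
    raise TimeoutError("document %s did not complete in time" % status_id)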
def module_name(self, jamfile_location):
    """Returns the name of module corresponding to 'jamfile-location'.
    If no module corresponds to location yet, associates default
    module name with that location."""
    assert isinstance(jamfile_location, basestring)
    module = self.location2module.get(jamfile_location)
    if not module:
        # Root the path, so that locations are always unambiguous.
        # Without this, we can't decide if '../../exe/program1' and '.'
        # are the same paths, or not.
        jamfile_location = os.path.realpath(
            os.path.join(os.getcwd(), jamfile_location))
        module = "Jamfile<%s>" % jamfile_location
        self.location2module[jamfile_location] = module
    return module
def function[module_name, parameter[self, jamfile_location]]: constant[Returns the name of module corresponding to 'jamfile-location'. If no module corresponds to location yet, associates default module name with that location.] assert[call[name[isinstance], parameter[name[jamfile_location], name[basestring]]]] variable[module] assign[=] call[name[self].location2module.get, parameter[name[jamfile_location]]] if <ast.UnaryOp object at 0x7da1b203cbb0> begin[:] variable[jamfile_location] assign[=] call[name[os].path.realpath, parameter[call[name[os].path.join, parameter[call[name[os].getcwd, parameter[]], name[jamfile_location]]]]] variable[module] assign[=] binary_operation[constant[Jamfile<%s>] <ast.Mod object at 0x7da2590d6920> name[jamfile_location]] call[name[self].location2module][name[jamfile_location]] assign[=] name[module] return[name[module]]
keyword[def] identifier[module_name] ( identifier[self] , identifier[jamfile_location] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[jamfile_location] , identifier[basestring] ) identifier[module] = identifier[self] . identifier[location2module] . identifier[get] ( identifier[jamfile_location] ) keyword[if] keyword[not] identifier[module] : identifier[jamfile_location] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[getcwd] (), identifier[jamfile_location] )) identifier[module] = literal[string] % identifier[jamfile_location] identifier[self] . identifier[location2module] [ identifier[jamfile_location] ]= identifier[module] keyword[return] identifier[module]
def module_name(self, jamfile_location):
    """Returns the name of module corresponding to 'jamfile-location'.
    If no module corresponds to location yet, associates default module name with that location."""
    assert isinstance(jamfile_location, basestring)
    module = self.location2module.get(jamfile_location)
    if not module:
        # Root the path, so that locations are always unambiguous.
        # Without this, we can't decide if '../../exe/program1' and '.'
        # are the same paths, or not.
        jamfile_location = os.path.realpath(os.path.join(os.getcwd(), jamfile_location))
        module = 'Jamfile<%s>' % jamfile_location
        self.location2module[jamfile_location] = module # depends on [control=['if'], data=[]]
    return module
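The comment above is the whole trick: by rooting the path before it is used as a cache key, different spellings of one location map to a single module name. A standalone illustration of that canonicalization, with no Jamfile machinery involved:

import os

def canonical_module_name(jamfile_location):
    jamfile_location = os.path.realpath(
        os.path.join(os.getcwd(), jamfile_location))
    return "Jamfile<%s>" % jamfile_location

# A relative and an absolute spelling of the current directory collapse
# to the same key, so the cache cannot hand out two names for one place.
assert canonical_module_name(".") == canonical_module_name(os.getcwd())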
def get_tiltplanes(self, sequence):
    ''' Extract tilting planes based on the distance map '''
    tilting_planes = []
    distance_map = []
    for i in range(1, len(sequence)):
        distance_map.append([ sequence[i], self.virtual_atoms.get_distance( sequence[0], sequence[i] ) ])
    distance_map = sorted(distance_map, key=lambda x: x[1])

    if len(distance_map) == 4:
        # surface edge case
        # semi-octahedron at surface edge has only one tilting plane to consider
        sorted_dist = [i[0] for i in distance_map]

        if distance_map[-1][1] - distance_map[-2][1] < 0.5:
            # 1st case: max diff < 0.5 Angstrom,
            # meaning all distances to reference atom are similar,
            # therefore the reference atom is above the searched plane
            # and the searched plane consists of other atoms
            tilting_planes.append( [ i[0] for i in distance_map ] )
        else:
            # 2nd case: reference atom belongs to the searched plane,
            # procedure needs to be repeated with the next atom as reference atom
            candidates = [sequence[0], sorted_dist[-1]]
            next_distance_map = []
            next_distance_map.append([ sorted_dist[1], self.virtual_atoms.get_distance( sorted_dist[0], sorted_dist[1] ) ])
            next_distance_map.append([ sorted_dist[2], self.virtual_atoms.get_distance( sorted_dist[0], sorted_dist[2] ) ])
            next_distance_map = sorted(next_distance_map, key=lambda x: x[1])
            next_sorted_dist = [i[0] for i in next_distance_map]

            # the next reference atom is taken above the plane (distances are similar)
            if next_distance_map[1][1] - next_distance_map[0][1] < 0.5:
                candidates.extend([ next_sorted_dist[0], next_sorted_dist[1] ])

            # the next reference atom is taken in the plane (distances are different)
            else:
                candidates.extend([ sorted_dist[0], next_sorted_dist[1] ])
            tilting_planes.append(candidates)

    elif len(distance_map) == 5:
        # full octahedron case
        # full octahedron has 3 different tilting planes (perpendicular in ideal case)
        sorted_dist = [i[0] for i in distance_map]

        # 1st plane is found as:
        first_plane = sorted_dist[0:4]
        tilting_planes.append(first_plane)
        distance_map_first_plane = []
        for i in range(1, 4):
            distance_map_first_plane.append([ first_plane[i], self.virtual_atoms.get_distance( first_plane[0], first_plane[i] ) ])
        distance_map_first_plane = sorted(distance_map_first_plane, key=lambda x: x[1])
        sorted_first_plane = [i[0] for i in distance_map_first_plane]

        # 2nd and 3rd planes are found as:
        tilting_planes.append([ sequence[0], sorted_dist[4], first_plane[0], sorted_first_plane[2] ])
        tilting_planes.append([ sequence[0], sorted_dist[4], sorted_first_plane[0], sorted_first_plane[1] ])

    # filter planes by Z according to octahedral spatial compound
    filtered = list(filter(lambda x: abs(self.virtual_atoms[ x[0] ].z - self.virtual_atoms[ x[1] ].z) < self.OCTAHEDRON_ATOMS_Z_DIFFERENCE and \
                                     abs(self.virtual_atoms[ x[1] ].z - self.virtual_atoms[ x[2] ].z) < self.OCTAHEDRON_ATOMS_Z_DIFFERENCE and \
                                     abs(self.virtual_atoms[ x[2] ].z - self.virtual_atoms[ x[3] ].z) < self.OCTAHEDRON_ATOMS_Z_DIFFERENCE, tilting_planes )) # Py3
    if len(filtered):
        tilting_planes = filtered

    return tilting_planes
def function[get_tiltplanes, parameter[self, sequence]]: constant[ Extract tilting planes basing on distance map ] variable[tilting_planes] assign[=] list[[]] variable[distance_map] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], call[name[len], parameter[name[sequence]]]]]] begin[:] call[name[distance_map].append, parameter[list[[<ast.Subscript object at 0x7da1b1943ac0>, <ast.Call object at 0x7da1b1943a30>]]]] variable[distance_map] assign[=] call[name[sorted], parameter[name[distance_map]]] if compare[call[name[len], parameter[name[distance_map]]] equal[==] constant[4]] begin[:] variable[sorted_dist] assign[=] <ast.ListComp object at 0x7da1b1942a10> if compare[binary_operation[call[call[name[distance_map]][<ast.UnaryOp object at 0x7da1b1942770>]][constant[1]] - call[call[name[distance_map]][<ast.UnaryOp object at 0x7da1b1942650>]][constant[1]]] less[<] constant[0.5]] begin[:] call[name[tilting_planes].append, parameter[<ast.ListComp object at 0x7da1b19424d0>]] variable[filtered] assign[=] call[name[list], parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da1b196cd00>, name[tilting_planes]]]]] if call[name[len], parameter[name[filtered]]] begin[:] variable[tilting_planes] assign[=] name[filtered] return[name[tilting_planes]]
keyword[def] identifier[get_tiltplanes] ( identifier[self] , identifier[sequence] ): literal[string] identifier[tilting_planes] =[] identifier[distance_map] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[sequence] )): identifier[distance_map] . identifier[append] ([ identifier[sequence] [ identifier[i] ], identifier[self] . identifier[virtual_atoms] . identifier[get_distance] ( identifier[sequence] [ literal[int] ], identifier[sequence] [ identifier[i] ])]) identifier[distance_map] = identifier[sorted] ( identifier[distance_map] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]) keyword[if] identifier[len] ( identifier[distance_map] )== literal[int] : identifier[sorted_dist] =[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[distance_map] ] keyword[if] identifier[distance_map] [- literal[int] ][ literal[int] ]- identifier[distance_map] [- literal[int] ][ literal[int] ]< literal[int] : identifier[tilting_planes] . identifier[append] ([ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[distance_map] ]) keyword[else] : identifier[candidates] =[ identifier[sequence] [ literal[int] ], identifier[sorted_dist] [- literal[int] ]] identifier[next_distance_map] =[] identifier[next_distance_map] . identifier[append] ([ identifier[sorted_dist] [ literal[int] ], identifier[self] . identifier[virtual_atoms] . identifier[get_distance] ( identifier[sorted_dist] [ literal[int] ], identifier[sorted_dist] [ literal[int] ])]) identifier[next_distance_map] . identifier[append] ([ identifier[sorted_dist] [ literal[int] ], identifier[self] . identifier[virtual_atoms] . identifier[get_distance] ( identifier[sorted_dist] [ literal[int] ], identifier[sorted_dist] [ literal[int] ])]) identifier[next_distance_map] = identifier[sorted] ( identifier[next_distance_map] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]) identifier[next_sorted_dist] =[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[next_distance_map] ] keyword[if] identifier[next_distance_map] [ literal[int] ][ literal[int] ]- identifier[next_distance_map] [ literal[int] ][ literal[int] ]< literal[int] : identifier[candidates] . identifier[extend] ([ identifier[next_sorted_dist] [ literal[int] ], identifier[next_sorted_dist] [ literal[int] ]]) keyword[else] : identifier[candidates] . identifier[extend] ([ identifier[sorted_dist] [ literal[int] ], identifier[next_sorted_dist] [ literal[int] ]]) identifier[tilting_planes] . identifier[append] ( identifier[candidates] ) keyword[elif] identifier[len] ( identifier[distance_map] )== literal[int] : identifier[sorted_dist] =[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[distance_map] ] identifier[first_plane] = identifier[sorted_dist] [ literal[int] : literal[int] ] identifier[tilting_planes] . identifier[append] ( identifier[first_plane] ) identifier[distance_map_first_plane] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[distance_map_first_plane] . identifier[append] ([ identifier[first_plane] [ identifier[i] ], identifier[self] . identifier[virtual_atoms] . 
identifier[get_distance] ( identifier[first_plane] [ literal[int] ], identifier[first_plane] [ identifier[i] ])]) identifier[distance_map_first_plane] = identifier[sorted] ( identifier[distance_map_first_plane] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]) identifier[sorted_first_plane] =[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[distance_map_first_plane] ] identifier[tilting_planes] . identifier[append] ([ identifier[sequence] [ literal[int] ], identifier[sorted_dist] [ literal[int] ], identifier[first_plane] [ literal[int] ], identifier[sorted_first_plane] [ literal[int] ]]) identifier[tilting_planes] . identifier[append] ([ identifier[sequence] [ literal[int] ], identifier[sorted_dist] [ literal[int] ], identifier[sorted_first_plane] [ literal[int] ], identifier[sorted_first_plane] [ literal[int] ]]) identifier[filtered] = identifier[list] ( identifier[filter] ( keyword[lambda] identifier[x] : identifier[abs] ( identifier[self] . identifier[virtual_atoms] [ identifier[x] [ literal[int] ]]. identifier[z] - identifier[self] . identifier[virtual_atoms] [ identifier[x] [ literal[int] ]]. identifier[z] )< identifier[self] . identifier[OCTAHEDRON_ATOMS_Z_DIFFERENCE] keyword[and] identifier[abs] ( identifier[self] . identifier[virtual_atoms] [ identifier[x] [ literal[int] ]]. identifier[z] - identifier[self] . identifier[virtual_atoms] [ identifier[x] [ literal[int] ]]. identifier[z] )< identifier[self] . identifier[OCTAHEDRON_ATOMS_Z_DIFFERENCE] keyword[and] identifier[abs] ( identifier[self] . identifier[virtual_atoms] [ identifier[x] [ literal[int] ]]. identifier[z] - identifier[self] . identifier[virtual_atoms] [ identifier[x] [ literal[int] ]]. identifier[z] )< identifier[self] . identifier[OCTAHEDRON_ATOMS_Z_DIFFERENCE] , identifier[tilting_planes] )) keyword[if] identifier[len] ( identifier[filtered] ): identifier[tilting_planes] = identifier[filtered] keyword[return] identifier[tilting_planes]
def get_tiltplanes(self, sequence):
    """ Extract tilting planes based on the distance map """
    tilting_planes = []
    distance_map = []
    for i in range(1, len(sequence)):
        distance_map.append([sequence[i], self.virtual_atoms.get_distance(sequence[0], sequence[i])]) # depends on [control=['for'], data=['i']]
    distance_map = sorted(distance_map, key=lambda x: x[1])
    if len(distance_map) == 4:
        # surface edge case
        # semi-octahedron at surface edge has only one tilting plane to consider
        sorted_dist = [i[0] for i in distance_map]
        if distance_map[-1][1] - distance_map[-2][1] < 0.5:
            # 1st case: max diff < 0.5 Angstrom,
            # meaning all distances to reference atom are similar,
            # therefore the reference atom is above the searched plane
            # and the searched plane consists of other atoms
            tilting_planes.append([i[0] for i in distance_map]) # depends on [control=['if'], data=[]]
        else:
            # 2nd case: reference atom belongs to the searched plane,
            # procedure needs to be repeated with the next atom as reference atom
            candidates = [sequence[0], sorted_dist[-1]]
            next_distance_map = []
            next_distance_map.append([sorted_dist[1], self.virtual_atoms.get_distance(sorted_dist[0], sorted_dist[1])])
            next_distance_map.append([sorted_dist[2], self.virtual_atoms.get_distance(sorted_dist[0], sorted_dist[2])])
            next_distance_map = sorted(next_distance_map, key=lambda x: x[1])
            next_sorted_dist = [i[0] for i in next_distance_map]
            # the next reference atom is taken above the plane (distances are similar)
            if next_distance_map[1][1] - next_distance_map[0][1] < 0.5:
                candidates.extend([next_sorted_dist[0], next_sorted_dist[1]]) # depends on [control=['if'], data=[]]
            else:
                # the next reference atom is taken in the plane (distances are different)
                candidates.extend([sorted_dist[0], next_sorted_dist[1]])
            tilting_planes.append(candidates) # depends on [control=['if'], data=[]]
    elif len(distance_map) == 5:
        # full octahedron case
        # full octahedron has 3 different tilting planes (perpendicular in ideal case)
        sorted_dist = [i[0] for i in distance_map]
        # 1st plane is found as:
        first_plane = sorted_dist[0:4]
        tilting_planes.append(first_plane)
        distance_map_first_plane = []
        for i in range(1, 4):
            distance_map_first_plane.append([first_plane[i], self.virtual_atoms.get_distance(first_plane[0], first_plane[i])]) # depends on [control=['for'], data=['i']]
        distance_map_first_plane = sorted(distance_map_first_plane, key=lambda x: x[1])
        sorted_first_plane = [i[0] for i in distance_map_first_plane]
        # 2nd and 3rd planes are found as:
        tilting_planes.append([sequence[0], sorted_dist[4], first_plane[0], sorted_first_plane[2]])
        tilting_planes.append([sequence[0], sorted_dist[4], sorted_first_plane[0], sorted_first_plane[1]]) # depends on [control=['if'], data=[]]
    # filter planes by Z according to octahedral spatial compound
    filtered = list(filter(lambda x: abs(self.virtual_atoms[x[0]].z - self.virtual_atoms[x[1]].z) < self.OCTAHEDRON_ATOMS_Z_DIFFERENCE and abs(self.virtual_atoms[x[1]].z - self.virtual_atoms[x[2]].z) < self.OCTAHEDRON_ATOMS_Z_DIFFERENCE and (abs(self.virtual_atoms[x[2]].z - self.virtual_atoms[x[3]].z) < self.OCTAHEDRON_ATOMS_Z_DIFFERENCE), tilting_planes)) # Py3
    if len(filtered):
        tilting_planes = filtered # depends on [control=['if'], data=[]]
    return tilting_planes
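The geometric core of the routine is the sorted distance map: neighbours are ranked by distance to the reference vertex, and in the full-octahedron case the four nearest ones make up the first tilting plane. A toy recreation of that step with plain 3-tuples instead of virtual atoms (the coordinates below are invented for illustration):

import math

def first_tilting_plane(reference, neighbours):
    # rank neighbours by distance to the reference vertex, keep the 4 nearest
    ranked = sorted(neighbours, key=lambda p: math.dist(reference, p))
    return ranked[:4]  # mirrors first_plane = sorted_dist[0:4] above

apex = (0.0, 0.0, 1.0)
others = [(1, 0, 0), (0, 1, 0), (-1, 0, 0), (0, -1, 0), (0, 0, -1)]
print(first_tilting_plane(apex, others))  # the four equatorial vertices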
def save(self, fname):
    """Saves symbol to a file.

    You can also use pickle to do the job if you only work on python.
    The advantage of `load`/`save` functions is that the file contents are
    language agnostic. This means the model saved by one language binding
    can be loaded by a different language binding of `MXNet`. You also get
    the benefit of being able to directly load/save from cloud storage(S3, HDFS).

    Parameters
    ----------
    fname : str
        The name of the file.

        - "s3://my-bucket/path/my-s3-symbol"
        - "hdfs://my-bucket/path/my-hdfs-symbol"
        - "/path-to/my-local-symbol"

    See Also
    --------
    symbol.load : Used to load symbol from file.
    """
    if not isinstance(fname, string_types):
        raise TypeError('fname needs to be a string')
    check_call(_LIB.MXSymbolSaveToFile(self.handle, c_str(fname)))
def function[save, parameter[self, fname]]: constant[Saves symbol to a file. You can also use pickle to do the job if you only work on python. The advantage of `load`/`save` functions is that the file contents are language agnostic. This means the model saved by one language binding can be loaded by a different language binding of `MXNet`. You also get the benefit of being able to directly load/save from cloud storage(S3, HDFS). Parameters ---------- fname : str The name of the file. - "s3://my-bucket/path/my-s3-symbol" - "hdfs://my-bucket/path/my-hdfs-symbol" - "/path-to/my-local-symbol" See Also -------- symbol.load : Used to load symbol from file. ] if <ast.UnaryOp object at 0x7da2054a53c0> begin[:] <ast.Raise object at 0x7da2054a6920> call[name[check_call], parameter[call[name[_LIB].MXSymbolSaveToFile, parameter[name[self].handle, call[name[c_str], parameter[name[fname]]]]]]]
keyword[def] identifier[save] ( identifier[self] , identifier[fname] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[fname] , identifier[string_types] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[check_call] ( identifier[_LIB] . identifier[MXSymbolSaveToFile] ( identifier[self] . identifier[handle] , identifier[c_str] ( identifier[fname] )))
def save(self, fname):
    """Saves symbol to a file.

    You can also use pickle to do the job if you only work on python.
    The advantage of `load`/`save` functions is that the file contents are
    language agnostic. This means the model saved by one language binding
    can be loaded by a different language binding of `MXNet`. You also get
    the benefit of being able to directly load/save from cloud storage(S3, HDFS).

    Parameters
    ----------
    fname : str
        The name of the file.

        - "s3://my-bucket/path/my-s3-symbol"
        - "hdfs://my-bucket/path/my-hdfs-symbol"
        - "/path-to/my-local-symbol"

    See Also
    --------
    symbol.load : Used to load symbol from file.
    """
    if not isinstance(fname, string_types):
        raise TypeError('fname needs to be a string') # depends on [control=['if'], data=[]]
    check_call(_LIB.MXSymbolSaveToFile(self.handle, c_str(fname)))
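A hedged round-trip sketch of the API described in the docstring, assuming the mxnet package is importable (the /tmp path and .json suffix are just example choices):

import mxnet as mx

a = mx.sym.Variable("a")
b = mx.sym.Variable("b")
c = a + b                          # a tiny symbolic graph
c.save("/tmp/my-symbol.json")      # serialized as language-agnostic JSON
c2 = mx.sym.load("/tmp/my-symbol.json")
assert c.tojson() == c2.tojson()   # identical graph after the round trip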
def write(elem, a_writer):
    '''
    Write a MicroXML element node (yes, even one representing a whole document)

    elem - Amara MicroXML element node to be written out
    writer - instance of amara3.uxml.writer to implement the writing process
    '''
    a_writer.start_element(elem.xml_name, attribs=elem.xml_attributes)
    for node in elem.xml_children:
        if isinstance(node, tree.element):
            write(node, a_writer)
        elif isinstance(node, tree.text):
            a_writer.text(node)
    a_writer.end_element(elem.xml_name)
    return
def function[write, parameter[elem, a_writer]]: constant[ Write a MicroXML element node (yes, even one representing a whole document) elem - Amara MicroXML element node to be written out writer - instance of amara3.uxml.writer to implement the writing process ] call[name[a_writer].start_element, parameter[name[elem].xml_name]] for taget[name[node]] in starred[name[elem].xml_children] begin[:] if call[name[isinstance], parameter[name[node], name[tree].element]] begin[:] call[name[write], parameter[name[node], name[a_writer]]] call[name[a_writer].end_element, parameter[name[elem].xml_name]] return[None]
keyword[def] identifier[write] ( identifier[elem] , identifier[a_writer] ): literal[string] identifier[a_writer] . identifier[start_element] ( identifier[elem] . identifier[xml_name] , identifier[attribs] = identifier[elem] . identifier[xml_attributes] ) keyword[for] identifier[node] keyword[in] identifier[elem] . identifier[xml_children] : keyword[if] identifier[isinstance] ( identifier[node] , identifier[tree] . identifier[element] ): identifier[write] ( identifier[node] , identifier[a_writer] ) keyword[elif] identifier[isinstance] ( identifier[node] , identifier[tree] . identifier[text] ): identifier[a_writer] . identifier[text] ( identifier[node] ) identifier[a_writer] . identifier[end_element] ( identifier[elem] . identifier[xml_name] ) keyword[return]
def write(elem, a_writer):
    """
    Write a MicroXML element node (yes, even one representing a whole document)

    elem - Amara MicroXML element node to be written out
    writer - instance of amara3.uxml.writer to implement the writing process
    """
    a_writer.start_element(elem.xml_name, attribs=elem.xml_attributes)
    for node in elem.xml_children:
        if isinstance(node, tree.element):
            write(node, a_writer) # depends on [control=['if'], data=[]]
        elif isinstance(node, tree.text):
            a_writer.text(node) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']]
    a_writer.end_element(elem.xml_name)
    return
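write() only relies on three writer methods — start_element(name, attribs=...), text(node), end_element(name) — so any object with that shape can be passed in. A minimal stand-in that prints markup (illustrative only, not the real amara3.uxml.writer API):

class EchoWriter:
    def start_element(self, name, attribs=None):
        attrs = "".join(' %s="%s"' % item for item in (attribs or {}).items())
        print("<%s%s>" % (name, attrs), end="")

    def text(self, t):
        print(t, end="")

    def end_element(self, name):
        print("</%s>" % name, end="")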
def read_string(buff, byteorder='big'): """Read a string from a file-like object.""" length = read_numeric(USHORT, buff, byteorder) return buff.read(length).decode('utf-8')
def function[read_string, parameter[buff, byteorder]]: constant[Read a string from a file-like object.] variable[length] assign[=] call[name[read_numeric], parameter[name[USHORT], name[buff], name[byteorder]]] return[call[call[name[buff].read, parameter[name[length]]].decode, parameter[constant[utf-8]]]]
keyword[def] identifier[read_string] ( identifier[buff] , identifier[byteorder] = literal[string] ): literal[string] identifier[length] = identifier[read_numeric] ( identifier[USHORT] , identifier[buff] , identifier[byteorder] ) keyword[return] identifier[buff] . identifier[read] ( identifier[length] ). identifier[decode] ( literal[string] )
def read_string(buff, byteorder='big'): """Read a string from a file-like object.""" length = read_numeric(USHORT, buff, byteorder) return buff.read(length).decode('utf-8')
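read_string pairs a length prefix with UTF-8 payload bytes; here USHORT is assumed to be a two-byte unsigned integer, which makes the inverse easy to write with struct. A self-contained round trip over an in-memory buffer:

import io
import struct

def write_string(buff, value, byteorder='big'):
    # mirror of read_string: 2-byte unsigned length prefix, then UTF-8 bytes
    data = value.encode('utf-8')
    fmt = '>H' if byteorder == 'big' else '<H'
    buff.write(struct.pack(fmt, len(data)))
    buff.write(data)

buff = io.BytesIO()
write_string(buff, "hello")
buff.seek(0)
length = struct.unpack('>H', buff.read(2))[0]  # what read_numeric(USHORT, ...) is assumed to do
assert buff.read(length).decode('utf-8') == "hello"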
def AnalizarLiquidacion(self, aut, liq=None, ajuste=False):
    "Internal method to analyze the AFIP response"
    # process the basic settlement data (returned by consultar):
    if liq:
        self.params_out = dict(
                    pto_emision=liq.get('ptoEmision'),
                    nro_orden=liq.get('nroOrden'),
                    cuit_comprador=liq.get('cuitComprador'),
                    nro_act_comprador=liq.get('nroActComprador'),
                    nro_ing_bruto_comprador=liq.get('nroIngBrutoComprador'),
                    cod_tipo_operacion=liq.get('codTipoOperacion'),
                    es_liquidacion_propia=liq.get('esLiquidacionPropia'),
                    es_canje=liq.get('esCanje'),
                    cod_puerto=liq.get('codPuerto'),
                    des_puerto_localidad=liq.get('desPuertoLocalidad'),
                    cod_grano=liq.get('codGrano'),
                    cuit_vendedor=liq.get('cuitVendedor'),
                    nro_ing_bruto_vendedor=liq.get('nroIngBrutoVendedor'),
                    actua_corredor=liq.get('actuaCorredor'),
                    liquida_corredor=liq.get('liquidaCorredor'),
                    cuit_corredor=liq.get('cuitCorredor'),
                    comision_corredor=liq.get('comisionCorredor'),
                    nro_ing_bruto_corredor=liq.get('nroIngBrutoCorredor'),
                    fecha_precio_operacion=liq.get('fechaPrecioOperacion'),
                    precio_ref_tn=liq.get('precioRefTn'),
                    cod_grado_ref=liq.get('codGradoRef'),
                    cod_grado_ent=liq.get('codGradoEnt'),
                    factor_ent=liq.get('factorEnt'),
                    precio_flete_tn=liq.get('precioFleteTn'),
                    cont_proteico=liq.get('contProteico'),
                    alic_iva_operacion=liq.get('alicIvaOperacion'),
                    campania_ppal=liq.get('campaniaPPal'),
                    cod_localidad_procedencia=liq.get('codLocalidadProcedencia'),
                    cod_prov_procedencia=liq.get('codProvProcedencia'),
                    datos_adicionales=liq.get('datosAdicionales'),
                    peso_neto_sin_certificado=liq.get('pesoNetoSinCertificado'),
                    cod_localidad_procedencia_sin_certificado=liq.get('codLocalidadProcedenciaSinCertificado'),
                    cod_prov_procedencia_sin_certificado=liq.get('codProvProcedenciaSinCertificado'),
                    certificados=[],
                    )
        if ajuste:
            self.params_out.update(
                # adjustments:
                diferencia_peso_neto=liq.get('diferenciaPesoNeto'),
                diferencia_precio_operacion=liq.get('diferenciaPrecioOperacion'),
                cod_grado=liq.get('codGrado'),
                val_grado=liq.get('valGrado'),
                factor=liq.get('factor'),
                diferencia_precio_flete_tn=liq.get('diferenciaPrecioFleteTn'),
                concepto_importe_iva_0=liq.get('conceptoImporteIva0'),
                importe_ajustar_iva_0=liq.get('importeAjustarIva0'),
                concepto_importe_iva_105=liq.get('conceptoImporteIva105'),
                importe_ajustar_iva_105=liq.get('importeAjustarIva105'),
                concepto_importe_iva_21=liq.get('conceptoImporteIva21'),
                importe_ajustar_iva_21=liq.get('importeAjustarIva21'),
                )
            # analyze the detail of adjusted amounts broken down by VAT rate (alicuota)
            # (the same fields are used for compatibility and consistency)
            for it in liq.get("importes", liq.get("importe")):
                # in LSG adjustments the amounts are not grouped into a subtype...
                if 'importeReturn' in it:
                    it = it['importeReturn'][0]     # TODO: review SOAP
                tasa = "iva_%s" % str(it['alicuota']).replace(".", "").strip()
                self.params_out["concepto_importe_%s" % tasa] = it['concepto']
                self.params_out["importe_ajustar_%s" % tasa] = it['importe']
                self.params_out["iva_calculado_%s" % tasa] = it['ivaCalculado']
        if 'certificados' in liq:
            for c in liq['certificados']:
                cert = c['certificado']
                self.params_out['certificados'].append(dict(
                            tipo_certificado_deposito=cert['tipoCertificadoDeposito'],
                            nro_certificado_deposito=cert['nroCertificadoDeposito'],
                            peso_neto=cert['pesoNeto'],
                            cod_localidad_procedencia=cert['codLocalidadProcedencia'],
                            cod_prov_procedencia=cert['codProvProcedencia'],
                            campania=cert['campania'],
                            fecha_cierre=cert['fechaCierre'],
                        ))
    self.params_out['errores'] = self.errores
    # process the response of autorizar / ajustar (and consultar):
    if aut:
        self.TotalDeduccion = aut.get('totalDeduccion')
        self.TotalRetencion = aut.get('totalRetencion')
        self.TotalRetencionAfip = aut.get('totalRetencionAfip')
        self.TotalOtrasRetenciones = aut.get('totalOtrasRetenciones')
        self.TotalNetoAPagar = aut.get('totalNetoAPagar')
        self.TotalIvaRg4310_18 = aut.get('totalIvaRg4310_18')
        self.TotalPagoSegunCondicion = aut.get('totalPagoSegunCondicion')
        self.COE = str(aut.get('coe', ''))
        self.COEAjustado = aut.get('coeAjustado')
        self.Estado = aut.get('estado', '')
        self.NroContrato = aut.get('numeroContrato', '')
        # update the output parameters:
        self.params_out['coe'] = self.COE
        self.params_out['coe_ajustado'] = self.COEAjustado
        self.params_out['estado'] = self.Estado
        self.params_out['total_deduccion'] = self.TotalDeduccion
        self.params_out['total_retencion'] = self.TotalRetencion
        self.params_out['total_retencion_afip'] = self.TotalRetencionAfip
        self.params_out['total_otras_retenciones'] = self.TotalOtrasRetenciones
        self.params_out['total_neto_a_pagar'] = self.TotalNetoAPagar
        self.params_out['total_iva_rg_4310_18'] = self.TotalIvaRg4310_18
        self.params_out['total_pago_segun_condicion'] = self.TotalPagoSegunCondicion
        # additional data:
        self.NroOrden = self.params_out['nro_orden'] = aut.get('nroOrden')
        self.params_out['cod_tipo_ajuste'] = aut.get('codTipoAjuste')
        fecha = aut.get('fechaLiquidacion')
        if fecha:
            fecha = str(fecha)
        self.params_out['fecha_liquidacion'] = fecha
        self.params_out['importe_iva'] = aut.get('importeIva')
        self.params_out['nro_op_comercial'] = aut.get('nroOpComercial')
        self.params_out['operacion_con_iva'] = aut.get('operacionConIva')
        self.params_out['precio_operacion'] = aut.get('precioOperacion')
        self.params_out['total_peso_neto'] = aut.get('totalPesoNeto')
        self.params_out['subtotal'] = aut.get('subTotal')
        # LSG (specific):
        self.params_out['total_deducciones'] = aut.get('totalDeducciones')
        if 'todalPercepciones' in aut:  # typo in AFIP's WSDL...
            self.params_out['total_percepciones'] = aut.get('todalPercepciones')
        else:
            self.params_out['total_percepciones'] = aut.get('totalPercepciones')
        # sub-structures:
        self.params_out['retenciones'] = []
        self.params_out['deducciones'] = []
        self.params_out['percepciones'] = []
        for retret in aut.get("retenciones", []):
            retret = retret['retencionReturn']
            self.params_out['retenciones'].append({
                        'importe_retencion': retret['importeRetencion'],
                        'alicuota': retret['retencion'].get('alicuota'),
                        'base_calculo': retret['retencion'].get('baseCalculo'),
                        'codigo_concepto': retret['retencion'].get('codigoConcepto'),
                        'detalle_aclaratorio': (retret['retencion'].get('detalleAclaratorio') or "").replace("\n", ""),
                        'importe_certificado_retencion': retret['retencion'].get('importeCertificadoRetencion'),
                        'nro_certificado_retencion': retret['retencion'].get('nroCertificadoRetencion'),
                        'fecha_certificado_retencion': retret['retencion'].get('fechaCertificadoRetencion'),
                        })
        for dedret in aut.get("deducciones", []):
            dedret = dedret['deduccionReturn']
            self.params_out['deducciones'].append({
                        'importe_deduccion': dedret['importeDeduccion'],
                        'importe_iva': dedret.get('importeIva'),
                        'alicuota': dedret['deduccion'].get('alicuotaIva'),
                        'base_calculo': dedret['deduccion'].get('baseCalculo'),
                        'codigo_concepto': dedret['deduccion'].get('codigoConcepto'),
                        'detalle_aclaratorio': dedret['deduccion'].get('detalleAclaratorio', "").replace("\n", ""),
                        'dias_almacenaje': dedret['deduccion'].get('diasAlmacenaje'),
                        'precio_pkg_diario': dedret['deduccion'].get('precioPKGdiario'),
                        'comision_gastos_adm': dedret['deduccion'].get('comisionGastosAdm'),
                        })
        for perret in aut.get("percepciones", []):
            perret = perret.get('percepcionReturn', perret)
            self.params_out['percepciones'].append({
                        'importe_final': perret['percepcion']['importeFinal'],
                        'alicuota': perret['percepcion'].get('alicuota'),
                        'base_calculo': perret['percepcion'].get('baseCalculo'),
                        'descripcion': perret['percepcion'].get('descripcion', "").replace("\n", ""),
                        })
def function[AnalizarLiquidacion, parameter[self, aut, liq, ajuste]]: constant[Método interno para analizar la respuesta de AFIP] if name[liq] begin[:] name[self].params_out assign[=] call[name[dict], parameter[]] if name[ajuste] begin[:] call[name[self].params_out.update, parameter[]] for taget[name[it]] in starred[call[name[liq].get, parameter[constant[importes], call[name[liq].get, parameter[constant[importe]]]]]] begin[:] if compare[constant[importeReturn] in name[it]] begin[:] variable[it] assign[=] call[call[name[it]][constant[importeReturn]]][constant[0]] variable[tasa] assign[=] binary_operation[constant[iva_%s] <ast.Mod object at 0x7da2590d6920> call[call[call[name[str], parameter[call[name[it]][constant[alicuota]]]].replace, parameter[constant[.], constant[]]].strip, parameter[]]] call[name[self].params_out][binary_operation[constant[concepto_importe_%s] <ast.Mod object at 0x7da2590d6920> name[tasa]]] assign[=] call[name[it]][constant[concepto]] call[name[self].params_out][binary_operation[constant[importe_ajustar_%s] <ast.Mod object at 0x7da2590d6920> name[tasa]]] assign[=] call[name[it]][constant[importe]] call[name[self].params_out][binary_operation[constant[iva_calculado_%s] <ast.Mod object at 0x7da2590d6920> name[tasa]]] assign[=] call[name[it]][constant[ivaCalculado]] if compare[constant[certificados] in name[liq]] begin[:] for taget[name[c]] in starred[call[name[liq]][constant[certificados]]] begin[:] variable[cert] assign[=] call[name[c]][constant[certificado]] call[call[name[self].params_out][constant[certificados]].append, parameter[call[name[dict], parameter[]]]] call[name[self].params_out][constant[errores]] assign[=] name[self].errores if name[aut] begin[:] name[self].TotalDeduccion assign[=] call[name[aut].get, parameter[constant[totalDeduccion]]] name[self].TotalRetencion assign[=] call[name[aut].get, parameter[constant[totalRetencion]]] name[self].TotalRetencionAfip assign[=] call[name[aut].get, parameter[constant[totalRetencionAfip]]] name[self].TotalOtrasRetenciones assign[=] call[name[aut].get, parameter[constant[totalOtrasRetenciones]]] name[self].TotalNetoAPagar assign[=] call[name[aut].get, parameter[constant[totalNetoAPagar]]] name[self].TotalIvaRg4310_18 assign[=] call[name[aut].get, parameter[constant[totalIvaRg4310_18]]] name[self].TotalPagoSegunCondicion assign[=] call[name[aut].get, parameter[constant[totalPagoSegunCondicion]]] name[self].COE assign[=] call[name[str], parameter[call[name[aut].get, parameter[constant[coe], constant[]]]]] name[self].COEAjustado assign[=] call[name[aut].get, parameter[constant[coeAjustado]]] name[self].Estado assign[=] call[name[aut].get, parameter[constant[estado], constant[]]] name[self].NroContrato assign[=] call[name[aut].get, parameter[constant[numeroContrato], constant[]]] call[name[self].params_out][constant[coe]] assign[=] name[self].COE call[name[self].params_out][constant[coe_ajustado]] assign[=] name[self].COEAjustado call[name[self].params_out][constant[estado]] assign[=] name[self].Estado call[name[self].params_out][constant[total_deduccion]] assign[=] name[self].TotalDeduccion call[name[self].params_out][constant[total_retencion]] assign[=] name[self].TotalRetencion call[name[self].params_out][constant[total_retencion_afip]] assign[=] name[self].TotalRetencionAfip call[name[self].params_out][constant[total_otras_retenciones]] assign[=] name[self].TotalOtrasRetenciones call[name[self].params_out][constant[total_neto_a_pagar]] assign[=] name[self].TotalNetoAPagar 
call[name[self].params_out][constant[total_iva_rg_4310_18]] assign[=] name[self].TotalIvaRg4310_18 call[name[self].params_out][constant[total_pago_segun_condicion]] assign[=] name[self].TotalPagoSegunCondicion name[self].NroOrden assign[=] call[name[aut].get, parameter[constant[nroOrden]]] call[name[self].params_out][constant[cod_tipo_ajuste]] assign[=] call[name[aut].get, parameter[constant[codTipoAjuste]]] variable[fecha] assign[=] call[name[aut].get, parameter[constant[fechaLiquidacion]]] if name[fecha] begin[:] variable[fecha] assign[=] call[name[str], parameter[name[fecha]]] call[name[self].params_out][constant[fecha_liquidacion]] assign[=] name[fecha] call[name[self].params_out][constant[importe_iva]] assign[=] call[name[aut].get, parameter[constant[importeIva]]] call[name[self].params_out][constant[nro_op_comercial]] assign[=] call[name[aut].get, parameter[constant[nroOpComercial]]] call[name[self].params_out][constant[operacion_con_iva]] assign[=] call[name[aut].get, parameter[constant[operacionConIva]]] call[name[self].params_out][constant[precio_operacion]] assign[=] call[name[aut].get, parameter[constant[precioOperacion]]] call[name[self].params_out][constant[total_peso_neto]] assign[=] call[name[aut].get, parameter[constant[totalPesoNeto]]] call[name[self].params_out][constant[subtotal]] assign[=] call[name[aut].get, parameter[constant[subTotal]]] call[name[self].params_out][constant[total_deducciones]] assign[=] call[name[aut].get, parameter[constant[totalDeducciones]]] if compare[constant[todalPercepciones] in name[aut]] begin[:] call[name[self].params_out][constant[total_percepciones]] assign[=] call[name[aut].get, parameter[constant[todalPercepciones]]] call[name[self].params_out][constant[retenciones]] assign[=] list[[]] call[name[self].params_out][constant[deducciones]] assign[=] list[[]] call[name[self].params_out][constant[percepciones]] assign[=] list[[]] for taget[name[retret]] in starred[call[name[aut].get, parameter[constant[retenciones], list[[]]]]] begin[:] variable[retret] assign[=] call[name[retret]][constant[retencionReturn]] call[call[name[self].params_out][constant[retenciones]].append, parameter[dictionary[[<ast.Constant object at 0x7da18bc73280>, <ast.Constant object at 0x7da18bc71c30>, <ast.Constant object at 0x7da18bc72ef0>, <ast.Constant object at 0x7da18bc71c00>, <ast.Constant object at 0x7da18bc71a80>, <ast.Constant object at 0x7da18bc720b0>, <ast.Constant object at 0x7da18bc70310>, <ast.Constant object at 0x7da18bc72f20>], [<ast.Subscript object at 0x7da18bc73a30>, <ast.Call object at 0x7da18bc70370>, <ast.Call object at 0x7da18bc72680>, <ast.Call object at 0x7da18bc73340>, <ast.Call object at 0x7da18bc70250>, <ast.Call object at 0x7da18bc73e50>, <ast.Call object at 0x7da18bc71ba0>, <ast.Call object at 0x7da18bc72d40>]]]] for taget[name[dedret]] in starred[call[name[aut].get, parameter[constant[deducciones], list[[]]]]] begin[:] variable[dedret] assign[=] call[name[dedret]][constant[deduccionReturn]] call[call[name[self].params_out][constant[deducciones]].append, parameter[dictionary[[<ast.Constant object at 0x7da18bc72740>, <ast.Constant object at 0x7da18bc706d0>, <ast.Constant object at 0x7da18bc73070>, <ast.Constant object at 0x7da18bc724d0>, <ast.Constant object at 0x7da18bc72590>, <ast.Constant object at 0x7da18bc70b50>, <ast.Constant object at 0x7da18bc725c0>, <ast.Constant object at 0x7da18bc727a0>, <ast.Constant object at 0x7da18bc71660>], [<ast.Subscript object at 0x7da18bc72b60>, <ast.Call object at 0x7da18bc72bc0>, <ast.Call object at 
0x7da18bc72b00>, <ast.Call object at 0x7da18bc707c0>, <ast.Call object at 0x7da18bc738b0>, <ast.Call object at 0x7da18bc73ca0>, <ast.Call object at 0x7da18bc729b0>, <ast.Call object at 0x7da18bc728f0>, <ast.Call object at 0x7da18bc73130>]]]] for taget[name[perret]] in starred[call[name[aut].get, parameter[constant[percepciones], list[[]]]]] begin[:] variable[perret] assign[=] call[name[perret].get, parameter[constant[percepcionReturn], name[perret]]] call[call[name[self].params_out][constant[percepciones]].append, parameter[dictionary[[<ast.Constant object at 0x7da1b1d556f0>, <ast.Constant object at 0x7da1b1d551e0>, <ast.Constant object at 0x7da1b1d55c30>, <ast.Constant object at 0x7da1b1d556c0>], [<ast.Subscript object at 0x7da1b1d55c00>, <ast.Call object at 0x7da1b1d55720>, <ast.Call object at 0x7da1b1d55a80>, <ast.Call object at 0x7da1b1d55870>]]]]
keyword[def] identifier[AnalizarLiquidacion] ( identifier[self] , identifier[aut] , identifier[liq] = keyword[None] , identifier[ajuste] = keyword[False] ): literal[string] keyword[if] identifier[liq] : identifier[self] . identifier[params_out] = identifier[dict] ( identifier[pto_emision] = identifier[liq] . identifier[get] ( literal[string] ), identifier[nro_orden] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cuit_comprador] = identifier[liq] . identifier[get] ( literal[string] ), identifier[nro_act_comprador] = identifier[liq] . identifier[get] ( literal[string] ), identifier[nro_ing_bruto_comprador] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_tipo_operacion] = identifier[liq] . identifier[get] ( literal[string] ), identifier[es_liquidacion_propia] = identifier[liq] . identifier[get] ( literal[string] ), identifier[es_canje] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_puerto] = identifier[liq] . identifier[get] ( literal[string] ), identifier[des_puerto_localidad] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_grano] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cuit_vendedor] = identifier[liq] . identifier[get] ( literal[string] ), identifier[nro_ing_bruto_vendedor] = identifier[liq] . identifier[get] ( literal[string] ), identifier[actua_corredor] = identifier[liq] . identifier[get] ( literal[string] ), identifier[liquida_corredor] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cuit_corredor] = identifier[liq] . identifier[get] ( literal[string] ), identifier[comision_corredor] = identifier[liq] . identifier[get] ( literal[string] ), identifier[nro_ing_bruto_corredor] = identifier[liq] . identifier[get] ( literal[string] ), identifier[fecha_precio_operacion] = identifier[liq] . identifier[get] ( literal[string] ), identifier[precio_ref_tn] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_grado_ref] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_grado_ent] = identifier[liq] . identifier[get] ( literal[string] ), identifier[factor_ent] = identifier[liq] . identifier[get] ( literal[string] ), identifier[precio_flete_tn] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cont_proteico] = identifier[liq] . identifier[get] ( literal[string] ), identifier[alic_iva_operacion] = identifier[liq] . identifier[get] ( literal[string] ), identifier[campania_ppal] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_localidad_procedencia] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_prov_procedencia] = identifier[liq] . identifier[get] ( literal[string] ), identifier[datos_adicionales] = identifier[liq] . identifier[get] ( literal[string] ), identifier[peso_neto_sin_certificado] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_localidad_procedencia_sin_certificado] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_prov_procedencia_sin_certificado] = identifier[liq] . identifier[get] ( literal[string] ), identifier[certificados] =[], ) keyword[if] identifier[ajuste] : identifier[self] . identifier[params_out] . identifier[update] ( identifier[diferencia_peso_neto] = identifier[liq] . identifier[get] ( literal[string] ), identifier[diferencia_precio_operacion] = identifier[liq] . identifier[get] ( literal[string] ), identifier[cod_grado] = identifier[liq] . 
identifier[get] ( literal[string] ), identifier[val_grado] = identifier[liq] . identifier[get] ( literal[string] ), identifier[factor] = identifier[liq] . identifier[get] ( literal[string] ), identifier[diferencia_precio_flete_tn] = identifier[liq] . identifier[get] ( literal[string] ), identifier[concepto_importe_iva_0] = identifier[liq] . identifier[get] ( literal[string] ), identifier[importe_ajustar_iva_0] = identifier[liq] . identifier[get] ( literal[string] ), identifier[concepto_importe_iva_105] = identifier[liq] . identifier[get] ( literal[string] ), identifier[importe_ajustar_iva_105] = identifier[liq] . identifier[get] ( literal[string] ), identifier[concepto_importe_iva_21] = identifier[liq] . identifier[get] ( literal[string] ), identifier[importe_ajustar_iva_21] = identifier[liq] . identifier[get] ( literal[string] ), ) keyword[for] identifier[it] keyword[in] identifier[liq] . identifier[get] ( literal[string] , identifier[liq] . identifier[get] ( literal[string] )): keyword[if] literal[string] keyword[in] identifier[it] : identifier[it] = identifier[it] [ literal[string] ][ literal[int] ] identifier[tasa] = literal[string] % identifier[str] ( identifier[it] [ literal[string] ]). identifier[replace] ( literal[string] , literal[string] ). identifier[strip] () identifier[self] . identifier[params_out] [ literal[string] % identifier[tasa] ]= identifier[it] [ literal[string] ] identifier[self] . identifier[params_out] [ literal[string] % identifier[tasa] ]= identifier[it] [ literal[string] ] identifier[self] . identifier[params_out] [ literal[string] % identifier[tasa] ]= identifier[it] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[liq] : keyword[for] identifier[c] keyword[in] identifier[liq] [ literal[string] ]: identifier[cert] = identifier[c] [ literal[string] ] identifier[self] . identifier[params_out] [ literal[string] ]. identifier[append] ( identifier[dict] ( identifier[tipo_certificado_deposito] = identifier[cert] [ literal[string] ], identifier[nro_certificado_deposito] = identifier[cert] [ literal[string] ], identifier[peso_neto] = identifier[cert] [ literal[string] ], identifier[cod_localidad_procedencia] = identifier[cert] [ literal[string] ], identifier[cod_prov_procedencia] = identifier[cert] [ literal[string] ], identifier[campania] = identifier[cert] [ literal[string] ], identifier[fecha_cierre] = identifier[cert] [ literal[string] ], )) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[errores] keyword[if] identifier[aut] : identifier[self] . identifier[TotalDeduccion] = identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[TotalRetencion] = identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[TotalRetencionAfip] = identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[TotalOtrasRetenciones] = identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[TotalNetoAPagar] = identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[TotalIvaRg4310_18] = identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[TotalPagoSegunCondicion] = identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[COE] = identifier[str] ( identifier[aut] . identifier[get] ( literal[string] , literal[string] )) identifier[self] . identifier[COEAjustado] = identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . 
identifier[Estado] = identifier[aut] . identifier[get] ( literal[string] , literal[string] ) identifier[self] . identifier[NroContrato] = identifier[aut] . identifier[get] ( literal[string] , literal[string] ) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[COE] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[COEAjustado] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[Estado] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[TotalDeduccion] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[TotalRetencion] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[TotalRetencionAfip] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[TotalOtrasRetenciones] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[TotalNetoAPagar] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[TotalIvaRg4310_18] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[self] . identifier[TotalPagoSegunCondicion] identifier[self] . identifier[NroOrden] = identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) identifier[fecha] = identifier[aut] . identifier[get] ( literal[string] ) keyword[if] identifier[fecha] : identifier[fecha] = identifier[str] ( identifier[fecha] ) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[fecha] identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[aut] : identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) keyword[else] : identifier[self] . identifier[params_out] [ literal[string] ]= identifier[aut] . identifier[get] ( literal[string] ) identifier[self] . identifier[params_out] [ literal[string] ]=[] identifier[self] . identifier[params_out] [ literal[string] ]=[] identifier[self] . identifier[params_out] [ literal[string] ]=[] keyword[for] identifier[retret] keyword[in] identifier[aut] . identifier[get] ( literal[string] ,[]): identifier[retret] = identifier[retret] [ literal[string] ] identifier[self] . identifier[params_out] [ literal[string] ]. identifier[append] ({ literal[string] : identifier[retret] [ literal[string] ], literal[string] : identifier[retret] [ literal[string] ]. 
identifier[get] ( literal[string] ), literal[string] : identifier[retret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[retret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] :( identifier[retret] [ literal[string] ]. identifier[get] ( literal[string] ) keyword[or] literal[string] ). identifier[replace] ( literal[string] , literal[string] ), literal[string] : identifier[retret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[retret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[retret] [ literal[string] ]. identifier[get] ( literal[string] ), }) keyword[for] identifier[dedret] keyword[in] identifier[aut] . identifier[get] ( literal[string] ,[]): identifier[dedret] = identifier[dedret] [ literal[string] ] identifier[self] . identifier[params_out] [ literal[string] ]. identifier[append] ({ literal[string] : identifier[dedret] [ literal[string] ], literal[string] : identifier[dedret] . identifier[get] ( literal[string] ), literal[string] : identifier[dedret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[dedret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[dedret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[dedret] [ literal[string] ]. identifier[get] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ), literal[string] : identifier[dedret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[dedret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[dedret] [ literal[string] ]. identifier[get] ( literal[string] ), }) keyword[for] identifier[perret] keyword[in] identifier[aut] . identifier[get] ( literal[string] ,[]): identifier[perret] = identifier[perret] . identifier[get] ( literal[string] , identifier[perret] ) identifier[self] . identifier[params_out] [ literal[string] ]. identifier[append] ({ literal[string] : identifier[perret] [ literal[string] ][ literal[string] ], literal[string] : identifier[perret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[perret] [ literal[string] ]. identifier[get] ( literal[string] ), literal[string] : identifier[perret] [ literal[string] ]. identifier[get] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ), })
def AnalizarLiquidacion(self, aut, liq=None, ajuste=False):
    """Internal method to parse the AFIP response"""
    # process the basic settlement data (returned by consultar):
    if liq:
        self.params_out = dict(pto_emision=liq.get('ptoEmision'), nro_orden=liq.get('nroOrden'), cuit_comprador=liq.get('cuitComprador'), nro_act_comprador=liq.get('nroActComprador'), nro_ing_bruto_comprador=liq.get('nroIngBrutoComprador'), cod_tipo_operacion=liq.get('codTipoOperacion'), es_liquidacion_propia=liq.get('esLiquidacionPropia'), es_canje=liq.get('esCanje'), cod_puerto=liq.get('codPuerto'), des_puerto_localidad=liq.get('desPuertoLocalidad'), cod_grano=liq.get('codGrano'), cuit_vendedor=liq.get('cuitVendedor'), nro_ing_bruto_vendedor=liq.get('nroIngBrutoVendedor'), actua_corredor=liq.get('actuaCorredor'), liquida_corredor=liq.get('liquidaCorredor'), cuit_corredor=liq.get('cuitCorredor'), comision_corredor=liq.get('comisionCorredor'), nro_ing_bruto_corredor=liq.get('nroIngBrutoCorredor'), fecha_precio_operacion=liq.get('fechaPrecioOperacion'), precio_ref_tn=liq.get('precioRefTn'), cod_grado_ref=liq.get('codGradoRef'), cod_grado_ent=liq.get('codGradoEnt'), factor_ent=liq.get('factorEnt'), precio_flete_tn=liq.get('precioFleteTn'), cont_proteico=liq.get('contProteico'), alic_iva_operacion=liq.get('alicIvaOperacion'), campania_ppal=liq.get('campaniaPPal'), cod_localidad_procedencia=liq.get('codLocalidadProcedencia'), cod_prov_procedencia=liq.get('codProvProcedencia'), datos_adicionales=liq.get('datosAdicionales'), peso_neto_sin_certificado=liq.get('pesoNetoSinCertificado'), cod_localidad_procedencia_sin_certificado=liq.get('codLocalidadProcedenciaSinCertificado'), cod_prov_procedencia_sin_certificado=liq.get('codProvProcedenciaSinCertificado'), certificados=[])
        if ajuste:
            # adjustments:
            self.params_out.update(diferencia_peso_neto=liq.get('diferenciaPesoNeto'), diferencia_precio_operacion=liq.get('diferenciaPrecioOperacion'), cod_grado=liq.get('codGrado'), val_grado=liq.get('valGrado'), factor=liq.get('factor'), diferencia_precio_flete_tn=liq.get('diferenciaPrecioFleteTn'), concepto_importe_iva_0=liq.get('conceptoImporteIva0'), importe_ajustar_iva_0=liq.get('importeAjustarIva0'), concepto_importe_iva_105=liq.get('conceptoImporteIva105'), importe_ajustar_iva_105=liq.get('importeAjustarIva105'), concepto_importe_iva_21=liq.get('conceptoImporteIva21'), importe_ajustar_iva_21=liq.get('importeAjustarIva21'))
            # parse the detail of adjusted amounts, broken down by tax rate
            # (the same fields are reused for compatibility and consistency)
            for it in liq.get('importes', liq.get('importe')):  # in LSG adjustments the amounts are not grouped under a subtype...
                if 'importeReturn' in it:
                    it = it['importeReturn'][0] # TODO: review the SOAP response # depends on [control=['if'], data=['it']]
                tasa = 'iva_%s' % str(it['alicuota']).replace('.', '').strip()
                self.params_out['concepto_importe_%s' % tasa] = it['concepto']
                self.params_out['importe_ajustar_%s' % tasa] = it['importe']
                self.params_out['iva_calculado_%s' % tasa] = it['ivaCalculado'] # depends on [control=['for'], data=['it']] # depends on [control=['if'], data=[]]
        if 'certificados' in liq:
            for c in liq['certificados']:
                cert = c['certificado']
                self.params_out['certificados'].append(dict(tipo_certificado_deposito=cert['tipoCertificadoDeposito'], nro_certificado_deposito=cert['nroCertificadoDeposito'], peso_neto=cert['pesoNeto'], cod_localidad_procedencia=cert['codLocalidadProcedencia'], cod_prov_procedencia=cert['codProvProcedencia'], campania=cert['campania'], fecha_cierre=cert['fechaCierre'])) # depends on [control=['for'], data=['c']] # depends on [control=['if'], data=['liq']] # depends on [control=['if'], data=[]]
    self.params_out['errores'] = self.errores
    # process the response from autorizar, ajustar (and consultar):
    if aut:
        self.TotalDeduccion = aut.get('totalDeduccion')
        self.TotalRetencion = aut.get('totalRetencion')
        self.TotalRetencionAfip = aut.get('totalRetencionAfip')
        self.TotalOtrasRetenciones = aut.get('totalOtrasRetenciones')
        self.TotalNetoAPagar = aut.get('totalNetoAPagar')
        self.TotalIvaRg4310_18 = aut.get('totalIvaRg4310_18')
        self.TotalPagoSegunCondicion = aut.get('totalPagoSegunCondicion')
        self.COE = str(aut.get('coe', ''))
        self.COEAjustado = aut.get('coeAjustado')
        self.Estado = aut.get('estado', '')
        self.NroContrato = aut.get('numeroContrato', '')
        # update output parameters:
        self.params_out['coe'] = self.COE
        self.params_out['coe_ajustado'] = self.COEAjustado
        self.params_out['estado'] = self.Estado
        self.params_out['total_deduccion'] = self.TotalDeduccion
        self.params_out['total_retencion'] = self.TotalRetencion
        self.params_out['total_retencion_afip'] = self.TotalRetencionAfip
        self.params_out['total_otras_retenciones'] = self.TotalOtrasRetenciones
        self.params_out['total_neto_a_pagar'] = self.TotalNetoAPagar
        self.params_out['total_iva_rg_4310_18'] = self.TotalIvaRg4310_18
        self.params_out['total_pago_segun_condicion'] = self.TotalPagoSegunCondicion
        # additional data:
        self.NroOrden = self.params_out['nro_orden'] = aut.get('nroOrden')
        self.params_out['cod_tipo_ajuste'] = aut.get('codTipoAjuste')
        fecha = aut.get('fechaLiquidacion')
        if fecha:
            fecha = str(fecha) # depends on [control=['if'], data=[]]
        self.params_out['fecha_liquidacion'] = fecha
        self.params_out['importe_iva'] = aut.get('importeIva')
        self.params_out['nro_op_comercial'] = aut.get('nroOpComercial')
        self.params_out['operacion_con_iva'] = aut.get('operacionConIva')
        self.params_out['precio_operacion'] = aut.get('precioOperacion')
        self.params_out['total_peso_neto'] = aut.get('totalPesoNeto')
        self.params_out['subtotal'] = aut.get('subTotal')
        # LSG (specific fields):
        self.params_out['total_deducciones'] = aut.get('totalDeducciones')
        if 'todalPercepciones' in aut:  # typo in AFIP's WSDL...
            self.params_out['total_percepciones'] = aut.get('todalPercepciones') # depends on [control=['if'], data=['aut']]
        else:
            self.params_out['total_percepciones'] = aut.get('totalPercepciones')
        # sub-structures:
        self.params_out['retenciones'] = []
        self.params_out['deducciones'] = []
        self.params_out['percepciones'] = []
        for retret in aut.get('retenciones', []):
            retret = retret['retencionReturn']
            self.params_out['retenciones'].append({'importe_retencion': retret['importeRetencion'], 'alicuota': retret['retencion'].get('alicuota'), 'base_calculo': retret['retencion'].get('baseCalculo'), 'codigo_concepto': retret['retencion'].get('codigoConcepto'), 'detalle_aclaratorio': (retret['retencion'].get('detalleAclaratorio') or '').replace('\n', ''), 'importe_certificado_retencion': retret['retencion'].get('importeCertificadoRetencion'), 'nro_certificado_retencion': retret['retencion'].get('nroCertificadoRetencion'), 'fecha_certificado_retencion': retret['retencion'].get('fechaCertificadoRetencion')}) # depends on [control=['for'], data=['retret']]
        for dedret in aut.get('deducciones', []):
            dedret = dedret['deduccionReturn']
            self.params_out['deducciones'].append({'importe_deduccion': dedret['importeDeduccion'], 'importe_iva': dedret.get('importeIva'), 'alicuota': dedret['deduccion'].get('alicuotaIva'), 'base_calculo': dedret['deduccion'].get('baseCalculo'), 'codigo_concepto': dedret['deduccion'].get('codigoConcepto'), 'detalle_aclaratorio': dedret['deduccion'].get('detalleAclaratorio', '').replace('\n', ''), 'dias_almacenaje': dedret['deduccion'].get('diasAlmacenaje'), 'precio_pkg_diario': dedret['deduccion'].get('precioPKGdiario'), 'comision_gastos_adm': dedret['deduccion'].get('comisionGastosAdm')}) # depends on [control=['for'], data=['dedret']]
        for perret in aut.get('percepciones', []):
            perret = perret.get('percepcionReturn', perret)
            self.params_out['percepciones'].append({'importe_final': perret['percepcion']['importeFinal'], 'alicuota': perret['percepcion'].get('alicuota'), 'base_calculo': perret['percepcion'].get('baseCalculo'), 'descripcion': perret['percepcion'].get('descripcion', '').replace('\n', '')}) # depends on [control=['for'], data=['perret']] # depends on [control=['if'], data=[]]
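The adjustment loop above keys each amount by a normalized tax-rate suffix. A minimal standalone sketch of that normalization (the sample alicuota values are invented):

# Rate-key normalization used in the adjustment loop:
# 0 -> 'iva_0', 10.5 -> 'iva_105', 21 -> 'iva_21'
for alicuota in (0, 10.5, 21):
    tasa = 'iva_%s' % str(alicuota).replace('.', '').strip()
    print('concepto_importe_%s' % tasa, 'importe_ajustar_%s' % tasa)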
def build(self, pipeline_def, artifacts_persisted): '''Builds the execution plan. ''' # Construct dependency dictionary deps = {step.key: set() for step in self.steps} for step in self.steps: for step_input in step.step_inputs: deps[step.key].add(step_input.prev_output_handle.step_key) step_dict = {step.key: step for step in self.steps} return ExecutionPlan(pipeline_def, step_dict, deps, artifacts_persisted)
def function[build, parameter[self, pipeline_def, artifacts_persisted]]: constant[Builds the execution plan. ] variable[deps] assign[=] <ast.DictComp object at 0x7da1b03a6470> for taget[name[step]] in starred[name[self].steps] begin[:] for taget[name[step_input]] in starred[name[step].step_inputs] begin[:] call[call[name[deps]][name[step].key].add, parameter[name[step_input].prev_output_handle.step_key]] variable[step_dict] assign[=] <ast.DictComp object at 0x7da1b0314f70> return[call[name[ExecutionPlan], parameter[name[pipeline_def], name[step_dict], name[deps], name[artifacts_persisted]]]]
keyword[def] identifier[build] ( identifier[self] , identifier[pipeline_def] , identifier[artifacts_persisted] ): literal[string] identifier[deps] ={ identifier[step] . identifier[key] : identifier[set] () keyword[for] identifier[step] keyword[in] identifier[self] . identifier[steps] } keyword[for] identifier[step] keyword[in] identifier[self] . identifier[steps] : keyword[for] identifier[step_input] keyword[in] identifier[step] . identifier[step_inputs] : identifier[deps] [ identifier[step] . identifier[key] ]. identifier[add] ( identifier[step_input] . identifier[prev_output_handle] . identifier[step_key] ) identifier[step_dict] ={ identifier[step] . identifier[key] : identifier[step] keyword[for] identifier[step] keyword[in] identifier[self] . identifier[steps] } keyword[return] identifier[ExecutionPlan] ( identifier[pipeline_def] , identifier[step_dict] , identifier[deps] , identifier[artifacts_persisted] )
def build(self, pipeline_def, artifacts_persisted): """Builds the execution plan. """ # Construct dependency dictionary deps = {step.key: set() for step in self.steps} for step in self.steps: for step_input in step.step_inputs: deps[step.key].add(step_input.prev_output_handle.step_key) # depends on [control=['for'], data=['step_input']] # depends on [control=['for'], data=['step']] step_dict = {step.key: step for step in self.steps} return ExecutionPlan(pipeline_def, step_dict, deps, artifacts_persisted)
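To make the dependency-dictionary construction concrete, here is a minimal, self-contained sketch; the SimpleNamespace objects are hypothetical stand-ins for the real step and input-handle classes:

from types import SimpleNamespace

# Hypothetical stand-ins for the real step and step-input objects:
def input_from(key):
    return SimpleNamespace(prev_output_handle=SimpleNamespace(step_key=key))

steps = [
    SimpleNamespace(key='load', step_inputs=[]),
    SimpleNamespace(key='train', step_inputs=[input_from('load')]),
]

# Same construction as build() above: map each step key to the set of
# step keys whose outputs feed its inputs.
deps = {step.key: set() for step in steps}
for step in steps:
    for step_input in step.step_inputs:
        deps[step.key].add(step_input.prev_output_handle.step_key)
print(deps)  # {'load': set(), 'train': {'load'}}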
def _filter_cluster_data(self): """ Filter the cluster data catalog into the filtered_data catalog, which is what is shown in the H-R diagram. Filter on the values of the sliders, as well as the lasso selection in the skyviewer. """ min_temp = self.temperature_range_slider.value[0] max_temp = self.temperature_range_slider.value[1] temp_mask = np.logical_and( self.cluster.catalog['temperature'] >= min_temp, self.cluster.catalog['temperature'] <= max_temp ) min_lum = self.luminosity_range_slider.value[0] max_lum = self.luminosity_range_slider.value[1] lum_mask = np.logical_and( self.cluster.catalog['luminosity'] >= min_lum, self.cluster.catalog['luminosity'] <= max_lum ) selected_mask = np.isin(self.cluster.catalog['id'], self.selection_ids) filter_mask = temp_mask & lum_mask & selected_mask self.filtered_data = self.cluster.catalog[filter_mask].data self.source.data = { 'id': list(self.filtered_data['id']), 'temperature': list(self.filtered_data['temperature']), 'luminosity': list(self.filtered_data['luminosity']), 'color': list(self.filtered_data['color']) } logging.debug("Selected data is now: %s", self.filtered_data)
def function[_filter_cluster_data, parameter[self]]: constant[ Filter the cluster data catalog into the filtered_data catalog, which is what is shown in the H-R diagram. Filter on the values of the sliders, as well as the lasso selection in the skyviewer. ] variable[min_temp] assign[=] call[name[self].temperature_range_slider.value][constant[0]] variable[max_temp] assign[=] call[name[self].temperature_range_slider.value][constant[1]] variable[temp_mask] assign[=] call[name[np].logical_and, parameter[compare[call[name[self].cluster.catalog][constant[temperature]] greater_or_equal[>=] name[min_temp]], compare[call[name[self].cluster.catalog][constant[temperature]] less_or_equal[<=] name[max_temp]]]] variable[min_lum] assign[=] call[name[self].luminosity_range_slider.value][constant[0]] variable[max_lum] assign[=] call[name[self].luminosity_range_slider.value][constant[1]] variable[lum_mask] assign[=] call[name[np].logical_and, parameter[compare[call[name[self].cluster.catalog][constant[luminosity]] greater_or_equal[>=] name[min_lum]], compare[call[name[self].cluster.catalog][constant[luminosity]] less_or_equal[<=] name[max_lum]]]] variable[selected_mask] assign[=] call[name[np].isin, parameter[call[name[self].cluster.catalog][constant[id]], name[self].selection_ids]] variable[filter_mask] assign[=] binary_operation[binary_operation[name[temp_mask] <ast.BitAnd object at 0x7da2590d6b60> name[lum_mask]] <ast.BitAnd object at 0x7da2590d6b60> name[selected_mask]] name[self].filtered_data assign[=] call[name[self].cluster.catalog][name[filter_mask]].data name[self].source.data assign[=] dictionary[[<ast.Constant object at 0x7da1afe0f550>, <ast.Constant object at 0x7da1afe0ebf0>, <ast.Constant object at 0x7da1afe0f6d0>, <ast.Constant object at 0x7da1afe0e350>], [<ast.Call object at 0x7da1afe0f6a0>, <ast.Call object at 0x7da1afe0f1c0>, <ast.Call object at 0x7da1afe0fe20>, <ast.Call object at 0x7da1afe0e380>]] call[name[logging].debug, parameter[constant[Selected data is now: %s], name[self].filtered_data]]
keyword[def] identifier[_filter_cluster_data] ( identifier[self] ): literal[string] identifier[min_temp] = identifier[self] . identifier[temperature_range_slider] . identifier[value] [ literal[int] ] identifier[max_temp] = identifier[self] . identifier[temperature_range_slider] . identifier[value] [ literal[int] ] identifier[temp_mask] = identifier[np] . identifier[logical_and] ( identifier[self] . identifier[cluster] . identifier[catalog] [ literal[string] ]>= identifier[min_temp] , identifier[self] . identifier[cluster] . identifier[catalog] [ literal[string] ]<= identifier[max_temp] ) identifier[min_lum] = identifier[self] . identifier[luminosity_range_slider] . identifier[value] [ literal[int] ] identifier[max_lum] = identifier[self] . identifier[luminosity_range_slider] . identifier[value] [ literal[int] ] identifier[lum_mask] = identifier[np] . identifier[logical_and] ( identifier[self] . identifier[cluster] . identifier[catalog] [ literal[string] ]>= identifier[min_lum] , identifier[self] . identifier[cluster] . identifier[catalog] [ literal[string] ]<= identifier[max_lum] ) identifier[selected_mask] = identifier[np] . identifier[isin] ( identifier[self] . identifier[cluster] . identifier[catalog] [ literal[string] ], identifier[self] . identifier[selection_ids] ) identifier[filter_mask] = identifier[temp_mask] & identifier[lum_mask] & identifier[selected_mask] identifier[self] . identifier[filtered_data] = identifier[self] . identifier[cluster] . identifier[catalog] [ identifier[filter_mask] ]. identifier[data] identifier[self] . identifier[source] . identifier[data] ={ literal[string] : identifier[list] ( identifier[self] . identifier[filtered_data] [ literal[string] ]), literal[string] : identifier[list] ( identifier[self] . identifier[filtered_data] [ literal[string] ]), literal[string] : identifier[list] ( identifier[self] . identifier[filtered_data] [ literal[string] ]), literal[string] : identifier[list] ( identifier[self] . identifier[filtered_data] [ literal[string] ]) } identifier[logging] . identifier[debug] ( literal[string] , identifier[self] . identifier[filtered_data] )
def _filter_cluster_data(self): """ Filter the cluster data catalog into the filtered_data catalog, which is what is shown in the H-R diagram. Filter on the values of the sliders, as well as the lasso selection in the skyviewer. """ min_temp = self.temperature_range_slider.value[0] max_temp = self.temperature_range_slider.value[1] temp_mask = np.logical_and(self.cluster.catalog['temperature'] >= min_temp, self.cluster.catalog['temperature'] <= max_temp) min_lum = self.luminosity_range_slider.value[0] max_lum = self.luminosity_range_slider.value[1] lum_mask = np.logical_and(self.cluster.catalog['luminosity'] >= min_lum, self.cluster.catalog['luminosity'] <= max_lum) selected_mask = np.isin(self.cluster.catalog['id'], self.selection_ids) filter_mask = temp_mask & lum_mask & selected_mask self.filtered_data = self.cluster.catalog[filter_mask].data self.source.data = {'id': list(self.filtered_data['id']), 'temperature': list(self.filtered_data['temperature']), 'luminosity': list(self.filtered_data['luminosity']), 'color': list(self.filtered_data['color'])} logging.debug('Selected data is now: %s', self.filtered_data)
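A self-contained sketch of the same three-mask filter on a toy catalog (all values invented), showing how the two range masks combine with the np.isin selection mask:

import numpy as np

# Toy catalog standing in for self.cluster.catalog:
temperature = np.array([3000, 5800, 9500])
luminosity = np.array([0.1, 1.0, 25.0])
ids = np.array([1, 2, 3])

temp_mask = np.logical_and(temperature >= 3500, temperature <= 10000)
lum_mask = np.logical_and(luminosity >= 0.5, luminosity <= 30.0)
selected_mask = np.isin(ids, [2, 3])  # stand-in for the lasso selection
print(ids[temp_mask & lum_mask & selected_mask])  # [2 3]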
def upload(
    cls, files, metadata=None, tags=None, project=None, coerce_ascii=False, progressbar=None
):
    """Uploads a series of files to the One Codex server.

    Parameters
    ----------
    files : `string` or `tuple`
        A single path to a file on the system, or a tuple containing a pair of paths. Tuple
        values will be interleaved as paired-end reads and both files should contain the
        same number of records. Paths to single files will be uploaded as-is.
    metadata : `dict`, optional
    tags : `list`, optional
    project : `string`, optional
        UUID of project to associate this sample with.
    coerce_ascii : `bool`, optional
        If true, rename unicode filenames to ASCII and issue warning.
    progressbar : `click.progressbar`, optional
        If passed, display a progress bar using Click.

    Returns
    -------
    A `Samples` object upon successful upload. None if the upload failed.
    """
    res = cls._resource
    if not isinstance(files, string_types) and not isinstance(files, tuple):
        raise OneCodexException(
            "Please pass a string or tuple of forward and reverse filepaths."
        )

    if not isinstance(project, Projects) and project is not None:
        project_search = Projects.get(project)
        if not project_search:
            project_search = Projects.where(name=project)
        if not project_search:
            try:
                project_search = Projects.where(project_name=project)
            except HTTPError:
                project_search = None

        if not project_search:
            raise OneCodexException("{} is not a valid project UUID".format(project))

        if isinstance(project_search, list):
            project = project_search[0]

    sample_id = upload_sequence(
        files,
        res._client.session,
        res,
        metadata=metadata,
        tags=tags,
        project=project,
        coerce_ascii=coerce_ascii,
        progressbar=progressbar,
    )

    return cls.get(sample_id)
def function[upload, parameter[cls, files, metadata, tags, project, coerce_ascii, progressbar]]: constant[Uploads a series of files to the One Codex server. Parameters ---------- files : `string` or `tuple` A single path to a file on the system, or a tuple containing a pairs of paths. Tuple values will be interleaved as paired-end reads and both files should contain the same number of records. Paths to single files will be uploaded as-is. metadata : `dict`, optional tags : `list`, optional project : `string`, optional UUID of project to associate this sample with. coerce_ascii : `bool`, optional If true, rename unicode filenames to ASCII and issue warning. progressbar : `click.progressbar`, optional If passed, display a progress bar using Click. Returns ------- A `Samples` object upon successful upload. None if the upload failed. ] variable[res] assign[=] name[cls]._resource if <ast.BoolOp object at 0x7da2044c2380> begin[:] <ast.Raise object at 0x7da2044c16c0> if <ast.BoolOp object at 0x7da2044c2ad0> begin[:] variable[project_search] assign[=] call[name[Projects].get, parameter[name[project]]] if <ast.UnaryOp object at 0x7da2044c03a0> begin[:] variable[project_search] assign[=] call[name[Projects].where, parameter[]] if <ast.UnaryOp object at 0x7da2044c21a0> begin[:] <ast.Try object at 0x7da2044c35b0> if <ast.UnaryOp object at 0x7da2044c18d0> begin[:] <ast.Raise object at 0x7da2044c30a0> if call[name[isinstance], parameter[name[project_search], name[list]]] begin[:] variable[project] assign[=] call[name[project_search]][constant[0]] variable[sample_id] assign[=] call[name[upload_sequence], parameter[name[files], name[res]._client.session, name[res]]] return[call[name[cls].get, parameter[name[sample_id]]]]
keyword[def] identifier[upload] ( identifier[cls] , identifier[files] , identifier[metadata] = keyword[None] , identifier[tags] = keyword[None] , identifier[project] = keyword[None] , identifier[coerce_ascii] = keyword[False] , identifier[progressbar] = keyword[None] ): literal[string] identifier[res] = identifier[cls] . identifier[_resource] keyword[if] keyword[not] identifier[isinstance] ( identifier[files] , identifier[string_types] ) keyword[and] keyword[not] identifier[isinstance] ( identifier[files] , identifier[tuple] ): keyword[raise] identifier[OneCodexException] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[project] , identifier[Projects] ) keyword[and] identifier[project] keyword[is] keyword[not] keyword[None] : identifier[project_search] = identifier[Projects] . identifier[get] ( identifier[project] ) keyword[if] keyword[not] identifier[project_search] : identifier[project_search] = identifier[Projects] . identifier[where] ( identifier[name] = identifier[project] ) keyword[if] keyword[not] identifier[project_search] : keyword[try] : identifier[project_search] = identifier[Projects] . identifier[where] ( identifier[project_name] = identifier[project] ) keyword[except] identifier[HTTPError] : identifier[project_search] = keyword[None] keyword[if] keyword[not] identifier[project_search] : keyword[raise] identifier[OneCodexException] ( literal[string] . identifier[format] ( identifier[project] )) keyword[if] identifier[isinstance] ( identifier[project_search] , identifier[list] ): identifier[project] = identifier[project_search] [ literal[int] ] identifier[sample_id] = identifier[upload_sequence] ( identifier[files] , identifier[res] . identifier[_client] . identifier[session] , identifier[res] , identifier[metadata] = identifier[metadata] , identifier[tags] = identifier[tags] , identifier[project] = identifier[project] , identifier[coerce_ascii] = identifier[coerce_ascii] , identifier[progressbar] = identifier[progressbar] , ) keyword[return] identifier[cls] . identifier[get] ( identifier[sample_id] )
def upload(cls, files, metadata=None, tags=None, project=None, coerce_ascii=False, progressbar=None):
    """Uploads a series of files to the One Codex server.

    Parameters
    ----------
    files : `string` or `tuple`
        A single path to a file on the system, or a tuple containing a pair of paths. Tuple
        values will be interleaved as paired-end reads and both files should contain the
        same number of records. Paths to single files will be uploaded as-is.
    metadata : `dict`, optional
    tags : `list`, optional
    project : `string`, optional
        UUID of project to associate this sample with.
    coerce_ascii : `bool`, optional
        If true, rename unicode filenames to ASCII and issue warning.
    progressbar : `click.progressbar`, optional
        If passed, display a progress bar using Click.

    Returns
    -------
    A `Samples` object upon successful upload. None if the upload failed.
    """
    res = cls._resource
    if not isinstance(files, string_types) and (not isinstance(files, tuple)):
        raise OneCodexException('Please pass a string or tuple of forward and reverse filepaths.') # depends on [control=['if'], data=[]]
    if not isinstance(project, Projects) and project is not None:
        project_search = Projects.get(project)
        if not project_search:
            project_search = Projects.where(name=project) # depends on [control=['if'], data=[]]
        if not project_search:
            try:
                project_search = Projects.where(project_name=project) # depends on [control=['try'], data=[]]
            except HTTPError:
                project_search = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
        if not project_search:
            raise OneCodexException('{} is not a valid project UUID'.format(project)) # depends on [control=['if'], data=[]]
        if isinstance(project_search, list):
            project = project_search[0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    sample_id = upload_sequence(files, res._client.session, res, metadata=metadata, tags=tags, project=project, coerce_ascii=coerce_ascii, progressbar=progressbar)
    return cls.get(sample_id)
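A hedged usage sketch built only from the signature above; the class name Samples is inferred from the docstring, and the file paths and project UUID are placeholders, not real values:

# Hypothetical call; requires an authenticated One Codex client session.
sample = Samples.upload(
    ("reads_R1.fastq.gz", "reads_R2.fastq.gz"),  # paired-end tuple
    tags=["run-42"],
    project="00000000-0000-0000-0000-000000000000",
    coerce_ascii=True,
)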
def _validate_function(self, func, name): """ Tests `self.style_function` and `self.highlight_function` to ensure they are functions returning dictionaries. """ test_feature = self.data['features'][0] if not callable(func) or not isinstance(func(test_feature), dict): raise ValueError('{} should be a function that accepts items from ' 'data[\'features\'] and returns a dictionary.' .format(name))
def function[_validate_function, parameter[self, func, name]]: constant[ Tests `self.style_function` and `self.highlight_function` to ensure they are functions returning dictionaries. ] variable[test_feature] assign[=] call[call[name[self].data][constant[features]]][constant[0]] if <ast.BoolOp object at 0x7da20cabc2e0> begin[:] <ast.Raise object at 0x7da20cabd8a0>
keyword[def] identifier[_validate_function] ( identifier[self] , identifier[func] , identifier[name] ): literal[string] identifier[test_feature] = identifier[self] . identifier[data] [ literal[string] ][ literal[int] ] keyword[if] keyword[not] identifier[callable] ( identifier[func] ) keyword[or] keyword[not] identifier[isinstance] ( identifier[func] ( identifier[test_feature] ), identifier[dict] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[name] ))
def _validate_function(self, func, name): """ Tests `self.style_function` and `self.highlight_function` to ensure they are functions returning dictionaries. """ test_feature = self.data['features'][0] if not callable(func) or not isinstance(func(test_feature), dict): raise ValueError("{} should be a function that accepts items from data['features'] and returns a dictionary.".format(name)) # depends on [control=['if'], data=[]]
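A minimal illustration of the check performed above, assuming GeoJSON-style features; the feature dict and both candidate functions are invented:

feature = {'type': 'Feature', 'properties': {'name': 'A'}}

good = lambda feat: {'fillColor': 'green'}  # returns a dict: passes
bad = lambda feat: 'green'                  # not a dict: would raise ValueError

print(callable(good) and isinstance(good(feature), dict))  # True
print(callable(bad) and isinstance(bad(feature), dict))    # False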
def move_pos(line=1, column=1, file=sys.stdout): """ Move the cursor to a new position. Values are 1-based, and default to 1. Esc[<line>;<column>H or Esc[<line>;<column>f """ move.pos(line=line, col=column).write(file=file)
def function[move_pos, parameter[line, column, file]]: constant[ Move the cursor to a new position. Values are 1-based, and default to 1. Esc[<line>;<column>H or Esc[<line>;<column>f ] call[call[name[move].pos, parameter[]].write, parameter[]]
keyword[def] identifier[move_pos] ( identifier[line] = literal[int] , identifier[column] = literal[int] , identifier[file] = identifier[sys] . identifier[stdout] ): literal[string] identifier[move] . identifier[pos] ( identifier[line] = identifier[line] , identifier[col] = identifier[column] ). identifier[write] ( identifier[file] = identifier[file] )
def move_pos(line=1, column=1, file=sys.stdout): """ Move the cursor to a new position. Values are 1-based, and default to 1. Esc[<line>;<column>H or Esc[<line>;<column>f """ move.pos(line=line, col=column).write(file=file)
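The docstring gives the underlying CUP escape sequence, so an equivalent that bypasses the move helper can be sketched directly (the helper name move_pos_raw is invented):

import sys

# Writes the same Esc[<line>;<column>H sequence cited in the docstring.
def move_pos_raw(line=1, column=1, file=sys.stdout):
    file.write('\x1b[%d;%dH' % (line, column))
    file.flush()

move_pos_raw(5, 10)  # cursor to row 5, column 10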
def batch_iterable(iterable, count):
    """
    Yield batches of `count` items from the given iterable.

    >>> for x in batch_iterable([1, 2, 3, 4, 5, 6, 7], 3):
    ...     print(x)
    [1, 2, 3]
    [4, 5, 6]
    [7]

    :param iterable: An iterable
    :type iterable: Iterable
    :param count: Number of items per batch. If <= 0, nothing is yielded.
    :type count: int
    :return: Iterable of lists of items
    :rtype: Iterable[list[object]]
    """
    if count <= 0:
        return
    current_batch = []
    for item in iterable:
        if len(current_batch) == count:
            yield current_batch
            current_batch = []
        current_batch.append(item)
    if current_batch:
        yield current_batch
def function[batch_iterable, parameter[iterable, count]]: constant[ Yield batches of `count` items from the given iterable. >>> for x in batch([1, 2, 3, 4, 5, 6, 7], 3): >>> print(x) [1, 2, 3] [4, 5, 6] [7] :param iterable: An iterable :type iterable: Iterable :param count: Number of items per batch. If <= 0, nothing is yielded. :type count: int :return: Iterable of lists of items :rtype: Iterable[list[object]] ] if compare[name[count] less_or_equal[<=] constant[0]] begin[:] return[None] variable[current_batch] assign[=] list[[]] for taget[name[item]] in starred[name[iterable]] begin[:] if compare[call[name[len], parameter[name[current_batch]]] equal[==] name[count]] begin[:] <ast.Yield object at 0x7da18f09e920> variable[current_batch] assign[=] list[[]] call[name[current_batch].append, parameter[name[item]]] if name[current_batch] begin[:] <ast.Yield object at 0x7da20c76f760>
keyword[def] identifier[batch_iterable] ( identifier[iterable] , identifier[count] ): literal[string] keyword[if] identifier[count] <= literal[int] : keyword[return] identifier[current_batch] =[] keyword[for] identifier[item] keyword[in] identifier[iterable] : keyword[if] identifier[len] ( identifier[current_batch] )== identifier[count] : keyword[yield] identifier[current_batch] identifier[current_batch] =[] identifier[current_batch] . identifier[append] ( identifier[item] ) keyword[if] identifier[current_batch] : keyword[yield] identifier[current_batch]
def batch_iterable(iterable, count):
    """
    Yield batches of `count` items from the given iterable.

    >>> for x in batch_iterable([1, 2, 3, 4, 5, 6, 7], 3):
    ...     print(x)
    [1, 2, 3]
    [4, 5, 6]
    [7]

    :param iterable: An iterable
    :type iterable: Iterable
    :param count: Number of items per batch. If <= 0, nothing is yielded.
    :type count: int
    :return: Iterable of lists of items
    :rtype: Iterable[list[object]]
    """
    if count <= 0:
        return # depends on [control=['if'], data=[]]
    current_batch = []
    for item in iterable:
        if len(current_batch) == count:
            yield current_batch
            current_batch = [] # depends on [control=['if'], data=[]]
        current_batch.append(item) # depends on [control=['for'], data=['item']]
    if current_batch:
        yield current_batch # depends on [control=['if'], data=[]]
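Usage, assuming the batch_iterable definition above is in scope:

print(list(batch_iterable([1, 2, 3, 4, 5, 6, 7], 3)))  # [[1, 2, 3], [4, 5, 6], [7]]
print(list(batch_iterable(range(4), 0)))               # count <= 0 yields nothing: []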
def discover(self): """Method to send a discovery message """ if self.transport: if self.discovery_countdown <= 0: self.discovery_countdown = self.discovery_interval msg = GetService(BROADCAST_MAC, self.source_id, seq_num=0, payload={}, ack_requested=False, response_requested=True) self.transport.sendto(msg.generate_packed_message(), (self.broadcast_ip, UDP_BROADCAST_PORT)) else: self.discovery_countdown -= self.discovery_step self.loop.call_later(self.discovery_step, self.discover)
def function[discover, parameter[self]]: constant[Method to send a discovery message ] if name[self].transport begin[:] if compare[name[self].discovery_countdown less_or_equal[<=] constant[0]] begin[:] name[self].discovery_countdown assign[=] name[self].discovery_interval variable[msg] assign[=] call[name[GetService], parameter[name[BROADCAST_MAC], name[self].source_id]] call[name[self].transport.sendto, parameter[call[name[msg].generate_packed_message, parameter[]], tuple[[<ast.Attribute object at 0x7da204566bf0>, <ast.Name object at 0x7da2045669b0>]]]] call[name[self].loop.call_later, parameter[name[self].discovery_step, name[self].discover]]
keyword[def] identifier[discover] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[transport] : keyword[if] identifier[self] . identifier[discovery_countdown] <= literal[int] : identifier[self] . identifier[discovery_countdown] = identifier[self] . identifier[discovery_interval] identifier[msg] = identifier[GetService] ( identifier[BROADCAST_MAC] , identifier[self] . identifier[source_id] , identifier[seq_num] = literal[int] , identifier[payload] ={}, identifier[ack_requested] = keyword[False] , identifier[response_requested] = keyword[True] ) identifier[self] . identifier[transport] . identifier[sendto] ( identifier[msg] . identifier[generate_packed_message] (),( identifier[self] . identifier[broadcast_ip] , identifier[UDP_BROADCAST_PORT] )) keyword[else] : identifier[self] . identifier[discovery_countdown] -= identifier[self] . identifier[discovery_step] identifier[self] . identifier[loop] . identifier[call_later] ( identifier[self] . identifier[discovery_step] , identifier[self] . identifier[discover] )
def discover(self): """Method to send a discovery message """ if self.transport: if self.discovery_countdown <= 0: self.discovery_countdown = self.discovery_interval msg = GetService(BROADCAST_MAC, self.source_id, seq_num=0, payload={}, ack_requested=False, response_requested=True) self.transport.sendto(msg.generate_packed_message(), (self.broadcast_ip, UDP_BROADCAST_PORT)) # depends on [control=['if'], data=[]] else: self.discovery_countdown -= self.discovery_step self.loop.call_later(self.discovery_step, self.discover) # depends on [control=['if'], data=[]]
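A minimal, runnable sketch of the same self-rescheduling pattern with loop.call_later; the callback name, step, and countdown values are invented:

import asyncio

# Each invocation re-arms itself until the countdown runs out,
# mirroring how discover() reschedules via loop.call_later.
def tick(loop, step=0.1, remaining=3):
    print('discovery tick, remaining=%d' % remaining)
    if remaining > 0:
        loop.call_later(step, tick, loop, step, remaining - 1)
    else:
        loop.stop()

loop = asyncio.new_event_loop()
loop.call_soon(tick, loop)
loop.run_forever()
loop.close()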
def key_from_keybase(username, fingerprint=None): """Look up a public key from a username""" url = keybase_lookup_url(username) resp = requests.get(url) if resp.status_code == 200: j_resp = json.loads(polite_string(resp.content)) if 'them' in j_resp and len(j_resp['them']) == 1: kb_obj = j_resp['them'][0] if fingerprint: return fingerprint_from_keybase(fingerprint, kb_obj) else: if 'public_keys' in kb_obj \ and 'pgp_public_keys' in kb_obj['public_keys']: key = kb_obj['public_keys']['primary'] return massage_key(key) return None
def function[key_from_keybase, parameter[username, fingerprint]]: constant[Look up a public key from a username] variable[url] assign[=] call[name[keybase_lookup_url], parameter[name[username]]] variable[resp] assign[=] call[name[requests].get, parameter[name[url]]] if compare[name[resp].status_code equal[==] constant[200]] begin[:] variable[j_resp] assign[=] call[name[json].loads, parameter[call[name[polite_string], parameter[name[resp].content]]]] if <ast.BoolOp object at 0x7da1b19f3ac0> begin[:] variable[kb_obj] assign[=] call[call[name[j_resp]][constant[them]]][constant[0]] if name[fingerprint] begin[:] return[call[name[fingerprint_from_keybase], parameter[name[fingerprint], name[kb_obj]]]] return[constant[None]]
keyword[def] identifier[key_from_keybase] ( identifier[username] , identifier[fingerprint] = keyword[None] ): literal[string] identifier[url] = identifier[keybase_lookup_url] ( identifier[username] ) identifier[resp] = identifier[requests] . identifier[get] ( identifier[url] ) keyword[if] identifier[resp] . identifier[status_code] == literal[int] : identifier[j_resp] = identifier[json] . identifier[loads] ( identifier[polite_string] ( identifier[resp] . identifier[content] )) keyword[if] literal[string] keyword[in] identifier[j_resp] keyword[and] identifier[len] ( identifier[j_resp] [ literal[string] ])== literal[int] : identifier[kb_obj] = identifier[j_resp] [ literal[string] ][ literal[int] ] keyword[if] identifier[fingerprint] : keyword[return] identifier[fingerprint_from_keybase] ( identifier[fingerprint] , identifier[kb_obj] ) keyword[else] : keyword[if] literal[string] keyword[in] identifier[kb_obj] keyword[and] literal[string] keyword[in] identifier[kb_obj] [ literal[string] ]: identifier[key] = identifier[kb_obj] [ literal[string] ][ literal[string] ] keyword[return] identifier[massage_key] ( identifier[key] ) keyword[return] keyword[None]
def key_from_keybase(username, fingerprint=None): """Look up a public key from a username""" url = keybase_lookup_url(username) resp = requests.get(url) if resp.status_code == 200: j_resp = json.loads(polite_string(resp.content)) if 'them' in j_resp and len(j_resp['them']) == 1: kb_obj = j_resp['them'][0] if fingerprint: return fingerprint_from_keybase(fingerprint, kb_obj) # depends on [control=['if'], data=[]] elif 'public_keys' in kb_obj and 'pgp_public_keys' in kb_obj['public_keys']: key = kb_obj['public_keys']['primary'] return massage_key(key) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return None
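The keybase_lookup_url helper is not shown in this record; a plausible sketch against Keybase's public user-lookup endpoint follows (the URL shape is an assumption, not confirmed by the source):

import json
import requests

# Assumed endpoint; the real helper may build the URL differently.
def keybase_lookup_url(username):
    return 'https://keybase.io/_/api/1.0/user/lookup.json?usernames=%s' % username

resp = requests.get(keybase_lookup_url('example'))
if resp.status_code == 200:
    them = json.loads(resp.content.decode('utf-8')).get('them') or []
    print('matches:', len(them))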