code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def bottom(self): """ Move this object to the bottom of the ordered stack. """ o = self.get_ordering_queryset().aggregate(Max('order')).get('order__max') self.to(o)
def function[bottom, parameter[self]]: constant[ Move this object to the bottom of the ordered stack. ] variable[o] assign[=] call[call[call[name[self].get_ordering_queryset, parameter[]].aggregate, parameter[call[name[Max], parameter[constant[order]]]]].get, parameter[constant[order__max]]] call[name[self].to, parameter[name[o]]]
keyword[def] identifier[bottom] ( identifier[self] ): literal[string] identifier[o] = identifier[self] . identifier[get_ordering_queryset] (). identifier[aggregate] ( identifier[Max] ( literal[string] )). identifier[get] ( literal[string] ) identifier[self] . identifier[to] ( identifier[o] )
def bottom(self): """ Move this object to the bottom of the ordered stack. """ o = self.get_ordering_queryset().aggregate(Max('order')).get('order__max') self.to(o)
def equal(mol, query, largest_only=True, ignore_hydrogen=True): """ if mol is exactly same structure as the query, return True Args: mol: Compound query: Compound """ m = molutil.clone(mol) q = molutil.clone(query) if largest_only: m = molutil.largest_graph(m) q = molutil.largest_graph(q) if ignore_hydrogen: m = molutil.make_Hs_implicit(m) q = molutil.make_Hs_implicit(q) if molutil.mw(m) == molutil.mw(q): gm = GraphMatcher(q.graph, m.graph, node_match=atom_match) return gm.is_isomorphic() return False
def function[equal, parameter[mol, query, largest_only, ignore_hydrogen]]: constant[ if mol is exactly same structure as the query, return True Args: mol: Compound query: Compound ] variable[m] assign[=] call[name[molutil].clone, parameter[name[mol]]] variable[q] assign[=] call[name[molutil].clone, parameter[name[query]]] if name[largest_only] begin[:] variable[m] assign[=] call[name[molutil].largest_graph, parameter[name[m]]] variable[q] assign[=] call[name[molutil].largest_graph, parameter[name[q]]] if name[ignore_hydrogen] begin[:] variable[m] assign[=] call[name[molutil].make_Hs_implicit, parameter[name[m]]] variable[q] assign[=] call[name[molutil].make_Hs_implicit, parameter[name[q]]] if compare[call[name[molutil].mw, parameter[name[m]]] equal[==] call[name[molutil].mw, parameter[name[q]]]] begin[:] variable[gm] assign[=] call[name[GraphMatcher], parameter[name[q].graph, name[m].graph]] return[call[name[gm].is_isomorphic, parameter[]]] return[constant[False]]
keyword[def] identifier[equal] ( identifier[mol] , identifier[query] , identifier[largest_only] = keyword[True] , identifier[ignore_hydrogen] = keyword[True] ): literal[string] identifier[m] = identifier[molutil] . identifier[clone] ( identifier[mol] ) identifier[q] = identifier[molutil] . identifier[clone] ( identifier[query] ) keyword[if] identifier[largest_only] : identifier[m] = identifier[molutil] . identifier[largest_graph] ( identifier[m] ) identifier[q] = identifier[molutil] . identifier[largest_graph] ( identifier[q] ) keyword[if] identifier[ignore_hydrogen] : identifier[m] = identifier[molutil] . identifier[make_Hs_implicit] ( identifier[m] ) identifier[q] = identifier[molutil] . identifier[make_Hs_implicit] ( identifier[q] ) keyword[if] identifier[molutil] . identifier[mw] ( identifier[m] )== identifier[molutil] . identifier[mw] ( identifier[q] ): identifier[gm] = identifier[GraphMatcher] ( identifier[q] . identifier[graph] , identifier[m] . identifier[graph] , identifier[node_match] = identifier[atom_match] ) keyword[return] identifier[gm] . identifier[is_isomorphic] () keyword[return] keyword[False]
def equal(mol, query, largest_only=True, ignore_hydrogen=True): """ if mol is exactly same structure as the query, return True Args: mol: Compound query: Compound """ m = molutil.clone(mol) q = molutil.clone(query) if largest_only: m = molutil.largest_graph(m) q = molutil.largest_graph(q) # depends on [control=['if'], data=[]] if ignore_hydrogen: m = molutil.make_Hs_implicit(m) q = molutil.make_Hs_implicit(q) # depends on [control=['if'], data=[]] if molutil.mw(m) == molutil.mw(q): gm = GraphMatcher(q.graph, m.graph, node_match=atom_match) return gm.is_isomorphic() # depends on [control=['if'], data=[]] return False
def _load_neighbors_from_external_source(self) -> None: """ Loads the neighbors of the node from the igraph `Graph` instance that is wrapped by the graph that has this node. """ graph: SpotifyArtistGraph = self._graph items: List[NameExternalIDPair] = graph.client.similar_artists(self.external_id) limit: int = graph.neighbor_count if graph.neighbor_count > 0 else self._NEIGHBORS_TO_LOAD if len(items) > limit: del items[limit:] for item in items: neighbor: SpotifyArtistNode = graph.nodes.get_node_by_name(item.name, can_validate_and_load=True, external_id=item.external_id) # Strangely we need this guard because the Spofity API's search method doesn't # recognise certain artist names. # Actually it could also be a bug in SpotifyClient.search_artists_by_name(), # the artist name sent as a request parameter may not be encoded 100% correctly... # Anyway, this is a working hotfix. if neighbor is not None: graph.add_edge(self, neighbor)
def function[_load_neighbors_from_external_source, parameter[self]]: constant[ Loads the neighbors of the node from the igraph `Graph` instance that is wrapped by the graph that has this node. ] <ast.AnnAssign object at 0x7da2054a7e50> <ast.AnnAssign object at 0x7da2054a4a30> <ast.AnnAssign object at 0x7da2054a7c70> if compare[call[name[len], parameter[name[items]]] greater[>] name[limit]] begin[:] <ast.Delete object at 0x7da2054a5e10> for taget[name[item]] in starred[name[items]] begin[:] <ast.AnnAssign object at 0x7da2054a6080> if compare[name[neighbor] is_not constant[None]] begin[:] call[name[graph].add_edge, parameter[name[self], name[neighbor]]]
keyword[def] identifier[_load_neighbors_from_external_source] ( identifier[self] )-> keyword[None] : literal[string] identifier[graph] : identifier[SpotifyArtistGraph] = identifier[self] . identifier[_graph] identifier[items] : identifier[List] [ identifier[NameExternalIDPair] ]= identifier[graph] . identifier[client] . identifier[similar_artists] ( identifier[self] . identifier[external_id] ) identifier[limit] : identifier[int] = identifier[graph] . identifier[neighbor_count] keyword[if] identifier[graph] . identifier[neighbor_count] > literal[int] keyword[else] identifier[self] . identifier[_NEIGHBORS_TO_LOAD] keyword[if] identifier[len] ( identifier[items] )> identifier[limit] : keyword[del] identifier[items] [ identifier[limit] :] keyword[for] identifier[item] keyword[in] identifier[items] : identifier[neighbor] : identifier[SpotifyArtistNode] = identifier[graph] . identifier[nodes] . identifier[get_node_by_name] ( identifier[item] . identifier[name] , identifier[can_validate_and_load] = keyword[True] , identifier[external_id] = identifier[item] . identifier[external_id] ) keyword[if] identifier[neighbor] keyword[is] keyword[not] keyword[None] : identifier[graph] . identifier[add_edge] ( identifier[self] , identifier[neighbor] )
def _load_neighbors_from_external_source(self) -> None: """ Loads the neighbors of the node from the igraph `Graph` instance that is wrapped by the graph that has this node. """ graph: SpotifyArtistGraph = self._graph items: List[NameExternalIDPair] = graph.client.similar_artists(self.external_id) limit: int = graph.neighbor_count if graph.neighbor_count > 0 else self._NEIGHBORS_TO_LOAD if len(items) > limit: del items[limit:] # depends on [control=['if'], data=['limit']] for item in items: neighbor: SpotifyArtistNode = graph.nodes.get_node_by_name(item.name, can_validate_and_load=True, external_id=item.external_id) # Strangely we need this guard because the Spofity API's search method doesn't # recognise certain artist names. # Actually it could also be a bug in SpotifyClient.search_artists_by_name(), # the artist name sent as a request parameter may not be encoded 100% correctly... # Anyway, this is a working hotfix. if neighbor is not None: graph.add_edge(self, neighbor) # depends on [control=['if'], data=['neighbor']] # depends on [control=['for'], data=['item']]
def read_component_sitemap( self, sitemapindex_uri, sitemap_uri, sitemap, sitemapindex_is_file): """Read a component sitemap of a Resource List with index. Each component must be a sitemap with the """ if (sitemapindex_is_file): if (not self.is_file_uri(sitemap_uri)): # Attempt to map URI to local file remote_uri = sitemap_uri sitemap_uri = self.mapper.src_to_dst(remote_uri) self.logger.info( "Mapped %s to local file %s" % (remote_uri, sitemap_uri)) else: # The individual sitemaps should be at a URL (scheme/server/path) # that the sitemapindex URL can speak authoritatively about if (self.check_url_authority and not UrlAuthority(sitemapindex_uri).has_authority_over(sitemap_uri)): raise ListBaseIndexError( "The sitemapindex (%s) refers to sitemap at a location it does not have authority over (%s)" % (sitemapindex_uri, sitemap_uri)) try: fh = URLopener().open(sitemap_uri) self.num_files += 1 except IOError as e: raise ListBaseIndexError( "Failed to load sitemap from %s listed in sitemap index %s (%s)" % (sitemap_uri, sitemapindex_uri, str(e))) # Get the Content-Length if we can (works fine for local files) try: self.content_length = int(fh.info()['Content-Length']) self.bytes_read += self.content_length except KeyError: # If we don't get a length then c'est la vie pass self.logger.info( "Reading sitemap from %s (%d bytes)" % (sitemap_uri, self.content_length)) component = sitemap.parse_xml(fh=fh, sitemapindex=False) # Copy resources into self, check any metadata for r in component: self.resources.add(r)
def function[read_component_sitemap, parameter[self, sitemapindex_uri, sitemap_uri, sitemap, sitemapindex_is_file]]: constant[Read a component sitemap of a Resource List with index. Each component must be a sitemap with the ] if name[sitemapindex_is_file] begin[:] if <ast.UnaryOp object at 0x7da1b26a7a60> begin[:] variable[remote_uri] assign[=] name[sitemap_uri] variable[sitemap_uri] assign[=] call[name[self].mapper.src_to_dst, parameter[name[remote_uri]]] call[name[self].logger.info, parameter[binary_operation[constant[Mapped %s to local file %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2617ac0>, <ast.Name object at 0x7da1b2617eb0>]]]]] <ast.Try object at 0x7da1b2616a10> <ast.Try object at 0x7da1b2617400> call[name[self].logger.info, parameter[binary_operation[constant[Reading sitemap from %s (%d bytes)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2614f70>, <ast.Attribute object at 0x7da1b2616bf0>]]]]] variable[component] assign[=] call[name[sitemap].parse_xml, parameter[]] for taget[name[r]] in starred[name[component]] begin[:] call[name[self].resources.add, parameter[name[r]]]
keyword[def] identifier[read_component_sitemap] ( identifier[self] , identifier[sitemapindex_uri] , identifier[sitemap_uri] , identifier[sitemap] , identifier[sitemapindex_is_file] ): literal[string] keyword[if] ( identifier[sitemapindex_is_file] ): keyword[if] ( keyword[not] identifier[self] . identifier[is_file_uri] ( identifier[sitemap_uri] )): identifier[remote_uri] = identifier[sitemap_uri] identifier[sitemap_uri] = identifier[self] . identifier[mapper] . identifier[src_to_dst] ( identifier[remote_uri] ) identifier[self] . identifier[logger] . identifier[info] ( literal[string] % ( identifier[remote_uri] , identifier[sitemap_uri] )) keyword[else] : keyword[if] ( identifier[self] . identifier[check_url_authority] keyword[and] keyword[not] identifier[UrlAuthority] ( identifier[sitemapindex_uri] ). identifier[has_authority_over] ( identifier[sitemap_uri] )): keyword[raise] identifier[ListBaseIndexError] ( literal[string] % ( identifier[sitemapindex_uri] , identifier[sitemap_uri] )) keyword[try] : identifier[fh] = identifier[URLopener] (). identifier[open] ( identifier[sitemap_uri] ) identifier[self] . identifier[num_files] += literal[int] keyword[except] identifier[IOError] keyword[as] identifier[e] : keyword[raise] identifier[ListBaseIndexError] ( literal[string] % ( identifier[sitemap_uri] , identifier[sitemapindex_uri] , identifier[str] ( identifier[e] ))) keyword[try] : identifier[self] . identifier[content_length] = identifier[int] ( identifier[fh] . identifier[info] ()[ literal[string] ]) identifier[self] . identifier[bytes_read] += identifier[self] . identifier[content_length] keyword[except] identifier[KeyError] : keyword[pass] identifier[self] . identifier[logger] . identifier[info] ( literal[string] % ( identifier[sitemap_uri] , identifier[self] . identifier[content_length] )) identifier[component] = identifier[sitemap] . 
identifier[parse_xml] ( identifier[fh] = identifier[fh] , identifier[sitemapindex] = keyword[False] ) keyword[for] identifier[r] keyword[in] identifier[component] : identifier[self] . identifier[resources] . identifier[add] ( identifier[r] )
def read_component_sitemap(self, sitemapindex_uri, sitemap_uri, sitemap, sitemapindex_is_file): """Read a component sitemap of a Resource List with index. Each component must be a sitemap with the """ if sitemapindex_is_file: if not self.is_file_uri(sitemap_uri): # Attempt to map URI to local file remote_uri = sitemap_uri sitemap_uri = self.mapper.src_to_dst(remote_uri) self.logger.info('Mapped %s to local file %s' % (remote_uri, sitemap_uri)) # depends on [control=['if'], data=[]] # The individual sitemaps should be at a URL (scheme/server/path) # that the sitemapindex URL can speak authoritatively about elif self.check_url_authority and (not UrlAuthority(sitemapindex_uri).has_authority_over(sitemap_uri)): raise ListBaseIndexError('The sitemapindex (%s) refers to sitemap at a location it does not have authority over (%s)' % (sitemapindex_uri, sitemap_uri)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] try: fh = URLopener().open(sitemap_uri) self.num_files += 1 # depends on [control=['try'], data=[]] except IOError as e: raise ListBaseIndexError('Failed to load sitemap from %s listed in sitemap index %s (%s)' % (sitemap_uri, sitemapindex_uri, str(e))) # depends on [control=['except'], data=['e']] # Get the Content-Length if we can (works fine for local files) try: self.content_length = int(fh.info()['Content-Length']) self.bytes_read += self.content_length # depends on [control=['try'], data=[]] except KeyError: # If we don't get a length then c'est la vie pass # depends on [control=['except'], data=[]] self.logger.info('Reading sitemap from %s (%d bytes)' % (sitemap_uri, self.content_length)) component = sitemap.parse_xml(fh=fh, sitemapindex=False) # Copy resources into self, check any metadata for r in component: self.resources.add(r) # depends on [control=['for'], data=['r']]
def gen_relations(self): """Create the <relations> etree element of an RS3 file. This represents all relation types (both 'rst' and 'multinuc'). Example relation: <rel name="circumstance" type="rst" /> """ relations_elem = E('relations') for relname in sorted(self.relations): relations_elem.append( E('rel', OrderedDict([('name', relname), ('type', self.relations[relname])]))) return relations_elem
def function[gen_relations, parameter[self]]: constant[Create the <relations> etree element of an RS3 file. This represents all relation types (both 'rst' and 'multinuc'). Example relation: <rel name="circumstance" type="rst" /> ] variable[relations_elem] assign[=] call[name[E], parameter[constant[relations]]] for taget[name[relname]] in starred[call[name[sorted], parameter[name[self].relations]]] begin[:] call[name[relations_elem].append, parameter[call[name[E], parameter[constant[rel], call[name[OrderedDict], parameter[list[[<ast.Tuple object at 0x7da2047eb400>, <ast.Tuple object at 0x7da18f09f550>]]]]]]]] return[name[relations_elem]]
keyword[def] identifier[gen_relations] ( identifier[self] ): literal[string] identifier[relations_elem] = identifier[E] ( literal[string] ) keyword[for] identifier[relname] keyword[in] identifier[sorted] ( identifier[self] . identifier[relations] ): identifier[relations_elem] . identifier[append] ( identifier[E] ( literal[string] , identifier[OrderedDict] ([( literal[string] , identifier[relname] ),( literal[string] , identifier[self] . identifier[relations] [ identifier[relname] ])]))) keyword[return] identifier[relations_elem]
def gen_relations(self): """Create the <relations> etree element of an RS3 file. This represents all relation types (both 'rst' and 'multinuc'). Example relation: <rel name="circumstance" type="rst" /> """ relations_elem = E('relations') for relname in sorted(self.relations): relations_elem.append(E('rel', OrderedDict([('name', relname), ('type', self.relations[relname])]))) # depends on [control=['for'], data=['relname']] return relations_elem
def set_sds_name(self, name, sdsObj): """ Set name for SDS :param name: Name of SDS :param sdsObj: ScaleIO SDS object :return: POST request response :rtype: Requests POST response object """ # TODO: # Check if object parameters are the correct type, otherwise throw error # UNSURE IF THIS IS CORRECT WAY TO SET SDS NAME self.conn.connection._check_login() sdsNameDict = {'sdsName': name} response = self.conn.connection._do_post("{}/{}{}/{}".format(self.conn.connection._api_url, "instances/Sds::", sdcObj.id, 'action/setSdsName'), json=sdsNameDict) return response
def function[set_sds_name, parameter[self, name, sdsObj]]: constant[ Set name for SDS :param name: Name of SDS :param sdsObj: ScaleIO SDS object :return: POST request response :rtype: Requests POST response object ] call[name[self].conn.connection._check_login, parameter[]] variable[sdsNameDict] assign[=] dictionary[[<ast.Constant object at 0x7da1b245a5f0>], [<ast.Name object at 0x7da1b245aec0>]] variable[response] assign[=] call[name[self].conn.connection._do_post, parameter[call[constant[{}/{}{}/{}].format, parameter[name[self].conn.connection._api_url, constant[instances/Sds::], name[sdcObj].id, constant[action/setSdsName]]]]] return[name[response]]
keyword[def] identifier[set_sds_name] ( identifier[self] , identifier[name] , identifier[sdsObj] ): literal[string] identifier[self] . identifier[conn] . identifier[connection] . identifier[_check_login] () identifier[sdsNameDict] ={ literal[string] : identifier[name] } identifier[response] = identifier[self] . identifier[conn] . identifier[connection] . identifier[_do_post] ( literal[string] . identifier[format] ( identifier[self] . identifier[conn] . identifier[connection] . identifier[_api_url] , literal[string] , identifier[sdcObj] . identifier[id] , literal[string] ), identifier[json] = identifier[sdsNameDict] ) keyword[return] identifier[response]
def set_sds_name(self, name, sdsObj): """ Set name for SDS :param name: Name of SDS :param sdsObj: ScaleIO SDS object :return: POST request response :rtype: Requests POST response object """ # TODO: # Check if object parameters are the correct type, otherwise throw error # UNSURE IF THIS IS CORRECT WAY TO SET SDS NAME self.conn.connection._check_login() sdsNameDict = {'sdsName': name} response = self.conn.connection._do_post('{}/{}{}/{}'.format(self.conn.connection._api_url, 'instances/Sds::', sdcObj.id, 'action/setSdsName'), json=sdsNameDict) return response
def set_expression(self, expression_dict): """Set protein expression amounts as initial conditions Parameters ---------- expression_dict : dict A dictionary in which the keys are gene names and the values are numbers representing the absolute amount (count per cell) of proteins expressed. Proteins that are not expressed can be represented as nan. Entries that are not in the dict or are in there but resolve to None, are set to the default initial amount. Example: {'EGFR': 12345, 'BRAF': 4567, 'ESR1': nan} """ if self.model is None: return monomers_found = [] monomers_notfound = [] # Iterate over all the monomers for m in self.model.monomers: if (m.name in expression_dict and expression_dict[m.name] is not None): # Try to get the expression amount from the dict init = expression_dict[m.name] # We interpret nan and None as not expressed if math.isnan(init): init = 0 init_round = round(init) set_base_initial_condition(self.model, m, init_round) monomers_found.append(m.name) else: set_base_initial_condition(self.model, m, self.default_initial_amount) monomers_notfound.append(m.name) logger.info('Monomers set to given context') logger.info('-----------------------------') for m in monomers_found: logger.info('%s' % m) if monomers_notfound: logger.info('') logger.info('Monomers not found in given context') logger.info('-----------------------------------') for m in monomers_notfound: logger.info('%s' % m)
def function[set_expression, parameter[self, expression_dict]]: constant[Set protein expression amounts as initial conditions Parameters ---------- expression_dict : dict A dictionary in which the keys are gene names and the values are numbers representing the absolute amount (count per cell) of proteins expressed. Proteins that are not expressed can be represented as nan. Entries that are not in the dict or are in there but resolve to None, are set to the default initial amount. Example: {'EGFR': 12345, 'BRAF': 4567, 'ESR1': nan} ] if compare[name[self].model is constant[None]] begin[:] return[None] variable[monomers_found] assign[=] list[[]] variable[monomers_notfound] assign[=] list[[]] for taget[name[m]] in starred[name[self].model.monomers] begin[:] if <ast.BoolOp object at 0x7da18c4cd420> begin[:] variable[init] assign[=] call[name[expression_dict]][name[m].name] if call[name[math].isnan, parameter[name[init]]] begin[:] variable[init] assign[=] constant[0] variable[init_round] assign[=] call[name[round], parameter[name[init]]] call[name[set_base_initial_condition], parameter[name[self].model, name[m], name[init_round]]] call[name[monomers_found].append, parameter[name[m].name]] call[name[logger].info, parameter[constant[Monomers set to given context]]] call[name[logger].info, parameter[constant[-----------------------------]]] for taget[name[m]] in starred[name[monomers_found]] begin[:] call[name[logger].info, parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[m]]]] if name[monomers_notfound] begin[:] call[name[logger].info, parameter[constant[]]] call[name[logger].info, parameter[constant[Monomers not found in given context]]] call[name[logger].info, parameter[constant[-----------------------------------]]] for taget[name[m]] in starred[name[monomers_notfound]] begin[:] call[name[logger].info, parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[m]]]]
keyword[def] identifier[set_expression] ( identifier[self] , identifier[expression_dict] ): literal[string] keyword[if] identifier[self] . identifier[model] keyword[is] keyword[None] : keyword[return] identifier[monomers_found] =[] identifier[monomers_notfound] =[] keyword[for] identifier[m] keyword[in] identifier[self] . identifier[model] . identifier[monomers] : keyword[if] ( identifier[m] . identifier[name] keyword[in] identifier[expression_dict] keyword[and] identifier[expression_dict] [ identifier[m] . identifier[name] ] keyword[is] keyword[not] keyword[None] ): identifier[init] = identifier[expression_dict] [ identifier[m] . identifier[name] ] keyword[if] identifier[math] . identifier[isnan] ( identifier[init] ): identifier[init] = literal[int] identifier[init_round] = identifier[round] ( identifier[init] ) identifier[set_base_initial_condition] ( identifier[self] . identifier[model] , identifier[m] , identifier[init_round] ) identifier[monomers_found] . identifier[append] ( identifier[m] . identifier[name] ) keyword[else] : identifier[set_base_initial_condition] ( identifier[self] . identifier[model] , identifier[m] , identifier[self] . identifier[default_initial_amount] ) identifier[monomers_notfound] . identifier[append] ( identifier[m] . identifier[name] ) identifier[logger] . identifier[info] ( literal[string] ) identifier[logger] . identifier[info] ( literal[string] ) keyword[for] identifier[m] keyword[in] identifier[monomers_found] : identifier[logger] . identifier[info] ( literal[string] % identifier[m] ) keyword[if] identifier[monomers_notfound] : identifier[logger] . identifier[info] ( literal[string] ) identifier[logger] . identifier[info] ( literal[string] ) identifier[logger] . identifier[info] ( literal[string] ) keyword[for] identifier[m] keyword[in] identifier[monomers_notfound] : identifier[logger] . identifier[info] ( literal[string] % identifier[m] )
def set_expression(self, expression_dict): """Set protein expression amounts as initial conditions Parameters ---------- expression_dict : dict A dictionary in which the keys are gene names and the values are numbers representing the absolute amount (count per cell) of proteins expressed. Proteins that are not expressed can be represented as nan. Entries that are not in the dict or are in there but resolve to None, are set to the default initial amount. Example: {'EGFR': 12345, 'BRAF': 4567, 'ESR1': nan} """ if self.model is None: return # depends on [control=['if'], data=[]] monomers_found = [] monomers_notfound = [] # Iterate over all the monomers for m in self.model.monomers: if m.name in expression_dict and expression_dict[m.name] is not None: # Try to get the expression amount from the dict init = expression_dict[m.name] # We interpret nan and None as not expressed if math.isnan(init): init = 0 # depends on [control=['if'], data=[]] init_round = round(init) set_base_initial_condition(self.model, m, init_round) monomers_found.append(m.name) # depends on [control=['if'], data=[]] else: set_base_initial_condition(self.model, m, self.default_initial_amount) monomers_notfound.append(m.name) # depends on [control=['for'], data=['m']] logger.info('Monomers set to given context') logger.info('-----------------------------') for m in monomers_found: logger.info('%s' % m) # depends on [control=['for'], data=['m']] if monomers_notfound: logger.info('') logger.info('Monomers not found in given context') logger.info('-----------------------------------') for m in monomers_notfound: logger.info('%s' % m) # depends on [control=['for'], data=['m']] # depends on [control=['if'], data=[]]
def get(self, url, headers=None, parameters=None, get_json=True): ''' Send a GET request with custome headers and parameters Args: url (str): URL to send the request to headers (str, optional): custom headers parameters (str, optional): optional parameters Returns: A JSON object of the returned response if `get_json` is True, Requests' response object otherwise ''' if self.debug: print("GET: %s, headers=%s" % (url, headers)) self.headers = self._get_default_headers() get_parameters = self.parameters if get_parameters is None: # In case self.parameters is still empty get_parameters = {} if headers is not None: self.headers.update(headers) if parameters is not None: get_parameters.update(parameters) response = requests.get(url, headers=self.headers, params=get_parameters, auth=self.auth, verify=self.verify_ssl) json_response = self._process_json_response(response) return json_response if get_json is True else response
def function[get, parameter[self, url, headers, parameters, get_json]]: constant[ Send a GET request with custome headers and parameters Args: url (str): URL to send the request to headers (str, optional): custom headers parameters (str, optional): optional parameters Returns: A JSON object of the returned response if `get_json` is True, Requests' response object otherwise ] if name[self].debug begin[:] call[name[print], parameter[binary_operation[constant[GET: %s, headers=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0e38c10>, <ast.Name object at 0x7da1b0e3a830>]]]]] name[self].headers assign[=] call[name[self]._get_default_headers, parameter[]] variable[get_parameters] assign[=] name[self].parameters if compare[name[get_parameters] is constant[None]] begin[:] variable[get_parameters] assign[=] dictionary[[], []] if compare[name[headers] is_not constant[None]] begin[:] call[name[self].headers.update, parameter[name[headers]]] if compare[name[parameters] is_not constant[None]] begin[:] call[name[get_parameters].update, parameter[name[parameters]]] variable[response] assign[=] call[name[requests].get, parameter[name[url]]] variable[json_response] assign[=] call[name[self]._process_json_response, parameter[name[response]]] return[<ast.IfExp object at 0x7da1b0d54a90>]
keyword[def] identifier[get] ( identifier[self] , identifier[url] , identifier[headers] = keyword[None] , identifier[parameters] = keyword[None] , identifier[get_json] = keyword[True] ): literal[string] keyword[if] identifier[self] . identifier[debug] : identifier[print] ( literal[string] %( identifier[url] , identifier[headers] )) identifier[self] . identifier[headers] = identifier[self] . identifier[_get_default_headers] () identifier[get_parameters] = identifier[self] . identifier[parameters] keyword[if] identifier[get_parameters] keyword[is] keyword[None] : identifier[get_parameters] ={} keyword[if] identifier[headers] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[headers] . identifier[update] ( identifier[headers] ) keyword[if] identifier[parameters] keyword[is] keyword[not] keyword[None] : identifier[get_parameters] . identifier[update] ( identifier[parameters] ) identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[self] . identifier[headers] , identifier[params] = identifier[get_parameters] , identifier[auth] = identifier[self] . identifier[auth] , identifier[verify] = identifier[self] . identifier[verify_ssl] ) identifier[json_response] = identifier[self] . identifier[_process_json_response] ( identifier[response] ) keyword[return] identifier[json_response] keyword[if] identifier[get_json] keyword[is] keyword[True] keyword[else] identifier[response]
def get(self, url, headers=None, parameters=None, get_json=True): """ Send a GET request with custome headers and parameters Args: url (str): URL to send the request to headers (str, optional): custom headers parameters (str, optional): optional parameters Returns: A JSON object of the returned response if `get_json` is True, Requests' response object otherwise """ if self.debug: print('GET: %s, headers=%s' % (url, headers)) # depends on [control=['if'], data=[]] self.headers = self._get_default_headers() get_parameters = self.parameters if get_parameters is None: # In case self.parameters is still empty get_parameters = {} # depends on [control=['if'], data=['get_parameters']] if headers is not None: self.headers.update(headers) # depends on [control=['if'], data=['headers']] if parameters is not None: get_parameters.update(parameters) # depends on [control=['if'], data=['parameters']] response = requests.get(url, headers=self.headers, params=get_parameters, auth=self.auth, verify=self.verify_ssl) json_response = self._process_json_response(response) return json_response if get_json is True else response
def get_status(self): """ Returns the status of this router :returns: inactive, shutting down, running or suspended. """ status = yield from self._hypervisor.send('vm get_status "{name}"'.format(name=self._name)) if len(status) == 0: raise DynamipsError("Can't get vm {name} status".format(name=self._name)) return self._status[int(status[0])]
def function[get_status, parameter[self]]: constant[ Returns the status of this router :returns: inactive, shutting down, running or suspended. ] variable[status] assign[=] <ast.YieldFrom object at 0x7da18f7231c0> if compare[call[name[len], parameter[name[status]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da18f721b10> return[call[name[self]._status][call[name[int], parameter[call[name[status]][constant[0]]]]]]
keyword[def] identifier[get_status] ( identifier[self] ): literal[string] identifier[status] = keyword[yield] keyword[from] identifier[self] . identifier[_hypervisor] . identifier[send] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] )) keyword[if] identifier[len] ( identifier[status] )== literal[int] : keyword[raise] identifier[DynamipsError] ( literal[string] . identifier[format] ( identifier[name] = identifier[self] . identifier[_name] )) keyword[return] identifier[self] . identifier[_status] [ identifier[int] ( identifier[status] [ literal[int] ])]
def get_status(self): """ Returns the status of this router :returns: inactive, shutting down, running or suspended. """ status = (yield from self._hypervisor.send('vm get_status "{name}"'.format(name=self._name))) if len(status) == 0: raise DynamipsError("Can't get vm {name} status".format(name=self._name)) # depends on [control=['if'], data=[]] return self._status[int(status[0])]
def listFormats(self, vendorSpecific=None): """See Also: listFormatsResponse() Args: vendorSpecific: Returns: """ response = self.listFormatsResponse(vendorSpecific) return self._read_dataone_type_response(response, 'ObjectFormatList')
def function[listFormats, parameter[self, vendorSpecific]]: constant[See Also: listFormatsResponse() Args: vendorSpecific: Returns: ] variable[response] assign[=] call[name[self].listFormatsResponse, parameter[name[vendorSpecific]]] return[call[name[self]._read_dataone_type_response, parameter[name[response], constant[ObjectFormatList]]]]
keyword[def] identifier[listFormats] ( identifier[self] , identifier[vendorSpecific] = keyword[None] ): literal[string] identifier[response] = identifier[self] . identifier[listFormatsResponse] ( identifier[vendorSpecific] ) keyword[return] identifier[self] . identifier[_read_dataone_type_response] ( identifier[response] , literal[string] )
def listFormats(self, vendorSpecific=None): """See Also: listFormatsResponse() Args: vendorSpecific: Returns: """ response = self.listFormatsResponse(vendorSpecific) return self._read_dataone_type_response(response, 'ObjectFormatList')
def from_jd(jd): '''Calculate Mayan long count from Julian day''' d = jd - EPOCH baktun = trunc(d / 144000) d = (d % 144000) katun = trunc(d / 7200) d = (d % 7200) tun = trunc(d / 360) d = (d % 360) uinal = trunc(d / 20) kin = int((d % 20)) return (baktun, katun, tun, uinal, kin)
def function[from_jd, parameter[jd]]: constant[Calculate Mayan long count from Julian day] variable[d] assign[=] binary_operation[name[jd] - name[EPOCH]] variable[baktun] assign[=] call[name[trunc], parameter[binary_operation[name[d] / constant[144000]]]] variable[d] assign[=] binary_operation[name[d] <ast.Mod object at 0x7da2590d6920> constant[144000]] variable[katun] assign[=] call[name[trunc], parameter[binary_operation[name[d] / constant[7200]]]] variable[d] assign[=] binary_operation[name[d] <ast.Mod object at 0x7da2590d6920> constant[7200]] variable[tun] assign[=] call[name[trunc], parameter[binary_operation[name[d] / constant[360]]]] variable[d] assign[=] binary_operation[name[d] <ast.Mod object at 0x7da2590d6920> constant[360]] variable[uinal] assign[=] call[name[trunc], parameter[binary_operation[name[d] / constant[20]]]] variable[kin] assign[=] call[name[int], parameter[binary_operation[name[d] <ast.Mod object at 0x7da2590d6920> constant[20]]]] return[tuple[[<ast.Name object at 0x7da1b0ff8730>, <ast.Name object at 0x7da1b0ffb520>, <ast.Name object at 0x7da1b0ffaa40>, <ast.Name object at 0x7da1b0eacee0>, <ast.Name object at 0x7da1b0eafa30>]]]
keyword[def] identifier[from_jd] ( identifier[jd] ): literal[string] identifier[d] = identifier[jd] - identifier[EPOCH] identifier[baktun] = identifier[trunc] ( identifier[d] / literal[int] ) identifier[d] =( identifier[d] % literal[int] ) identifier[katun] = identifier[trunc] ( identifier[d] / literal[int] ) identifier[d] =( identifier[d] % literal[int] ) identifier[tun] = identifier[trunc] ( identifier[d] / literal[int] ) identifier[d] =( identifier[d] % literal[int] ) identifier[uinal] = identifier[trunc] ( identifier[d] / literal[int] ) identifier[kin] = identifier[int] (( identifier[d] % literal[int] )) keyword[return] ( identifier[baktun] , identifier[katun] , identifier[tun] , identifier[uinal] , identifier[kin] )
def from_jd(jd): """Calculate Mayan long count from Julian day""" d = jd - EPOCH baktun = trunc(d / 144000) d = d % 144000 katun = trunc(d / 7200) d = d % 7200 tun = trunc(d / 360) d = d % 360 uinal = trunc(d / 20) kin = int(d % 20) return (baktun, katun, tun, uinal, kin)
def sanitize_for_archive(url, headers, payload): """Sanitize payload of a HTTP request by removing the token information before storing/retrieving archived items :param: url: HTTP url request :param: headers: HTTP headers request :param: payload: HTTP payload request :returns url, headers and the sanitized payload """ if 'key' in payload: payload.pop('key') return url, headers, payload
def function[sanitize_for_archive, parameter[url, headers, payload]]: constant[Sanitize payload of a HTTP request by removing the token information before storing/retrieving archived items :param: url: HTTP url request :param: headers: HTTP headers request :param: payload: HTTP payload request :returns url, headers and the sanitized payload ] if compare[constant[key] in name[payload]] begin[:] call[name[payload].pop, parameter[constant[key]]] return[tuple[[<ast.Name object at 0x7da1b0338c70>, <ast.Name object at 0x7da1b033a710>, <ast.Name object at 0x7da1b033a9b0>]]]
keyword[def] identifier[sanitize_for_archive] ( identifier[url] , identifier[headers] , identifier[payload] ): literal[string] keyword[if] literal[string] keyword[in] identifier[payload] : identifier[payload] . identifier[pop] ( literal[string] ) keyword[return] identifier[url] , identifier[headers] , identifier[payload]
def sanitize_for_archive(url, headers, payload): """Sanitize payload of a HTTP request by removing the token information before storing/retrieving archived items :param: url: HTTP url request :param: headers: HTTP headers request :param: payload: HTTP payload request :returns url, headers and the sanitized payload """ if 'key' in payload: payload.pop('key') # depends on [control=['if'], data=['payload']] return (url, headers, payload)
def list_expiration_dates(self, base='roles/all/ssl'): """ Scans through all local .crt files and displays the expiration dates. """ max_fn_len = 0 max_date_len = 0 data = [] for fn in os.listdir(base): fqfn = os.path.join(base, fn) if not os.path.isfile(fqfn): continue if not fn.endswith('.crt'): continue expiration_date = self.get_expiration_date(fqfn) max_fn_len = max(max_fn_len, len(fn)) max_date_len = max(max_date_len, len(str(expiration_date))) data.append((fn, expiration_date)) print('%s %s %s' % ('Filename'.ljust(max_fn_len), 'Expiration Date'.ljust(max_date_len), 'Expired')) now = datetime.now().replace(tzinfo=pytz.UTC) for fn, dt in sorted(data): if dt is None: expired = '?' elif dt < now: expired = 'YES' else: expired = 'NO' print('%s %s %s' % (fn.ljust(max_fn_len), str(dt).ljust(max_date_len), expired))
def function[list_expiration_dates, parameter[self, base]]: constant[ Scans through all local .crt files and displays the expiration dates. ] variable[max_fn_len] assign[=] constant[0] variable[max_date_len] assign[=] constant[0] variable[data] assign[=] list[[]] for taget[name[fn]] in starred[call[name[os].listdir, parameter[name[base]]]] begin[:] variable[fqfn] assign[=] call[name[os].path.join, parameter[name[base], name[fn]]] if <ast.UnaryOp object at 0x7da1b00e3190> begin[:] continue if <ast.UnaryOp object at 0x7da1b00e2500> begin[:] continue variable[expiration_date] assign[=] call[name[self].get_expiration_date, parameter[name[fqfn]]] variable[max_fn_len] assign[=] call[name[max], parameter[name[max_fn_len], call[name[len], parameter[name[fn]]]]] variable[max_date_len] assign[=] call[name[max], parameter[name[max_date_len], call[name[len], parameter[call[name[str], parameter[name[expiration_date]]]]]]] call[name[data].append, parameter[tuple[[<ast.Name object at 0x7da1b00eafe0>, <ast.Name object at 0x7da1b00ebdf0>]]]] call[name[print], parameter[binary_operation[constant[%s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b00eb9d0>, <ast.Call object at 0x7da1b00ea1d0>, <ast.Constant object at 0x7da1b00eb730>]]]]] variable[now] assign[=] call[call[name[datetime].now, parameter[]].replace, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b00e9ea0>, <ast.Name object at 0x7da1b00e8850>]]] in starred[call[name[sorted], parameter[name[data]]]] begin[:] if compare[name[dt] is constant[None]] begin[:] variable[expired] assign[=] constant[?] call[name[print], parameter[binary_operation[constant[%s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b0088af0>, <ast.Call object at 0x7da1b00882e0>, <ast.Name object at 0x7da1b008b100>]]]]]
keyword[def] identifier[list_expiration_dates] ( identifier[self] , identifier[base] = literal[string] ): literal[string] identifier[max_fn_len] = literal[int] identifier[max_date_len] = literal[int] identifier[data] =[] keyword[for] identifier[fn] keyword[in] identifier[os] . identifier[listdir] ( identifier[base] ): identifier[fqfn] = identifier[os] . identifier[path] . identifier[join] ( identifier[base] , identifier[fn] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fqfn] ): keyword[continue] keyword[if] keyword[not] identifier[fn] . identifier[endswith] ( literal[string] ): keyword[continue] identifier[expiration_date] = identifier[self] . identifier[get_expiration_date] ( identifier[fqfn] ) identifier[max_fn_len] = identifier[max] ( identifier[max_fn_len] , identifier[len] ( identifier[fn] )) identifier[max_date_len] = identifier[max] ( identifier[max_date_len] , identifier[len] ( identifier[str] ( identifier[expiration_date] ))) identifier[data] . identifier[append] (( identifier[fn] , identifier[expiration_date] )) identifier[print] ( literal[string] %( literal[string] . identifier[ljust] ( identifier[max_fn_len] ), literal[string] . identifier[ljust] ( identifier[max_date_len] ), literal[string] )) identifier[now] = identifier[datetime] . identifier[now] (). identifier[replace] ( identifier[tzinfo] = identifier[pytz] . identifier[UTC] ) keyword[for] identifier[fn] , identifier[dt] keyword[in] identifier[sorted] ( identifier[data] ): keyword[if] identifier[dt] keyword[is] keyword[None] : identifier[expired] = literal[string] keyword[elif] identifier[dt] < identifier[now] : identifier[expired] = literal[string] keyword[else] : identifier[expired] = literal[string] identifier[print] ( literal[string] %( identifier[fn] . identifier[ljust] ( identifier[max_fn_len] ), identifier[str] ( identifier[dt] ). identifier[ljust] ( identifier[max_date_len] ), identifier[expired] ))
def list_expiration_dates(self, base='roles/all/ssl'): """ Scans through all local .crt files and displays the expiration dates. """ max_fn_len = 0 max_date_len = 0 data = [] for fn in os.listdir(base): fqfn = os.path.join(base, fn) if not os.path.isfile(fqfn): continue # depends on [control=['if'], data=[]] if not fn.endswith('.crt'): continue # depends on [control=['if'], data=[]] expiration_date = self.get_expiration_date(fqfn) max_fn_len = max(max_fn_len, len(fn)) max_date_len = max(max_date_len, len(str(expiration_date))) data.append((fn, expiration_date)) # depends on [control=['for'], data=['fn']] print('%s %s %s' % ('Filename'.ljust(max_fn_len), 'Expiration Date'.ljust(max_date_len), 'Expired')) now = datetime.now().replace(tzinfo=pytz.UTC) for (fn, dt) in sorted(data): if dt is None: expired = '?' # depends on [control=['if'], data=[]] elif dt < now: expired = 'YES' # depends on [control=['if'], data=[]] else: expired = 'NO' print('%s %s %s' % (fn.ljust(max_fn_len), str(dt).ljust(max_date_len), expired)) # depends on [control=['for'], data=[]]
def forwards(self, orm): "Write your forwards methods here." for a in orm.Article.objects.all(): if a.updated: a.last_updated = a.updated a.save(force_update=True)
def function[forwards, parameter[self, orm]]: constant[Write your forwards methods here.] for taget[name[a]] in starred[call[name[orm].Article.objects.all, parameter[]]] begin[:] if name[a].updated begin[:] name[a].last_updated assign[=] name[a].updated call[name[a].save, parameter[]]
keyword[def] identifier[forwards] ( identifier[self] , identifier[orm] ): literal[string] keyword[for] identifier[a] keyword[in] identifier[orm] . identifier[Article] . identifier[objects] . identifier[all] (): keyword[if] identifier[a] . identifier[updated] : identifier[a] . identifier[last_updated] = identifier[a] . identifier[updated] identifier[a] . identifier[save] ( identifier[force_update] = keyword[True] )
def forwards(self, orm): """Write your forwards methods here.""" for a in orm.Article.objects.all(): if a.updated: a.last_updated = a.updated a.save(force_update=True) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']]
def sigma_to_pressure(sigma, psfc, ptop): r"""Calculate pressure from sigma values. Parameters ---------- sigma : ndarray The sigma levels to be converted to pressure levels. psfc : `pint.Quantity` The surface pressure value. ptop : `pint.Quantity` The pressure value at the top of the model domain. Returns ------- `pint.Quantity` The pressure values at the given sigma levels. Notes ----- Sigma definition adapted from [Philips1957]_. .. math:: p = \sigma * (p_{sfc} - p_{top}) + p_{top} * :math:`p` is pressure at a given `\sigma` level * :math:`\sigma` is non-dimensional, scaled pressure * :math:`p_{sfc}` is pressure at the surface or model floor * :math:`p_{top}` is pressure at the top of the model domain """ if np.any(sigma < 0) or np.any(sigma > 1): raise ValueError('Sigma values should be bounded by 0 and 1') if psfc.magnitude < 0 or ptop.magnitude < 0: raise ValueError('Pressure input should be non-negative') return sigma * (psfc - ptop) + ptop
def function[sigma_to_pressure, parameter[sigma, psfc, ptop]]: constant[Calculate pressure from sigma values. Parameters ---------- sigma : ndarray The sigma levels to be converted to pressure levels. psfc : `pint.Quantity` The surface pressure value. ptop : `pint.Quantity` The pressure value at the top of the model domain. Returns ------- `pint.Quantity` The pressure values at the given sigma levels. Notes ----- Sigma definition adapted from [Philips1957]_. .. math:: p = \sigma * (p_{sfc} - p_{top}) + p_{top} * :math:`p` is pressure at a given `\sigma` level * :math:`\sigma` is non-dimensional, scaled pressure * :math:`p_{sfc}` is pressure at the surface or model floor * :math:`p_{top}` is pressure at the top of the model domain ] if <ast.BoolOp object at 0x7da1b2219780> begin[:] <ast.Raise object at 0x7da1b22b9b10> if <ast.BoolOp object at 0x7da1b22ba680> begin[:] <ast.Raise object at 0x7da1b22bbd90> return[binary_operation[binary_operation[name[sigma] * binary_operation[name[psfc] - name[ptop]]] + name[ptop]]]
keyword[def] identifier[sigma_to_pressure] ( identifier[sigma] , identifier[psfc] , identifier[ptop] ): literal[string] keyword[if] identifier[np] . identifier[any] ( identifier[sigma] < literal[int] ) keyword[or] identifier[np] . identifier[any] ( identifier[sigma] > literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[psfc] . identifier[magnitude] < literal[int] keyword[or] identifier[ptop] . identifier[magnitude] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[sigma] *( identifier[psfc] - identifier[ptop] )+ identifier[ptop]
def sigma_to_pressure(sigma, psfc, ptop): """Calculate pressure from sigma values. Parameters ---------- sigma : ndarray The sigma levels to be converted to pressure levels. psfc : `pint.Quantity` The surface pressure value. ptop : `pint.Quantity` The pressure value at the top of the model domain. Returns ------- `pint.Quantity` The pressure values at the given sigma levels. Notes ----- Sigma definition adapted from [Philips1957]_. .. math:: p = \\sigma * (p_{sfc} - p_{top}) + p_{top} * :math:`p` is pressure at a given `\\sigma` level * :math:`\\sigma` is non-dimensional, scaled pressure * :math:`p_{sfc}` is pressure at the surface or model floor * :math:`p_{top}` is pressure at the top of the model domain """ if np.any(sigma < 0) or np.any(sigma > 1): raise ValueError('Sigma values should be bounded by 0 and 1') # depends on [control=['if'], data=[]] if psfc.magnitude < 0 or ptop.magnitude < 0: raise ValueError('Pressure input should be non-negative') # depends on [control=['if'], data=[]] return sigma * (psfc - ptop) + ptop
def get_ips(self, instance_id): """ Retrieves all IP addresses associated to a given instance. :return: tuple (IPs) """ self._init_az_api() cluster_name, node_name = instance_id # XXX: keep in sync with contents of `vm_deployment_template` ip_name = ('{node_name}-public-ip'.format(node_name=node_name)) ip = self._network_client.public_ip_addresses.get(cluster_name, ip_name) if (ip.provisioning_state == 'Succeeded' and ip.ip_address): return [ip.ip_address] else: return []
def function[get_ips, parameter[self, instance_id]]: constant[ Retrieves all IP addresses associated to a given instance. :return: tuple (IPs) ] call[name[self]._init_az_api, parameter[]] <ast.Tuple object at 0x7da1b088a8f0> assign[=] name[instance_id] variable[ip_name] assign[=] call[constant[{node_name}-public-ip].format, parameter[]] variable[ip] assign[=] call[name[self]._network_client.public_ip_addresses.get, parameter[name[cluster_name], name[ip_name]]] if <ast.BoolOp object at 0x7da1b060ca60> begin[:] return[list[[<ast.Attribute object at 0x7da1b060c8e0>]]]
keyword[def] identifier[get_ips] ( identifier[self] , identifier[instance_id] ): literal[string] identifier[self] . identifier[_init_az_api] () identifier[cluster_name] , identifier[node_name] = identifier[instance_id] identifier[ip_name] =( literal[string] . identifier[format] ( identifier[node_name] = identifier[node_name] )) identifier[ip] = identifier[self] . identifier[_network_client] . identifier[public_ip_addresses] . identifier[get] ( identifier[cluster_name] , identifier[ip_name] ) keyword[if] ( identifier[ip] . identifier[provisioning_state] == literal[string] keyword[and] identifier[ip] . identifier[ip_address] ): keyword[return] [ identifier[ip] . identifier[ip_address] ] keyword[else] : keyword[return] []
def get_ips(self, instance_id): """ Retrieves all IP addresses associated to a given instance. :return: tuple (IPs) """ self._init_az_api() (cluster_name, node_name) = instance_id # XXX: keep in sync with contents of `vm_deployment_template` ip_name = '{node_name}-public-ip'.format(node_name=node_name) ip = self._network_client.public_ip_addresses.get(cluster_name, ip_name) if ip.provisioning_state == 'Succeeded' and ip.ip_address: return [ip.ip_address] # depends on [control=['if'], data=[]] else: return []
def _clean_ctx(self): """ Clears and deallocates context """ try: if self.ctx is not None: libcrypto.EVP_MD_CTX_free(self.ctx) del self.ctx except AttributeError: pass self.digest_out = None self.digest_finalized = False
def function[_clean_ctx, parameter[self]]: constant[ Clears and deallocates context ] <ast.Try object at 0x7da1b28b9210> name[self].digest_out assign[=] constant[None] name[self].digest_finalized assign[=] constant[False]
keyword[def] identifier[_clean_ctx] ( identifier[self] ): literal[string] keyword[try] : keyword[if] identifier[self] . identifier[ctx] keyword[is] keyword[not] keyword[None] : identifier[libcrypto] . identifier[EVP_MD_CTX_free] ( identifier[self] . identifier[ctx] ) keyword[del] identifier[self] . identifier[ctx] keyword[except] identifier[AttributeError] : keyword[pass] identifier[self] . identifier[digest_out] = keyword[None] identifier[self] . identifier[digest_finalized] = keyword[False]
def _clean_ctx(self): """ Clears and deallocates context """ try: if self.ctx is not None: libcrypto.EVP_MD_CTX_free(self.ctx) del self.ctx # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] self.digest_out = None self.digest_finalized = False
def set_or_clear_breakpoint(self): """Set/Clear breakpoint""" editorstack = self.get_current_editorstack() if editorstack is not None: self.switch_to_plugin() editorstack.set_or_clear_breakpoint()
def function[set_or_clear_breakpoint, parameter[self]]: constant[Set/Clear breakpoint] variable[editorstack] assign[=] call[name[self].get_current_editorstack, parameter[]] if compare[name[editorstack] is_not constant[None]] begin[:] call[name[self].switch_to_plugin, parameter[]] call[name[editorstack].set_or_clear_breakpoint, parameter[]]
keyword[def] identifier[set_or_clear_breakpoint] ( identifier[self] ): literal[string] identifier[editorstack] = identifier[self] . identifier[get_current_editorstack] () keyword[if] identifier[editorstack] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[switch_to_plugin] () identifier[editorstack] . identifier[set_or_clear_breakpoint] ()
def set_or_clear_breakpoint(self): """Set/Clear breakpoint""" editorstack = self.get_current_editorstack() if editorstack is not None: self.switch_to_plugin() editorstack.set_or_clear_breakpoint() # depends on [control=['if'], data=['editorstack']]
def workflow_describe(object_id, input_params={}, always_retry=True, **kwargs): """ Invokes the /workflow-xxxx/describe API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Fdescribe """ return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def function[workflow_describe, parameter[object_id, input_params, always_retry]]: constant[ Invokes the /workflow-xxxx/describe API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Fdescribe ] return[call[name[DXHTTPRequest], parameter[binary_operation[constant[/%s/describe] <ast.Mod object at 0x7da2590d6920> name[object_id]], name[input_params]]]]
keyword[def] identifier[workflow_describe] ( identifier[object_id] , identifier[input_params] ={}, identifier[always_retry] = keyword[True] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[DXHTTPRequest] ( literal[string] % identifier[object_id] , identifier[input_params] , identifier[always_retry] = identifier[always_retry] ,** identifier[kwargs] )
def workflow_describe(object_id, input_params={}, always_retry=True, **kwargs): """ Invokes the /workflow-xxxx/describe API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Workflows-and-Analyses#API-method%3A-%2Fworkflow-xxxx%2Fdescribe """ return DXHTTPRequest('/%s/describe' % object_id, input_params, always_retry=always_retry, **kwargs)
def run_supernova_keyring(ctx, action, environment, parameter): """ Sets or retrieves credentials stored in your system's keyring using the python-keyring module. Global credentials can be shared between multiple configuration sections: \b [prod] OS_PASSWORD=USE_KEYRING['sso_password'] ... \b [staging] OS_PASSWORD=USE_KEYRING['my sso_password'] ... You could retrieve or set the global credential using these commands: \b supernova -g global sso_password <= get the credential supernova -s global sso_password <= set the credential Local credentials are intended for use with only one configuration section: \b [prod] OS_PASSWORD=USE_KEYRING ... \b [staging] OS_PASSWORD=USE_KEYRING ... You could retrieve or set the local credential using these commands: \b supernova -g prod OS_PASSWORD <= get the credential for prod supernova -s prod OS_PASSWORD <= set the credential for prod \b supernova -g staging OS_PASSWORD <= get the credential for staging supernova -s staging OS_PASSWORD <= set the credential for staging Full documentation: \b http://supernova.readthedocs.org/en/latest/configuring/ """ if action == 'get_credential': result = credentials.get_user_password(env=environment, param=parameter) if not result: click.echo("\nUnable to find a credential matching the data " "provided.") ctx.exit(1) else: click.echo("\nFound credential for {0}: {1}".format(*result)) ctx.exit() elif action == 'set_credential': msg = """ Preparing to set a credential in the keyring for: - Environment : {0} - Parameter : {1} If this is correct, enter the corresponding credential to store in your keyring or press CTRL-C to abort""".format(environment, parameter) credential = click.prompt(text=msg, hide_input=True) result = credentials.set_user_password(environment=environment, parameter=parameter, password=credential) if result: click.echo("\nSuccessfully stored.") ctx.exit() else: click.echo("\nUnable to store your credential.") ctx.exit(1) else: click.secho("ERROR: must specify --get or --set", 
bold=True) click.echo(ctx.get_help()) ctx.exit()
def function[run_supernova_keyring, parameter[ctx, action, environment, parameter]]: constant[ Sets or retrieves credentials stored in your system's keyring using the python-keyring module. Global credentials can be shared between multiple configuration sections:  [prod] OS_PASSWORD=USE_KEYRING['sso_password'] ...  [staging] OS_PASSWORD=USE_KEYRING['my sso_password'] ... You could retrieve or set the global credential using these commands:  supernova -g global sso_password <= get the credential supernova -s global sso_password <= set the credential Local credentials are intended for use with only one configuration section:  [prod] OS_PASSWORD=USE_KEYRING ...  [staging] OS_PASSWORD=USE_KEYRING ... You could retrieve or set the local credential using these commands:  supernova -g prod OS_PASSWORD <= get the credential for prod supernova -s prod OS_PASSWORD <= set the credential for prod  supernova -g staging OS_PASSWORD <= get the credential for staging supernova -s staging OS_PASSWORD <= set the credential for staging Full documentation:  http://supernova.readthedocs.org/en/latest/configuring/ ] if compare[name[action] equal[==] constant[get_credential]] begin[:] variable[result] assign[=] call[name[credentials].get_user_password, parameter[]] if <ast.UnaryOp object at 0x7da1b2867c70> begin[:] call[name[click].echo, parameter[constant[ Unable to find a credential matching the data provided.]]] call[name[ctx].exit, parameter[constant[1]]]
keyword[def] identifier[run_supernova_keyring] ( identifier[ctx] , identifier[action] , identifier[environment] , identifier[parameter] ): literal[string] keyword[if] identifier[action] == literal[string] : identifier[result] = identifier[credentials] . identifier[get_user_password] ( identifier[env] = identifier[environment] , identifier[param] = identifier[parameter] ) keyword[if] keyword[not] identifier[result] : identifier[click] . identifier[echo] ( literal[string] literal[string] ) identifier[ctx] . identifier[exit] ( literal[int] ) keyword[else] : identifier[click] . identifier[echo] ( literal[string] . identifier[format] (* identifier[result] )) identifier[ctx] . identifier[exit] () keyword[elif] identifier[action] == literal[string] : identifier[msg] = literal[string] . identifier[format] ( identifier[environment] , identifier[parameter] ) identifier[credential] = identifier[click] . identifier[prompt] ( identifier[text] = identifier[msg] , identifier[hide_input] = keyword[True] ) identifier[result] = identifier[credentials] . identifier[set_user_password] ( identifier[environment] = identifier[environment] , identifier[parameter] = identifier[parameter] , identifier[password] = identifier[credential] ) keyword[if] identifier[result] : identifier[click] . identifier[echo] ( literal[string] ) identifier[ctx] . identifier[exit] () keyword[else] : identifier[click] . identifier[echo] ( literal[string] ) identifier[ctx] . identifier[exit] ( literal[int] ) keyword[else] : identifier[click] . identifier[secho] ( literal[string] , identifier[bold] = keyword[True] ) identifier[click] . identifier[echo] ( identifier[ctx] . identifier[get_help] ()) identifier[ctx] . identifier[exit] ()
def run_supernova_keyring(ctx, action, environment, parameter): """ Sets or retrieves credentials stored in your system's keyring using the python-keyring module. Global credentials can be shared between multiple configuration sections: \x08 [prod] OS_PASSWORD=USE_KEYRING['sso_password'] ... \x08 [staging] OS_PASSWORD=USE_KEYRING['my sso_password'] ... You could retrieve or set the global credential using these commands: \x08 supernova -g global sso_password <= get the credential supernova -s global sso_password <= set the credential Local credentials are intended for use with only one configuration section: \x08 [prod] OS_PASSWORD=USE_KEYRING ... \x08 [staging] OS_PASSWORD=USE_KEYRING ... You could retrieve or set the local credential using these commands: \x08 supernova -g prod OS_PASSWORD <= get the credential for prod supernova -s prod OS_PASSWORD <= set the credential for prod \x08 supernova -g staging OS_PASSWORD <= get the credential for staging supernova -s staging OS_PASSWORD <= set the credential for staging Full documentation: \x08 http://supernova.readthedocs.org/en/latest/configuring/ """ if action == 'get_credential': result = credentials.get_user_password(env=environment, param=parameter) if not result: click.echo('\nUnable to find a credential matching the data provided.') ctx.exit(1) # depends on [control=['if'], data=[]] else: click.echo('\nFound credential for {0}: {1}'.format(*result)) ctx.exit() # depends on [control=['if'], data=[]] elif action == 'set_credential': msg = '\nPreparing to set a credential in the keyring for:\n\n - Environment : {0}\n - Parameter : {1}\n\nIf this is correct, enter the corresponding credential to store in your keyring\nor press CTRL-C to abort'.format(environment, parameter) credential = click.prompt(text=msg, hide_input=True) result = credentials.set_user_password(environment=environment, parameter=parameter, password=credential) if result: click.echo('\nSuccessfully stored.') ctx.exit() # depends on 
[control=['if'], data=[]] else: click.echo('\nUnable to store your credential.') ctx.exit(1) # depends on [control=['if'], data=[]] else: click.secho('ERROR: must specify --get or --set', bold=True) click.echo(ctx.get_help()) ctx.exit()
def convert_acquire(self, shift, instruction): """Return converted `AcquireInstruction`. Args: shift(int): Offset time. instruction (AcquireInstruction): acquire instruction. Returns: dict: Dictionary of required parameters. """ meas_level = self._run_config.get('meas_level', 2) command_dict = { 'name': 'acquire', 't0': shift+instruction.start_time, 'duration': instruction.duration, 'qubits': [q.index for q in instruction.acquires], 'memory_slot': [m.index for m in instruction.mem_slots] } if meas_level == 2: # setup discriminators if instruction.command.discriminator: command_dict.update({ 'discriminators': [ QobjMeasurementOption( name=instruction.command.discriminator.name, params=instruction.command.discriminator.params) ] }) # setup register_slots command_dict.update({ 'register_slot': [regs.index for regs in instruction.reg_slots] }) if meas_level >= 1: # setup kernels if instruction.command.kernel: command_dict.update({ 'kernels': [ QobjMeasurementOption( name=instruction.command.kernel.name, params=instruction.command.kernel.params) ] }) return self._qobj_model(**command_dict)
def function[convert_acquire, parameter[self, shift, instruction]]: constant[Return converted `AcquireInstruction`. Args: shift(int): Offset time. instruction (AcquireInstruction): acquire instruction. Returns: dict: Dictionary of required parameters. ] variable[meas_level] assign[=] call[name[self]._run_config.get, parameter[constant[meas_level], constant[2]]] variable[command_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b0383a00>, <ast.Constant object at 0x7da1b0383670>, <ast.Constant object at 0x7da1b0383610>, <ast.Constant object at 0x7da1b0380fa0>, <ast.Constant object at 0x7da1b0382350>], [<ast.Constant object at 0x7da1b0381300>, <ast.BinOp object at 0x7da1b0382fe0>, <ast.Attribute object at 0x7da1b0383940>, <ast.ListComp object at 0x7da1b0382740>, <ast.ListComp object at 0x7da1b0381f90>]] if compare[name[meas_level] equal[==] constant[2]] begin[:] if name[instruction].command.discriminator begin[:] call[name[command_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0383370>], [<ast.List object at 0x7da1b0383430>]]]] call[name[command_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da1b03a5060>], [<ast.ListComp object at 0x7da1b03a4610>]]]] if compare[name[meas_level] greater_or_equal[>=] constant[1]] begin[:] if name[instruction].command.kernel begin[:] call[name[command_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da1b03a4fa0>], [<ast.List object at 0x7da1b03a5f60>]]]] return[call[name[self]._qobj_model, parameter[]]]
keyword[def] identifier[convert_acquire] ( identifier[self] , identifier[shift] , identifier[instruction] ): literal[string] identifier[meas_level] = identifier[self] . identifier[_run_config] . identifier[get] ( literal[string] , literal[int] ) identifier[command_dict] ={ literal[string] : literal[string] , literal[string] : identifier[shift] + identifier[instruction] . identifier[start_time] , literal[string] : identifier[instruction] . identifier[duration] , literal[string] :[ identifier[q] . identifier[index] keyword[for] identifier[q] keyword[in] identifier[instruction] . identifier[acquires] ], literal[string] :[ identifier[m] . identifier[index] keyword[for] identifier[m] keyword[in] identifier[instruction] . identifier[mem_slots] ] } keyword[if] identifier[meas_level] == literal[int] : keyword[if] identifier[instruction] . identifier[command] . identifier[discriminator] : identifier[command_dict] . identifier[update] ({ literal[string] :[ identifier[QobjMeasurementOption] ( identifier[name] = identifier[instruction] . identifier[command] . identifier[discriminator] . identifier[name] , identifier[params] = identifier[instruction] . identifier[command] . identifier[discriminator] . identifier[params] ) ] }) identifier[command_dict] . identifier[update] ({ literal[string] :[ identifier[regs] . identifier[index] keyword[for] identifier[regs] keyword[in] identifier[instruction] . identifier[reg_slots] ] }) keyword[if] identifier[meas_level] >= literal[int] : keyword[if] identifier[instruction] . identifier[command] . identifier[kernel] : identifier[command_dict] . identifier[update] ({ literal[string] :[ identifier[QobjMeasurementOption] ( identifier[name] = identifier[instruction] . identifier[command] . identifier[kernel] . identifier[name] , identifier[params] = identifier[instruction] . identifier[command] . identifier[kernel] . identifier[params] ) ] }) keyword[return] identifier[self] . identifier[_qobj_model] (** identifier[command_dict] )
def convert_acquire(self, shift, instruction): """Return converted `AcquireInstruction`. Args: shift(int): Offset time. instruction (AcquireInstruction): acquire instruction. Returns: dict: Dictionary of required parameters. """ meas_level = self._run_config.get('meas_level', 2) command_dict = {'name': 'acquire', 't0': shift + instruction.start_time, 'duration': instruction.duration, 'qubits': [q.index for q in instruction.acquires], 'memory_slot': [m.index for m in instruction.mem_slots]} if meas_level == 2: # setup discriminators if instruction.command.discriminator: command_dict.update({'discriminators': [QobjMeasurementOption(name=instruction.command.discriminator.name, params=instruction.command.discriminator.params)]}) # depends on [control=['if'], data=[]] # setup register_slots command_dict.update({'register_slot': [regs.index for regs in instruction.reg_slots]}) # depends on [control=['if'], data=[]] if meas_level >= 1: # setup kernels if instruction.command.kernel: command_dict.update({'kernels': [QobjMeasurementOption(name=instruction.command.kernel.name, params=instruction.command.kernel.params)]}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return self._qobj_model(**command_dict)
def run(mapper=None, reducer=None, combiner=None, script_path=None, jobconfs=(), **kw): """Hadoopy entrance function This is to be called in all Hadoopy job's. Handles arguments passed in, calls the provided functions with input, and stores the output. TypedBytes are used if the following is True os.environ['stream_map_input'] == 'typedbytes' It is *highly* recommended that TypedBytes be used for all non-trivial tasks. Keep in mind that the semantics of what you can safely emit from your functions is limited when using Text (i.e., no \\t or \\n). You can use the base64 module to ensure that your output is clean. If the HADOOPY_CHDIR environmental variable is set, this will immediately change the working directory to the one specified. This is useful if your data is provided in an archive but your program assumes it is in that directory. As hadoop streaming relies on stdin/stdout/stderr for communication, anything that outputs on them in an unexpected way (especially stdout) will break the pipe on the Java side and can potentially cause data errors. To fix this problem, hadoopy allows file descriptors (integers) to be provided to each task. These will be used instead of stdin/stdout by hadoopy. This is designed to combine with the 'pipe' command. To use the pipe functionality, instead of using `your_script.py map` use `your_script.py pipe map` which will call the script as a subprocess and use the read_fd/write_fd command line arguments for communication. This isolates your script and eliminates the largest source of errors when using hadoop streaming. 
The pipe functionality has the following semantics stdin: Always an empty file stdout: Redirected to stderr (which is visible in the hadoop log) stderr: Kept as stderr read_fd: File descriptor that points to the true stdin write_fd: File descriptor that points to the true stdout | **Command Interface** | The command line switches added to your script (e.g., script.py) are python script.py *map* (read_fd) (write_fd) Use the provided mapper, optional read_fd/write_fd. python script.py *reduce* (read_fd) (write_fd) Use the provided reducer, optional read_fd/write_fd. python script.py *combine* (read_fd) (write_fd) Use the provided combiner, optional read_fd/write_fd. python script.py *freeze* <tar_path> <-Z add_file0 -Z add_file1...> Freeze the script to a tar file specified by <tar_path>. The extension may be .tar or .tar.gz. All files are placed in the root of the tar. Files specified with -Z will be added to the tar root. python script.py info Prints a json object containing 'tasks' which is a list of tasks which can include 'map', 'combine', and 'reduce'. Also contains 'doc' which is the provided documentation through the doc argument to the run function. The tasks correspond to provided inputs to the run function. | **Specification of mapper/reducer/combiner** | Input Key/Value Types | For TypedBytes/SequenceFileInputFormat, the Key/Value are the decoded TypedBytes | For TextInputFormat, the Key is a byte offset (int) and the Value is a line without the newline (string) | | Output Key/Value Types | For TypedBytes, anything Pickle-able can be used | For Text, types are converted to string. Note that neither may contain \\t or \\n as these are used in the encoding. Output is key\\tvalue\\n | | Expected arguments | mapper(key, value) or mapper.map(key, value) | reducer(key, values) or reducer.reduce(key, values) | combiner(key, values) or combiner.reduce(key, values) | | Optional methods | func.configure(): Called before any input read. Returns None. 
| func.close(): Called after all input read. Returns None or Iterator of (key, value) | | Expected return | None or Iterator of (key, value) :param mapper: Function or class following the above spec :param reducer: Function or class following the above spec :param combiner: Function or class following the above spec :param doc: If specified, on error print this and call sys.exit(1) """ if script_path is None: script_path = inspect.getfile(inspect.currentframe().f_back) script_path = os.path.abspath(script_path) parser = argparse.ArgumentParser() parser.add_argument('--log', help='Default log level to use', choices=('debug', 'info', 'warning', 'error', 'critical'), default='info') subparsers = parser.add_subparsers(help='Job Commands (additional help available inside each)') parser_freeze = subparsers.add_parser('freeze', help='Freeze the script to a tar file.') parser_freeze.add_argument('tar_path', help='Path to .tar or .tar.gz file.') parser_freeze.add_argument('-Z', help='Path to a file to be placed in the tar root (may be repeated for many files).', action='append') parser_freeze.set_defaults(func=run_freeze) parser_info = subparsers.add_parser('info', help='Display job info as JSON') parser_info.set_defaults(func=lambda : run_info(mapper, reducer, combiner, jobconfs, kw)) parser_launch_frozen = subparsers.add_parser('launch_frozen', help='Run Hadoop job (freezes script)') parser_launch_frozen.add_argument('in_name', help='Input HDFS path') parser_launch_frozen.add_argument('out_name', help='Output HDFS path') parser_launch_frozen.add_argument('-jobconf', help='Jobconf', action='append', dest='jobconfs', default=[]) parser_launch_frozen.set_defaults(func=lambda *args, **kw: hadoopy.launch_frozen(*args, script_path=script_path, **kw)) parser_pipe = subparsers.add_parser('pipe', help='Internal: Run map/combine/reduce task using "pipe hopping" to make stdout redirect to stderr.') parser_pipe.add_argument('command', help='Command to run', choices=('map', 'reduce', 
'combine')) parser_pipe.set_defaults(func=run_pipe) parser_map = subparsers.add_parser('map', help='Internal: Run map task.') parser_map.add_argument('read_fd', type=int, help='Read file descriptor', nargs='?') parser_map.add_argument('write_fd', type=int, help='Write file descriptor', nargs='?') parser_map.set_defaults(func=lambda **y: run_task(mapper, reducer, combiner, command='map', **y)) parser_combine = subparsers.add_parser('combine', help='Internal: Run combine task.') parser_combine.add_argument('read_fd', type=int, help='Read file descriptor', nargs='?') parser_combine.add_argument('write_fd', type=int, help='Write file descriptor', nargs='?') parser_combine.set_defaults(func=lambda **y: run_task(mapper, reducer, combiner, command='combine', **y)) parser_reduce = subparsers.add_parser('reduce', help='Internal: Run reduce task.') parser_reduce.add_argument('read_fd', type=int, help='Read file descriptor', nargs='?') parser_reduce.add_argument('write_fd', type=int, help='Write file descriptor', nargs='?') parser_reduce.set_defaults(func=lambda **y: run_task(mapper, reducer, combiner, command='reduce', **y)) args = vars(parser.parse_args()) # Handle logging arguments if 'log' in args: numeric_level = getattr(logging, args['log'].upper(), None) logging.basicConfig(level=numeric_level) del args['log'] # Call function with all arguments except for itself func = args['func'] del args['func'] func(**args)
def function[run, parameter[mapper, reducer, combiner, script_path, jobconfs]]: constant[Hadoopy entrance function This is to be called in all Hadoopy job's. Handles arguments passed in, calls the provided functions with input, and stores the output. TypedBytes are used if the following is True os.environ['stream_map_input'] == 'typedbytes' It is *highly* recommended that TypedBytes be used for all non-trivial tasks. Keep in mind that the semantics of what you can safely emit from your functions is limited when using Text (i.e., no \t or \n). You can use the base64 module to ensure that your output is clean. If the HADOOPY_CHDIR environmental variable is set, this will immediately change the working directory to the one specified. This is useful if your data is provided in an archive but your program assumes it is in that directory. As hadoop streaming relies on stdin/stdout/stderr for communication, anything that outputs on them in an unexpected way (especially stdout) will break the pipe on the Java side and can potentially cause data errors. To fix this problem, hadoopy allows file descriptors (integers) to be provided to each task. These will be used instead of stdin/stdout by hadoopy. This is designed to combine with the 'pipe' command. To use the pipe functionality, instead of using `your_script.py map` use `your_script.py pipe map` which will call the script as a subprocess and use the read_fd/write_fd command line arguments for communication. This isolates your script and eliminates the largest source of errors when using hadoop streaming. 
The pipe functionality has the following semantics stdin: Always an empty file stdout: Redirected to stderr (which is visible in the hadoop log) stderr: Kept as stderr read_fd: File descriptor that points to the true stdin write_fd: File descriptor that points to the true stdout | **Command Interface** | The command line switches added to your script (e.g., script.py) are python script.py *map* (read_fd) (write_fd) Use the provided mapper, optional read_fd/write_fd. python script.py *reduce* (read_fd) (write_fd) Use the provided reducer, optional read_fd/write_fd. python script.py *combine* (read_fd) (write_fd) Use the provided combiner, optional read_fd/write_fd. python script.py *freeze* <tar_path> <-Z add_file0 -Z add_file1...> Freeze the script to a tar file specified by <tar_path>. The extension may be .tar or .tar.gz. All files are placed in the root of the tar. Files specified with -Z will be added to the tar root. python script.py info Prints a json object containing 'tasks' which is a list of tasks which can include 'map', 'combine', and 'reduce'. Also contains 'doc' which is the provided documentation through the doc argument to the run function. The tasks correspond to provided inputs to the run function. | **Specification of mapper/reducer/combiner** | Input Key/Value Types | For TypedBytes/SequenceFileInputFormat, the Key/Value are the decoded TypedBytes | For TextInputFormat, the Key is a byte offset (int) and the Value is a line without the newline (string) | | Output Key/Value Types | For TypedBytes, anything Pickle-able can be used | For Text, types are converted to string. Note that neither may contain \t or \n as these are used in the encoding. Output is key\tvalue\n | | Expected arguments | mapper(key, value) or mapper.map(key, value) | reducer(key, values) or reducer.reduce(key, values) | combiner(key, values) or combiner.reduce(key, values) | | Optional methods | func.configure(): Called before any input read. Returns None. 
| func.close(): Called after all input read. Returns None or Iterator of (key, value) | | Expected return | None or Iterator of (key, value) :param mapper: Function or class following the above spec :param reducer: Function or class following the above spec :param combiner: Function or class following the above spec :param doc: If specified, on error print this and call sys.exit(1) ] if compare[name[script_path] is constant[None]] begin[:] variable[script_path] assign[=] call[name[inspect].getfile, parameter[call[name[inspect].currentframe, parameter[]].f_back]] variable[script_path] assign[=] call[name[os].path.abspath, parameter[name[script_path]]] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[--log]]] variable[subparsers] assign[=] call[name[parser].add_subparsers, parameter[]] variable[parser_freeze] assign[=] call[name[subparsers].add_parser, parameter[constant[freeze]]] call[name[parser_freeze].add_argument, parameter[constant[tar_path]]] call[name[parser_freeze].add_argument, parameter[constant[-Z]]] call[name[parser_freeze].set_defaults, parameter[]] variable[parser_info] assign[=] call[name[subparsers].add_parser, parameter[constant[info]]] call[name[parser_info].set_defaults, parameter[]] variable[parser_launch_frozen] assign[=] call[name[subparsers].add_parser, parameter[constant[launch_frozen]]] call[name[parser_launch_frozen].add_argument, parameter[constant[in_name]]] call[name[parser_launch_frozen].add_argument, parameter[constant[out_name]]] call[name[parser_launch_frozen].add_argument, parameter[constant[-jobconf]]] call[name[parser_launch_frozen].set_defaults, parameter[]] variable[parser_pipe] assign[=] call[name[subparsers].add_parser, parameter[constant[pipe]]] call[name[parser_pipe].add_argument, parameter[constant[command]]] call[name[parser_pipe].set_defaults, parameter[]] variable[parser_map] assign[=] call[name[subparsers].add_parser, parameter[constant[map]]] 
call[name[parser_map].add_argument, parameter[constant[read_fd]]] call[name[parser_map].add_argument, parameter[constant[write_fd]]] call[name[parser_map].set_defaults, parameter[]] variable[parser_combine] assign[=] call[name[subparsers].add_parser, parameter[constant[combine]]] call[name[parser_combine].add_argument, parameter[constant[read_fd]]] call[name[parser_combine].add_argument, parameter[constant[write_fd]]] call[name[parser_combine].set_defaults, parameter[]] variable[parser_reduce] assign[=] call[name[subparsers].add_parser, parameter[constant[reduce]]] call[name[parser_reduce].add_argument, parameter[constant[read_fd]]] call[name[parser_reduce].add_argument, parameter[constant[write_fd]]] call[name[parser_reduce].set_defaults, parameter[]] variable[args] assign[=] call[name[vars], parameter[call[name[parser].parse_args, parameter[]]]] if compare[constant[log] in name[args]] begin[:] variable[numeric_level] assign[=] call[name[getattr], parameter[name[logging], call[call[name[args]][constant[log]].upper, parameter[]], constant[None]]] call[name[logging].basicConfig, parameter[]] <ast.Delete object at 0x7da18f7216f0> variable[func] assign[=] call[name[args]][constant[func]] <ast.Delete object at 0x7da18f723880> call[name[func], parameter[]]
keyword[def] identifier[run] ( identifier[mapper] = keyword[None] , identifier[reducer] = keyword[None] , identifier[combiner] = keyword[None] , identifier[script_path] = keyword[None] , identifier[jobconfs] =(),** identifier[kw] ): literal[string] keyword[if] identifier[script_path] keyword[is] keyword[None] : identifier[script_path] = identifier[inspect] . identifier[getfile] ( identifier[inspect] . identifier[currentframe] (). identifier[f_back] ) identifier[script_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[script_path] ) identifier[parser] = identifier[argparse] . identifier[ArgumentParser] () identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[choices] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ), identifier[default] = literal[string] ) identifier[subparsers] = identifier[parser] . identifier[add_subparsers] ( identifier[help] = literal[string] ) identifier[parser_freeze] = identifier[subparsers] . identifier[add_parser] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_freeze] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_freeze] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[action] = literal[string] ) identifier[parser_freeze] . identifier[set_defaults] ( identifier[func] = identifier[run_freeze] ) identifier[parser_info] = identifier[subparsers] . identifier[add_parser] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_info] . identifier[set_defaults] ( identifier[func] = keyword[lambda] : identifier[run_info] ( identifier[mapper] , identifier[reducer] , identifier[combiner] , identifier[jobconfs] , identifier[kw] )) identifier[parser_launch_frozen] = identifier[subparsers] . 
identifier[add_parser] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_launch_frozen] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_launch_frozen] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_launch_frozen] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[action] = literal[string] , identifier[dest] = literal[string] , identifier[default] =[]) identifier[parser_launch_frozen] . identifier[set_defaults] ( identifier[func] = keyword[lambda] * identifier[args] ,** identifier[kw] : identifier[hadoopy] . identifier[launch_frozen] (* identifier[args] , identifier[script_path] = identifier[script_path] ,** identifier[kw] )) identifier[parser_pipe] = identifier[subparsers] . identifier[add_parser] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_pipe] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[choices] =( literal[string] , literal[string] , literal[string] )) identifier[parser_pipe] . identifier[set_defaults] ( identifier[func] = identifier[run_pipe] ) identifier[parser_map] = identifier[subparsers] . identifier[add_parser] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_map] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[help] = literal[string] , identifier[nargs] = literal[string] ) identifier[parser_map] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[help] = literal[string] , identifier[nargs] = literal[string] ) identifier[parser_map] . 
identifier[set_defaults] ( identifier[func] = keyword[lambda] ** identifier[y] : identifier[run_task] ( identifier[mapper] , identifier[reducer] , identifier[combiner] , identifier[command] = literal[string] ,** identifier[y] )) identifier[parser_combine] = identifier[subparsers] . identifier[add_parser] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_combine] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[help] = literal[string] , identifier[nargs] = literal[string] ) identifier[parser_combine] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[help] = literal[string] , identifier[nargs] = literal[string] ) identifier[parser_combine] . identifier[set_defaults] ( identifier[func] = keyword[lambda] ** identifier[y] : identifier[run_task] ( identifier[mapper] , identifier[reducer] , identifier[combiner] , identifier[command] = literal[string] ,** identifier[y] )) identifier[parser_reduce] = identifier[subparsers] . identifier[add_parser] ( literal[string] , identifier[help] = literal[string] ) identifier[parser_reduce] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[help] = literal[string] , identifier[nargs] = literal[string] ) identifier[parser_reduce] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[int] , identifier[help] = literal[string] , identifier[nargs] = literal[string] ) identifier[parser_reduce] . identifier[set_defaults] ( identifier[func] = keyword[lambda] ** identifier[y] : identifier[run_task] ( identifier[mapper] , identifier[reducer] , identifier[combiner] , identifier[command] = literal[string] ,** identifier[y] )) identifier[args] = identifier[vars] ( identifier[parser] . 
identifier[parse_args] ()) keyword[if] literal[string] keyword[in] identifier[args] : identifier[numeric_level] = identifier[getattr] ( identifier[logging] , identifier[args] [ literal[string] ]. identifier[upper] (), keyword[None] ) identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[numeric_level] ) keyword[del] identifier[args] [ literal[string] ] identifier[func] = identifier[args] [ literal[string] ] keyword[del] identifier[args] [ literal[string] ] identifier[func] (** identifier[args] )
def run(mapper=None, reducer=None, combiner=None, script_path=None, jobconfs=(), **kw): """Hadoopy entrance function This is to be called in all Hadoopy job's. Handles arguments passed in, calls the provided functions with input, and stores the output. TypedBytes are used if the following is True os.environ['stream_map_input'] == 'typedbytes' It is *highly* recommended that TypedBytes be used for all non-trivial tasks. Keep in mind that the semantics of what you can safely emit from your functions is limited when using Text (i.e., no \\t or \\n). You can use the base64 module to ensure that your output is clean. If the HADOOPY_CHDIR environmental variable is set, this will immediately change the working directory to the one specified. This is useful if your data is provided in an archive but your program assumes it is in that directory. As hadoop streaming relies on stdin/stdout/stderr for communication, anything that outputs on them in an unexpected way (especially stdout) will break the pipe on the Java side and can potentially cause data errors. To fix this problem, hadoopy allows file descriptors (integers) to be provided to each task. These will be used instead of stdin/stdout by hadoopy. This is designed to combine with the 'pipe' command. To use the pipe functionality, instead of using `your_script.py map` use `your_script.py pipe map` which will call the script as a subprocess and use the read_fd/write_fd command line arguments for communication. This isolates your script and eliminates the largest source of errors when using hadoop streaming. 
The pipe functionality has the following semantics stdin: Always an empty file stdout: Redirected to stderr (which is visible in the hadoop log) stderr: Kept as stderr read_fd: File descriptor that points to the true stdin write_fd: File descriptor that points to the true stdout | **Command Interface** | The command line switches added to your script (e.g., script.py) are python script.py *map* (read_fd) (write_fd) Use the provided mapper, optional read_fd/write_fd. python script.py *reduce* (read_fd) (write_fd) Use the provided reducer, optional read_fd/write_fd. python script.py *combine* (read_fd) (write_fd) Use the provided combiner, optional read_fd/write_fd. python script.py *freeze* <tar_path> <-Z add_file0 -Z add_file1...> Freeze the script to a tar file specified by <tar_path>. The extension may be .tar or .tar.gz. All files are placed in the root of the tar. Files specified with -Z will be added to the tar root. python script.py info Prints a json object containing 'tasks' which is a list of tasks which can include 'map', 'combine', and 'reduce'. Also contains 'doc' which is the provided documentation through the doc argument to the run function. The tasks correspond to provided inputs to the run function. | **Specification of mapper/reducer/combiner** | Input Key/Value Types | For TypedBytes/SequenceFileInputFormat, the Key/Value are the decoded TypedBytes | For TextInputFormat, the Key is a byte offset (int) and the Value is a line without the newline (string) | | Output Key/Value Types | For TypedBytes, anything Pickle-able can be used | For Text, types are converted to string. Note that neither may contain \\t or \\n as these are used in the encoding. Output is key\\tvalue\\n | | Expected arguments | mapper(key, value) or mapper.map(key, value) | reducer(key, values) or reducer.reduce(key, values) | combiner(key, values) or combiner.reduce(key, values) | | Optional methods | func.configure(): Called before any input read. Returns None. 
| func.close(): Called after all input read. Returns None or Iterator of (key, value) | | Expected return | None or Iterator of (key, value) :param mapper: Function or class following the above spec :param reducer: Function or class following the above spec :param combiner: Function or class following the above spec :param doc: If specified, on error print this and call sys.exit(1) """ if script_path is None: script_path = inspect.getfile(inspect.currentframe().f_back) # depends on [control=['if'], data=['script_path']] script_path = os.path.abspath(script_path) parser = argparse.ArgumentParser() parser.add_argument('--log', help='Default log level to use', choices=('debug', 'info', 'warning', 'error', 'critical'), default='info') subparsers = parser.add_subparsers(help='Job Commands (additional help available inside each)') parser_freeze = subparsers.add_parser('freeze', help='Freeze the script to a tar file.') parser_freeze.add_argument('tar_path', help='Path to .tar or .tar.gz file.') parser_freeze.add_argument('-Z', help='Path to a file to be placed in the tar root (may be repeated for many files).', action='append') parser_freeze.set_defaults(func=run_freeze) parser_info = subparsers.add_parser('info', help='Display job info as JSON') parser_info.set_defaults(func=lambda : run_info(mapper, reducer, combiner, jobconfs, kw)) parser_launch_frozen = subparsers.add_parser('launch_frozen', help='Run Hadoop job (freezes script)') parser_launch_frozen.add_argument('in_name', help='Input HDFS path') parser_launch_frozen.add_argument('out_name', help='Output HDFS path') parser_launch_frozen.add_argument('-jobconf', help='Jobconf', action='append', dest='jobconfs', default=[]) parser_launch_frozen.set_defaults(func=lambda *args, **kw: hadoopy.launch_frozen(*args, script_path=script_path, **kw)) parser_pipe = subparsers.add_parser('pipe', help='Internal: Run map/combine/reduce task using "pipe hopping" to make stdout redirect to stderr.') 
parser_pipe.add_argument('command', help='Command to run', choices=('map', 'reduce', 'combine')) parser_pipe.set_defaults(func=run_pipe) parser_map = subparsers.add_parser('map', help='Internal: Run map task.') parser_map.add_argument('read_fd', type=int, help='Read file descriptor', nargs='?') parser_map.add_argument('write_fd', type=int, help='Write file descriptor', nargs='?') parser_map.set_defaults(func=lambda **y: run_task(mapper, reducer, combiner, command='map', **y)) parser_combine = subparsers.add_parser('combine', help='Internal: Run combine task.') parser_combine.add_argument('read_fd', type=int, help='Read file descriptor', nargs='?') parser_combine.add_argument('write_fd', type=int, help='Write file descriptor', nargs='?') parser_combine.set_defaults(func=lambda **y: run_task(mapper, reducer, combiner, command='combine', **y)) parser_reduce = subparsers.add_parser('reduce', help='Internal: Run reduce task.') parser_reduce.add_argument('read_fd', type=int, help='Read file descriptor', nargs='?') parser_reduce.add_argument('write_fd', type=int, help='Write file descriptor', nargs='?') parser_reduce.set_defaults(func=lambda **y: run_task(mapper, reducer, combiner, command='reduce', **y)) args = vars(parser.parse_args()) # Handle logging arguments if 'log' in args: numeric_level = getattr(logging, args['log'].upper(), None) logging.basicConfig(level=numeric_level) del args['log'] # depends on [control=['if'], data=['args']] # Call function with all arguments except for itself func = args['func'] del args['func'] func(**args)
def unixtime_to_datetimestr(unixtime, timefmt='%Y/%m/%d %H:%M:%S', isutc=True): """ TODO: ranme to datetimestr """ try: if unixtime == -1: return 'NA' if unixtime is None: return None if isutc: return datetime.datetime.utcfromtimestamp(unixtime).strftime(timefmt) else: return datetime.datetime.fromtimestamp(unixtime).strftime(timefmt) except ValueError: raise
def function[unixtime_to_datetimestr, parameter[unixtime, timefmt, isutc]]: constant[ TODO: ranme to datetimestr ] <ast.Try object at 0x7da1b24b3f40>
keyword[def] identifier[unixtime_to_datetimestr] ( identifier[unixtime] , identifier[timefmt] = literal[string] , identifier[isutc] = keyword[True] ): literal[string] keyword[try] : keyword[if] identifier[unixtime] ==- literal[int] : keyword[return] literal[string] keyword[if] identifier[unixtime] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] identifier[isutc] : keyword[return] identifier[datetime] . identifier[datetime] . identifier[utcfromtimestamp] ( identifier[unixtime] ). identifier[strftime] ( identifier[timefmt] ) keyword[else] : keyword[return] identifier[datetime] . identifier[datetime] . identifier[fromtimestamp] ( identifier[unixtime] ). identifier[strftime] ( identifier[timefmt] ) keyword[except] identifier[ValueError] : keyword[raise]
def unixtime_to_datetimestr(unixtime, timefmt='%Y/%m/%d %H:%M:%S', isutc=True): """ TODO: ranme to datetimestr """ try: if unixtime == -1: return 'NA' # depends on [control=['if'], data=[]] if unixtime is None: return None # depends on [control=['if'], data=[]] if isutc: return datetime.datetime.utcfromtimestamp(unixtime).strftime(timefmt) # depends on [control=['if'], data=[]] else: return datetime.datetime.fromtimestamp(unixtime).strftime(timefmt) # depends on [control=['try'], data=[]] except ValueError: raise # depends on [control=['except'], data=[]]
def createScanner(self, login, tableName, options): """ Parameters: - login - tableName - options """ self.send_createScanner(login, tableName, options) return self.recv_createScanner()
def function[createScanner, parameter[self, login, tableName, options]]: constant[ Parameters: - login - tableName - options ] call[name[self].send_createScanner, parameter[name[login], name[tableName], name[options]]] return[call[name[self].recv_createScanner, parameter[]]]
keyword[def] identifier[createScanner] ( identifier[self] , identifier[login] , identifier[tableName] , identifier[options] ): literal[string] identifier[self] . identifier[send_createScanner] ( identifier[login] , identifier[tableName] , identifier[options] ) keyword[return] identifier[self] . identifier[recv_createScanner] ()
def createScanner(self, login, tableName, options): """ Parameters: - login - tableName - options """ self.send_createScanner(login, tableName, options) return self.recv_createScanner()
def data(args): """ %prog data data.bin samples.ids STR.ids meta.tsv Make data.tsv based on meta.tsv. """ p = OptionParser(data.__doc__) p.add_option("--notsv", default=False, action="store_true", help="Do not write data.tsv") opts, args = p.parse_args(args) if len(args) != 4: sys.exit(not p.print_help()) databin, sampleids, strids, metafile = args final_columns, percentiles = read_meta(metafile) df, m, samples, loci = read_binfile(databin, sampleids, strids) # Clean the data m %= 1000 # Get the larger of the two alleles m[m == 999] = -1 # Missing data final = set(final_columns) remove = [] for i, locus in enumerate(loci): if locus not in final: remove.append(locus) continue pf = "STRs_{}_SEARCH".format(timestamp()) filteredstrids = "{}.STR.ids".format(pf) fw = open(filteredstrids, "w") print("\n".join(final_columns), file=fw) fw.close() logging.debug("Dropped {} columns; Retained {} columns (`{}`)".\ format(len(remove), len(final_columns), filteredstrids)) # Remove low-quality columns! df.drop(remove, inplace=True, axis=1) df.columns = final_columns filtered_bin = "{}.data.bin".format(pf) if need_update(databin, filtered_bin): m = df.as_matrix() m.tofile(filtered_bin) logging.debug("Filtered binary matrix written to `{}`".format(filtered_bin)) # Write data output filtered_tsv = "{}.data.tsv".format(pf) if not opts.notsv and need_update(databin, filtered_tsv): df.to_csv(filtered_tsv, sep="\t", index_label="SampleKey")
def function[data, parameter[args]]: constant[ %prog data data.bin samples.ids STR.ids meta.tsv Make data.tsv based on meta.tsv. ] variable[p] assign[=] call[name[OptionParser], parameter[name[data].__doc__]] call[name[p].add_option, parameter[constant[--notsv]]] <ast.Tuple object at 0x7da1b0976710> assign[=] call[name[p].parse_args, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[4]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b0976200>]] <ast.Tuple object at 0x7da1b0976260> assign[=] name[args] <ast.Tuple object at 0x7da1b0976bc0> assign[=] call[name[read_meta], parameter[name[metafile]]] <ast.Tuple object at 0x7da1b09763b0> assign[=] call[name[read_binfile], parameter[name[databin], name[sampleids], name[strids]]] <ast.AugAssign object at 0x7da1b0977010> call[name[m]][compare[name[m] equal[==] constant[999]]] assign[=] <ast.UnaryOp object at 0x7da1b0976e30> variable[final] assign[=] call[name[set], parameter[name[final_columns]]] variable[remove] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b09762c0>, <ast.Name object at 0x7da1b0976290>]]] in starred[call[name[enumerate], parameter[name[loci]]]] begin[:] if compare[name[locus] <ast.NotIn object at 0x7da2590d7190> name[final]] begin[:] call[name[remove].append, parameter[name[locus]]] continue variable[pf] assign[=] call[constant[STRs_{}_SEARCH].format, parameter[call[name[timestamp], parameter[]]]] variable[filteredstrids] assign[=] call[constant[{}.STR.ids].format, parameter[name[pf]]] variable[fw] assign[=] call[name[open], parameter[name[filteredstrids], constant[w]]] call[name[print], parameter[call[constant[ ].join, parameter[name[final_columns]]]]] call[name[fw].close, parameter[]] call[name[logging].debug, parameter[call[constant[Dropped {} columns; Retained {} columns (`{}`)].format, parameter[call[name[len], parameter[name[remove]]], call[name[len], parameter[name[final_columns]]], name[filteredstrids]]]]] 
call[name[df].drop, parameter[name[remove]]] name[df].columns assign[=] name[final_columns] variable[filtered_bin] assign[=] call[constant[{}.data.bin].format, parameter[name[pf]]] if call[name[need_update], parameter[name[databin], name[filtered_bin]]] begin[:] variable[m] assign[=] call[name[df].as_matrix, parameter[]] call[name[m].tofile, parameter[name[filtered_bin]]] call[name[logging].debug, parameter[call[constant[Filtered binary matrix written to `{}`].format, parameter[name[filtered_bin]]]]] variable[filtered_tsv] assign[=] call[constant[{}.data.tsv].format, parameter[name[pf]]] if <ast.BoolOp object at 0x7da1b08d12a0> begin[:] call[name[df].to_csv, parameter[name[filtered_tsv]]]
keyword[def] identifier[data] ( identifier[args] ): literal[string] identifier[p] = identifier[OptionParser] ( identifier[data] . identifier[__doc__] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[databin] , identifier[sampleids] , identifier[strids] , identifier[metafile] = identifier[args] identifier[final_columns] , identifier[percentiles] = identifier[read_meta] ( identifier[metafile] ) identifier[df] , identifier[m] , identifier[samples] , identifier[loci] = identifier[read_binfile] ( identifier[databin] , identifier[sampleids] , identifier[strids] ) identifier[m] %= literal[int] identifier[m] [ identifier[m] == literal[int] ]=- literal[int] identifier[final] = identifier[set] ( identifier[final_columns] ) identifier[remove] =[] keyword[for] identifier[i] , identifier[locus] keyword[in] identifier[enumerate] ( identifier[loci] ): keyword[if] identifier[locus] keyword[not] keyword[in] identifier[final] : identifier[remove] . identifier[append] ( identifier[locus] ) keyword[continue] identifier[pf] = literal[string] . identifier[format] ( identifier[timestamp] ()) identifier[filteredstrids] = literal[string] . identifier[format] ( identifier[pf] ) identifier[fw] = identifier[open] ( identifier[filteredstrids] , literal[string] ) identifier[print] ( literal[string] . identifier[join] ( identifier[final_columns] ), identifier[file] = identifier[fw] ) identifier[fw] . identifier[close] () identifier[logging] . identifier[debug] ( literal[string] . 
identifier[format] ( identifier[len] ( identifier[remove] ), identifier[len] ( identifier[final_columns] ), identifier[filteredstrids] )) identifier[df] . identifier[drop] ( identifier[remove] , identifier[inplace] = keyword[True] , identifier[axis] = literal[int] ) identifier[df] . identifier[columns] = identifier[final_columns] identifier[filtered_bin] = literal[string] . identifier[format] ( identifier[pf] ) keyword[if] identifier[need_update] ( identifier[databin] , identifier[filtered_bin] ): identifier[m] = identifier[df] . identifier[as_matrix] () identifier[m] . identifier[tofile] ( identifier[filtered_bin] ) identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[filtered_bin] )) identifier[filtered_tsv] = literal[string] . identifier[format] ( identifier[pf] ) keyword[if] keyword[not] identifier[opts] . identifier[notsv] keyword[and] identifier[need_update] ( identifier[databin] , identifier[filtered_tsv] ): identifier[df] . identifier[to_csv] ( identifier[filtered_tsv] , identifier[sep] = literal[string] , identifier[index_label] = literal[string] )
def data(args): """ %prog data data.bin samples.ids STR.ids meta.tsv Make data.tsv based on meta.tsv. """ p = OptionParser(data.__doc__) p.add_option('--notsv', default=False, action='store_true', help='Do not write data.tsv') (opts, args) = p.parse_args(args) if len(args) != 4: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (databin, sampleids, strids, metafile) = args (final_columns, percentiles) = read_meta(metafile) (df, m, samples, loci) = read_binfile(databin, sampleids, strids) # Clean the data m %= 1000 # Get the larger of the two alleles m[m == 999] = -1 # Missing data final = set(final_columns) remove = [] for (i, locus) in enumerate(loci): if locus not in final: remove.append(locus) continue # depends on [control=['if'], data=['locus']] # depends on [control=['for'], data=[]] pf = 'STRs_{}_SEARCH'.format(timestamp()) filteredstrids = '{}.STR.ids'.format(pf) fw = open(filteredstrids, 'w') print('\n'.join(final_columns), file=fw) fw.close() logging.debug('Dropped {} columns; Retained {} columns (`{}`)'.format(len(remove), len(final_columns), filteredstrids)) # Remove low-quality columns! df.drop(remove, inplace=True, axis=1) df.columns = final_columns filtered_bin = '{}.data.bin'.format(pf) if need_update(databin, filtered_bin): m = df.as_matrix() m.tofile(filtered_bin) logging.debug('Filtered binary matrix written to `{}`'.format(filtered_bin)) # depends on [control=['if'], data=[]] # Write data output filtered_tsv = '{}.data.tsv'.format(pf) if not opts.notsv and need_update(databin, filtered_tsv): df.to_csv(filtered_tsv, sep='\t', index_label='SampleKey') # depends on [control=['if'], data=[]]
def is_smart(self, value): """Set group is_smart property to value. Args: value: Boolean. """ self.set_bool("is_smart", value) if value is True: if self.find("criteria") is None: # pylint: disable=attribute-defined-outside-init self.criteria = ElementTree.SubElement(self, "criteria")
def function[is_smart, parameter[self, value]]: constant[Set group is_smart property to value. Args: value: Boolean. ] call[name[self].set_bool, parameter[constant[is_smart], name[value]]] if compare[name[value] is constant[True]] begin[:] if compare[call[name[self].find, parameter[constant[criteria]]] is constant[None]] begin[:] name[self].criteria assign[=] call[name[ElementTree].SubElement, parameter[name[self], constant[criteria]]]
keyword[def] identifier[is_smart] ( identifier[self] , identifier[value] ): literal[string] identifier[self] . identifier[set_bool] ( literal[string] , identifier[value] ) keyword[if] identifier[value] keyword[is] keyword[True] : keyword[if] identifier[self] . identifier[find] ( literal[string] ) keyword[is] keyword[None] : identifier[self] . identifier[criteria] = identifier[ElementTree] . identifier[SubElement] ( identifier[self] , literal[string] )
def is_smart(self, value): """Set group is_smart property to value. Args: value: Boolean. """ self.set_bool('is_smart', value) if value is True: if self.find('criteria') is None: # pylint: disable=attribute-defined-outside-init self.criteria = ElementTree.SubElement(self, 'criteria') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def convert_invalid_url(url): """Convert invalid url with adding extra 'http://' schema into it :param url: :return: """ regex_valid_url = re.compile( r'^(?:http|ftp)s?://' # http:// or https:// r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|' #domain... r'localhost|' #localhost... r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip r'(?::\d+)?' # optional port r'(?:/?|[/?]\S+)$', re.IGNORECASE) return url if regex_valid_url.match(url) else 'http://{}'.format(url)
def function[convert_invalid_url, parameter[url]]: constant[Convert invalid url with adding extra 'http://' schema into it :param url: :return: ] variable[regex_valid_url] assign[=] call[name[re].compile, parameter[constant[^(?:http|ftp)s?://(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|localhost|\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})(?::\d+)?(?:/?|[/?]\S+)$], name[re].IGNORECASE]] return[<ast.IfExp object at 0x7da1b0924ca0>]
keyword[def] identifier[convert_invalid_url] ( identifier[url] ): literal[string] identifier[regex_valid_url] = identifier[re] . identifier[compile] ( literal[string] literal[string] literal[string] literal[string] literal[string] literal[string] , identifier[re] . identifier[IGNORECASE] ) keyword[return] identifier[url] keyword[if] identifier[regex_valid_url] . identifier[match] ( identifier[url] ) keyword[else] literal[string] . identifier[format] ( identifier[url] )
def convert_invalid_url(url): """Convert invalid url with adding extra 'http://' schema into it :param url: :return: """ # http:// or https:// #domain... #localhost... # ...or ip # optional port regex_valid_url = re.compile('^(?:http|ftp)s?://(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+(?:[A-Z]{2,6}\\.?|[A-Z0-9-]{2,}\\.?)|localhost|\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3})(?::\\d+)?(?:/?|[/?]\\S+)$', re.IGNORECASE) return url if regex_valid_url.match(url) else 'http://{}'.format(url)
def dump_engines(target=sys.stderr): """Print successfully imported templating engines.""" print("Available templating engines:", file=target) width = max(len(engine) for engine in engines.engines) for handle, engine in sorted(engines.engines.items()): description = engine.__doc__.split('\n', 0)[0] print(" %-*s - %s" % (width, handle, description), file=target)
def function[dump_engines, parameter[target]]: constant[Print successfully imported templating engines.] call[name[print], parameter[constant[Available templating engines:]]] variable[width] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b15e7070>]] for taget[tuple[[<ast.Name object at 0x7da1b15e5030>, <ast.Name object at 0x7da1b15e4fd0>]]] in starred[call[name[sorted], parameter[call[name[engines].engines.items, parameter[]]]]] begin[:] variable[description] assign[=] call[call[name[engine].__doc__.split, parameter[constant[ ], constant[0]]]][constant[0]] call[name[print], parameter[binary_operation[constant[ %-*s - %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b15e58d0>, <ast.Name object at 0x7da1b15e4130>, <ast.Name object at 0x7da1b15e4310>]]]]]
keyword[def] identifier[dump_engines] ( identifier[target] = identifier[sys] . identifier[stderr] ): literal[string] identifier[print] ( literal[string] , identifier[file] = identifier[target] ) identifier[width] = identifier[max] ( identifier[len] ( identifier[engine] ) keyword[for] identifier[engine] keyword[in] identifier[engines] . identifier[engines] ) keyword[for] identifier[handle] , identifier[engine] keyword[in] identifier[sorted] ( identifier[engines] . identifier[engines] . identifier[items] ()): identifier[description] = identifier[engine] . identifier[__doc__] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ] identifier[print] ( literal[string] %( identifier[width] , identifier[handle] , identifier[description] ), identifier[file] = identifier[target] )
def dump_engines(target=sys.stderr): """Print successfully imported templating engines.""" print('Available templating engines:', file=target) width = max((len(engine) for engine in engines.engines)) for (handle, engine) in sorted(engines.engines.items()): description = engine.__doc__.split('\n', 0)[0] print(' %-*s - %s' % (width, handle, description), file=target) # depends on [control=['for'], data=[]]
def multi_split(s, split): # type: (S, Iterable[S]) -> List[S] """Splits on multiple given separators.""" for r in split: s = s.replace(r, "|") return [i for i in s.split("|") if len(i) > 0]
def function[multi_split, parameter[s, split]]: constant[Splits on multiple given separators.] for taget[name[r]] in starred[name[split]] begin[:] variable[s] assign[=] call[name[s].replace, parameter[name[r], constant[|]]] return[<ast.ListComp object at 0x7da2044c04c0>]
keyword[def] identifier[multi_split] ( identifier[s] , identifier[split] ): literal[string] keyword[for] identifier[r] keyword[in] identifier[split] : identifier[s] = identifier[s] . identifier[replace] ( identifier[r] , literal[string] ) keyword[return] [ identifier[i] keyword[for] identifier[i] keyword[in] identifier[s] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[i] )> literal[int] ]
def multi_split(s, split): # type: (S, Iterable[S]) -> List[S] 'Splits on multiple given separators.' for r in split: s = s.replace(r, '|') # depends on [control=['for'], data=['r']] return [i for i in s.split('|') if len(i) > 0]
def _rollaxis_right(A, num_rolls): """ Roll the tensor `A` forward `num_rolls` times """ assert num_rolls > 0 rank = tf.rank(A) perm = tf.concat([rank - num_rolls + tf.range(num_rolls), tf.range(rank - num_rolls)], 0) return tf.transpose(A, perm)
def function[_rollaxis_right, parameter[A, num_rolls]]: constant[ Roll the tensor `A` forward `num_rolls` times ] assert[compare[name[num_rolls] greater[>] constant[0]]] variable[rank] assign[=] call[name[tf].rank, parameter[name[A]]] variable[perm] assign[=] call[name[tf].concat, parameter[list[[<ast.BinOp object at 0x7da1b1c988e0>, <ast.Call object at 0x7da1b1c98df0>]], constant[0]]] return[call[name[tf].transpose, parameter[name[A], name[perm]]]]
keyword[def] identifier[_rollaxis_right] ( identifier[A] , identifier[num_rolls] ): literal[string] keyword[assert] identifier[num_rolls] > literal[int] identifier[rank] = identifier[tf] . identifier[rank] ( identifier[A] ) identifier[perm] = identifier[tf] . identifier[concat] ([ identifier[rank] - identifier[num_rolls] + identifier[tf] . identifier[range] ( identifier[num_rolls] ), identifier[tf] . identifier[range] ( identifier[rank] - identifier[num_rolls] )], literal[int] ) keyword[return] identifier[tf] . identifier[transpose] ( identifier[A] , identifier[perm] )
def _rollaxis_right(A, num_rolls): """ Roll the tensor `A` forward `num_rolls` times """ assert num_rolls > 0 rank = tf.rank(A) perm = tf.concat([rank - num_rolls + tf.range(num_rolls), tf.range(rank - num_rolls)], 0) return tf.transpose(A, perm)
def setPresence(self, status=SkypeUtils.Status.Online): """ Set the current user's presence on the network. Supports :attr:`.Status.Online`, :attr:`.Status.Busy` or :attr:`.Status.Hidden` (shown as :attr:`.Status.Offline` to others). Args: status (.Status): new availability to display to contacts """ self.conn("PUT", "{0}/users/ME/presenceDocs/messagingService".format(self.conn.msgsHost), auth=SkypeConnection.Auth.RegToken, json={"status": status.label})
def function[setPresence, parameter[self, status]]: constant[ Set the current user's presence on the network. Supports :attr:`.Status.Online`, :attr:`.Status.Busy` or :attr:`.Status.Hidden` (shown as :attr:`.Status.Offline` to others). Args: status (.Status): new availability to display to contacts ] call[name[self].conn, parameter[constant[PUT], call[constant[{0}/users/ME/presenceDocs/messagingService].format, parameter[name[self].conn.msgsHost]]]]
keyword[def] identifier[setPresence] ( identifier[self] , identifier[status] = identifier[SkypeUtils] . identifier[Status] . identifier[Online] ): literal[string] identifier[self] . identifier[conn] ( literal[string] , literal[string] . identifier[format] ( identifier[self] . identifier[conn] . identifier[msgsHost] ), identifier[auth] = identifier[SkypeConnection] . identifier[Auth] . identifier[RegToken] , identifier[json] ={ literal[string] : identifier[status] . identifier[label] })
def setPresence(self, status=SkypeUtils.Status.Online): """ Set the current user's presence on the network. Supports :attr:`.Status.Online`, :attr:`.Status.Busy` or :attr:`.Status.Hidden` (shown as :attr:`.Status.Offline` to others). Args: status (.Status): new availability to display to contacts """ self.conn('PUT', '{0}/users/ME/presenceDocs/messagingService'.format(self.conn.msgsHost), auth=SkypeConnection.Auth.RegToken, json={'status': status.label})
def lpgamma(p, a): """ Multidimensional gamma / partial gamma function Parameters ---------- p : int something.... a : float something.... Returns ------- Multidimensional gamma / partial gamma function """ res = p * (p - 1.0) / 4.0 * log(pi) for ii in range(1, p+1): res += gammaln(a + (1.0 - ii) / 2.0) return res
def function[lpgamma, parameter[p, a]]: constant[ Multidimensional gamma / partial gamma function Parameters ---------- p : int something.... a : float something.... Returns ------- Multidimensional gamma / partial gamma function ] variable[res] assign[=] binary_operation[binary_operation[binary_operation[name[p] * binary_operation[name[p] - constant[1.0]]] / constant[4.0]] * call[name[log], parameter[name[pi]]]] for taget[name[ii]] in starred[call[name[range], parameter[constant[1], binary_operation[name[p] + constant[1]]]]] begin[:] <ast.AugAssign object at 0x7da20c993a00> return[name[res]]
keyword[def] identifier[lpgamma] ( identifier[p] , identifier[a] ): literal[string] identifier[res] = identifier[p] *( identifier[p] - literal[int] )/ literal[int] * identifier[log] ( identifier[pi] ) keyword[for] identifier[ii] keyword[in] identifier[range] ( literal[int] , identifier[p] + literal[int] ): identifier[res] += identifier[gammaln] ( identifier[a] +( literal[int] - identifier[ii] )/ literal[int] ) keyword[return] identifier[res]
def lpgamma(p, a): """ Multidimensional gamma / partial gamma function Parameters ---------- p : int something.... a : float something.... Returns ------- Multidimensional gamma / partial gamma function """ res = p * (p - 1.0) / 4.0 * log(pi) for ii in range(1, p + 1): res += gammaln(a + (1.0 - ii) / 2.0) # depends on [control=['for'], data=['ii']] return res
def trajectory_set_item(self, idx, value): """ :param self: mdtraj.Trajectory :param idx: possible slices over frames, :param value: :return: """ import mdtraj assert isinstance(self, mdtraj.Trajectory), type(self) if not isinstance(value, mdtraj.Trajectory): raise TypeError("value to assign is of incorrect type(%s). Should be mdtraj.Trajectory" % type(value)) idx = np.index_exp[idx] frames, atoms = None, None if isinstance(idx, (list, tuple)): if len(idx) == 1: frames, atoms = idx[0], slice(None, None, None) if len(idx) == 2: frames, atoms = idx[0], idx[1] if len(idx) >= 3 or len(idx) == 0: raise IndexError("invalid slice by %s" % idx) self.xyz[frames, atoms] = value.xyz self._time[frames] = value.time self.unitcell_lengths[frames] = value.unitcell_lengths self.unitcell_angles[frames] = value.unitcell_angles
def function[trajectory_set_item, parameter[self, idx, value]]: constant[ :param self: mdtraj.Trajectory :param idx: possible slices over frames, :param value: :return: ] import module[mdtraj] assert[call[name[isinstance], parameter[name[self], name[mdtraj].Trajectory]]] if <ast.UnaryOp object at 0x7da1b078c400> begin[:] <ast.Raise object at 0x7da1b078f8b0> variable[idx] assign[=] call[name[np].index_exp][name[idx]] <ast.Tuple object at 0x7da1b078c040> assign[=] tuple[[<ast.Constant object at 0x7da1b078f9a0>, <ast.Constant object at 0x7da1b078e200>]] if call[name[isinstance], parameter[name[idx], tuple[[<ast.Name object at 0x7da1b078fa60>, <ast.Name object at 0x7da1b078d4e0>]]]] begin[:] if compare[call[name[len], parameter[name[idx]]] equal[==] constant[1]] begin[:] <ast.Tuple object at 0x7da1b078c7f0> assign[=] tuple[[<ast.Subscript object at 0x7da1b078d900>, <ast.Call object at 0x7da1b078da20>]] if compare[call[name[len], parameter[name[idx]]] equal[==] constant[2]] begin[:] <ast.Tuple object at 0x7da1b078e2c0> assign[=] tuple[[<ast.Subscript object at 0x7da1b078d660>, <ast.Subscript object at 0x7da1b078e230>]] if <ast.BoolOp object at 0x7da1b078c100> begin[:] <ast.Raise object at 0x7da1b078ea40> call[name[self].xyz][tuple[[<ast.Name object at 0x7da1b078d840>, <ast.Name object at 0x7da1b078e1a0>]]] assign[=] name[value].xyz call[name[self]._time][name[frames]] assign[=] name[value].time call[name[self].unitcell_lengths][name[frames]] assign[=] name[value].unitcell_lengths call[name[self].unitcell_angles][name[frames]] assign[=] name[value].unitcell_angles
keyword[def] identifier[trajectory_set_item] ( identifier[self] , identifier[idx] , identifier[value] ): literal[string] keyword[import] identifier[mdtraj] keyword[assert] identifier[isinstance] ( identifier[self] , identifier[mdtraj] . identifier[Trajectory] ), identifier[type] ( identifier[self] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[mdtraj] . identifier[Trajectory] ): keyword[raise] identifier[TypeError] ( literal[string] % identifier[type] ( identifier[value] )) identifier[idx] = identifier[np] . identifier[index_exp] [ identifier[idx] ] identifier[frames] , identifier[atoms] = keyword[None] , keyword[None] keyword[if] identifier[isinstance] ( identifier[idx] ,( identifier[list] , identifier[tuple] )): keyword[if] identifier[len] ( identifier[idx] )== literal[int] : identifier[frames] , identifier[atoms] = identifier[idx] [ literal[int] ], identifier[slice] ( keyword[None] , keyword[None] , keyword[None] ) keyword[if] identifier[len] ( identifier[idx] )== literal[int] : identifier[frames] , identifier[atoms] = identifier[idx] [ literal[int] ], identifier[idx] [ literal[int] ] keyword[if] identifier[len] ( identifier[idx] )>= literal[int] keyword[or] identifier[len] ( identifier[idx] )== literal[int] : keyword[raise] identifier[IndexError] ( literal[string] % identifier[idx] ) identifier[self] . identifier[xyz] [ identifier[frames] , identifier[atoms] ]= identifier[value] . identifier[xyz] identifier[self] . identifier[_time] [ identifier[frames] ]= identifier[value] . identifier[time] identifier[self] . identifier[unitcell_lengths] [ identifier[frames] ]= identifier[value] . identifier[unitcell_lengths] identifier[self] . identifier[unitcell_angles] [ identifier[frames] ]= identifier[value] . identifier[unitcell_angles]
def trajectory_set_item(self, idx, value): """ :param self: mdtraj.Trajectory :param idx: possible slices over frames, :param value: :return: """ import mdtraj assert isinstance(self, mdtraj.Trajectory), type(self) if not isinstance(value, mdtraj.Trajectory): raise TypeError('value to assign is of incorrect type(%s). Should be mdtraj.Trajectory' % type(value)) # depends on [control=['if'], data=[]] idx = np.index_exp[idx] (frames, atoms) = (None, None) if isinstance(idx, (list, tuple)): if len(idx) == 1: (frames, atoms) = (idx[0], slice(None, None, None)) # depends on [control=['if'], data=[]] if len(idx) == 2: (frames, atoms) = (idx[0], idx[1]) # depends on [control=['if'], data=[]] if len(idx) >= 3 or len(idx) == 0: raise IndexError('invalid slice by %s' % idx) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self.xyz[frames, atoms] = value.xyz self._time[frames] = value.time self.unitcell_lengths[frames] = value.unitcell_lengths self.unitcell_angles[frames] = value.unitcell_angles
def _set(self, name, value): "Proxy to set a property of the widget element." return self.widget(self.widget_element._set(name, value))
def function[_set, parameter[self, name, value]]: constant[Proxy to set a property of the widget element.] return[call[name[self].widget, parameter[call[name[self].widget_element._set, parameter[name[name], name[value]]]]]]
keyword[def] identifier[_set] ( identifier[self] , identifier[name] , identifier[value] ): literal[string] keyword[return] identifier[self] . identifier[widget] ( identifier[self] . identifier[widget_element] . identifier[_set] ( identifier[name] , identifier[value] ))
def _set(self, name, value): """Proxy to set a property of the widget element.""" return self.widget(self.widget_element._set(name, value))
def account_application(self, customer_ip, first_name, last_name, tax_id, date_of_birth, address_line_1, city_name, state_code, postal_code, phone_number, email_address, citizenship_country, employment_status, product_id, funding_amount, account_number, routing_number, backup_withholding=False, phone_type='mobile', accept_tcpa=False, accept_terms=True, address_line_2='', middle_name='', tax_id_type='SSN', secondary_citizenship_country='', job_title='', annual_income=0, cd_term='', funding_type='fundach', account_owner='primary', secondary_application=None): ''' a method to submit application for new account :param customer_ip: string with ip address of applicant :param first_name: string with first name of applicant :param last_name: string with last name of applicant :param tax_id: string with tax id number of applicant :param date_of_birth: string with ISO format of date of birth of applicant :param address_line_1: string with first line of street address of applicant :param city_name: string with name of city of address of applicant :param state_code: string with code for the state of address of applicant :param postal_code: string with postal code of address of applicant :param phone_number: string with phone number and area code of applicant :param email_address: string with email address of applicant :param citizenship_country: string with ISO 3166 alpha-3 country code of citizenship of applicant :param employment_status: string with employment status of applicant :param product_id: integer with id of account product to apply for :param funding_amount: float with amount of dollars to initially fund account :param account_number: string with pre-existing bank account number of applicant :param routing_number: string with aba routing number for bank of pre-existing account of applicant :param backup_withholding: [optional] boolean to indicate backup withholding on accounts of applicant :param phone_type: [optional] string with type of phone of applicant :param 
accept_tcpa: boolean to accept to be contacted by citizen one marketing on their phone number :param accept_terms: boolean to accept the terms and conditions associated with new account :param address_line_2: [optional] string with second line of address of applicant :param middle_name: [optional] string with middle name of applicant :param tax_id_type: string with type of tax id of applicant :param secondary_citizenship_country: [optional] string with ISO 3166 alpha-3 country code of secondary citizenship :param job_title: [optional] string with job title of applicant :param annual_income: [optional] integer with dollar value of annual income of applicant :param cd_term: [optional] string with term for the cd account product to apply for :param funding_type: string with funding method selected by the applicant to fund new account :param account_owner: string with role of applicant who owns pre-existing bank account :param secondary_application: dictionary with applicant fields of secondary account holder :return: dictionary with successful response details in ['json'] key response details: { "error": "", "code": 200, "method": "GET", "url": "https://...", "headers": { "Location": "https://www.capitalone.com" }, "json": { "applicationId": "adfasdf812381asdf", "applicationStatus": "Approved", "applicationStatusDescription": "The application is approved and account is created", "bankABANumber": "031176110", "accountNumber": "12345678909876" } } ''' title = '%s.account_application' % self.__class__.__name__ from copy import deepcopy # validate general inputs input_fields = { 'customer_ip': customer_ip, 'product_id': product_id, 'cd_term': cd_term, 'funding_type': funding_type, 'funding_amount': funding_amount, 'account_number': account_number, 'routing_number': routing_number, 'account_owner': account_owner, 'accept_terms': accept_terms, 'accept_tcpa': accept_tcpa, 'secondary_application': secondary_application } for key, value in input_fields.items(): object_title = 
'%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # validate applicant fields app_fields = { 'address_line_1': address_line_1, 'address_line_2': address_line_2, 'city_name': city_name, 'state_code': state_code, 'postal_code': postal_code, 'first_name': first_name, 'middle_name': middle_name, 'last_name': last_name, 'tax_id_type': tax_id_type, 'tax_id': tax_id, 'date_of_birth': date_of_birth, 'email_address': email_address, 'backup_withholding': backup_withholding, 'citizenship_country': citizenship_country, 'secondary_citizenship_country': secondary_citizenship_country, 'employment_status': employment_status, 'job_title': job_title, 'annual_income': annual_income, 'phone_number': phone_number, 'phone_type': phone_type } for key, value in app_fields.items(): object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # construct url url = self.deposits_endpoint + 'account-application' # construct method specific errors error_map = { 404: 'Not Found. No products found for the provided productId.', 409: 'The application could not be processed due to a business error. Currently, this status is only returned when an existing Capital One customer attempts to open a new account using this API.' 
} # construct headers headers_kwargs = { 'Customer-IP-Address': customer_ip } # construct applicant list applicant_list = [] applicants = [ 'primary' ] if secondary_application: applicants.append('secondary') # iterate over applicants for applicant in applicants: # substitute in secondary application fields if applicant == 'secondary': app_fields = secondary_application for key, value in app_fields.items(): object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # construct applicant kwargs applicant_kwargs = { 'applicantRole': applicant, 'firstName': app_fields['first_name'], 'lastName': app_fields['last_name'], 'homeAddress': { 'addressLine1': app_fields['address_line_1'], 'city': app_fields['city_name'], 'postalCode': app_fields['postal_code'] }, 'taxIdType': app_fields['tax_id_type'], 'emailAddress': app_fields['email_address'], 'backupWithholding': app_fields['backup_withholding'], 'employmentStatus': app_fields['employment_status'] } # add optional middle name and second address if app_fields['middle_name']: applicant_kwargs['middleName'] = app_fields['middle_name'] # add state code from labpack.datasets.iso_3166_2_US import compile_map as map_3166_2 state_codes = map_3166_2() if not app_fields['state_code'] in state_codes.keys(): raise ValueError('%s(state_code=%s) must be a valid 3 letter country code.' 
% (title, app_fields['state_code'])) applicant_kwargs['homeAddress']['stateCode'] = app_fields['state_code'] # add tax id if len(app_fields['tax_id']) < 10: tax_id_temp = app_fields['tax_id'] tax_string = tax_id_temp[0:3] + '-' + tax_id[3:5] + '-' + tax_id[5:] else: tax_string = tax_id applicant_kwargs['taxId'] = tax_string # add date of birth if not '-' in app_fields['date_of_birth']: dob_temp = app_fields['date_of_birth'] app_fields['date_of_birth'] = dob_temp[0:4] + '-' + dob_temp[4:6] + '-' + dob_temp[6:8] if not len(app_fields['date_of_birth']) == 10: raise ValueError('%s(date_of_birth=%s) must be in ISO format YYYY-MM-DD or YYYYMMDD' % (title, app_fields['date_of_birth'])) applicant_kwargs['dateOfBirth'] = app_fields['date_of_birth'] # add citizenship country from labpack.datasets.iso_3166 import compile_map as map_3166 country_codes = map_3166() if not app_fields['citizenship_country'] in country_codes.keys(): raise ValueError('%s(citizenship_country=%s) must be a valid 3 letter country code.' % (title, app_fields['citizenship_country'])) applicant_kwargs['citizenshipCountry'] = app_fields['citizenship_country'] # add secondary citizenship country if app_fields['secondary_citizenship_country']: if not app_fields['secondary_citizenship_country'] in country_codes.keys(): raise ValueError('%s(secondary_citizenship_country=%s) must be a valid 3 letter country code.' 
% (title, app_fields['secondary_citizenship_country'])) applicant_kwargs['secondaryCitizenshipCountry'] = app_fields['secondary_citizenship_country'] # add job title if app_fields['job_title']: applicant_kwargs['jobTitle'] = app_fields['job_title'] # add annual income field if app_fields['annual_income']: income_category = 250000 income_list = [ { 50000: 25000 }, { 100000: 75000 }, { 150000: 125000 }, { 250000: 200000 } ] for level in income_list: key, value = next(iter(level.items())) if app_fields['annual_income'] > int(key): continue elif int(key) != 25000: income_category = int(value) break applicant_kwargs['annualIncome'] = income_category # add phone number fields phone_kwargs = { 'phoneNumber': app_fields['phone_number'], 'acceptedTcpa': app_fields['accept_tcpa'] } if phone_type == 'mobile': applicant_kwargs['mobilePhoneNumber'] = phone_kwargs elif phone_type == 'home': applicant_kwargs['homePhoneNumber'] = phone_kwargs elif phone_type == 'work': applicant_kwargs['workPhoneNumber'] = phone_kwargs # add applicant fields to data kwargs applicant_copy = deepcopy(applicant_kwargs) applicant_list.append(applicant_copy) # construct data fields data_kwargs = { 'applicants': applicant_list, 'productId': str(product_id) } # add cd term if product_id == '3500': if not cd_term: raise IndexError('%s(cd_term=0) must not be empty if product_id=3500') else: # TODO validate cd terms in account product data_kwargs['cdTerm'] = cd_term # add funding details funding_details = { 'fundingType': funding_type, 'fundingAmount': funding_amount, 'externalAccountDetails': { 'accountNumber': account_number, 'bankABANumber': routing_number, 'accountOwnership': account_owner } } data_kwargs['fundingDetails'] = funding_details # add terms and conditions term_details = { 'acceptAccountDisclosures': accept_terms, 'acceptPaperlessAgreement': accept_terms, 'acceptFraudProtection': accept_terms } data_kwargs['termsAndConditions'] = term_details # send request details = self._requests(url, 
method='POST', headers=headers_kwargs, data=data_kwargs, errors=error_map) return details
def function[account_application, parameter[self, customer_ip, first_name, last_name, tax_id, date_of_birth, address_line_1, city_name, state_code, postal_code, phone_number, email_address, citizenship_country, employment_status, product_id, funding_amount, account_number, routing_number, backup_withholding, phone_type, accept_tcpa, accept_terms, address_line_2, middle_name, tax_id_type, secondary_citizenship_country, job_title, annual_income, cd_term, funding_type, account_owner, secondary_application]]: constant[ a method to submit application for new account :param customer_ip: string with ip address of applicant :param first_name: string with first name of applicant :param last_name: string with last name of applicant :param tax_id: string with tax id number of applicant :param date_of_birth: string with ISO format of date of birth of applicant :param address_line_1: string with first line of street address of applicant :param city_name: string with name of city of address of applicant :param state_code: string with code for the state of address of applicant :param postal_code: string with postal code of address of applicant :param phone_number: string with phone number and area code of applicant :param email_address: string with email address of applicant :param citizenship_country: string with ISO 3166 alpha-3 country code of citizenship of applicant :param employment_status: string with employment status of applicant :param product_id: integer with id of account product to apply for :param funding_amount: float with amount of dollars to initially fund account :param account_number: string with pre-existing bank account number of applicant :param routing_number: string with aba routing number for bank of pre-existing account of applicant :param backup_withholding: [optional] boolean to indicate backup withholding on accounts of applicant :param phone_type: [optional] string with type of phone of applicant :param accept_tcpa: boolean to accept to be contacted 
by citizen one marketing on their phone number :param accept_terms: boolean to accept the terms and conditions associated with new account :param address_line_2: [optional] string with second line of address of applicant :param middle_name: [optional] string with middle name of applicant :param tax_id_type: string with type of tax id of applicant :param secondary_citizenship_country: [optional] string with ISO 3166 alpha-3 country code of secondary citizenship :param job_title: [optional] string with job title of applicant :param annual_income: [optional] integer with dollar value of annual income of applicant :param cd_term: [optional] string with term for the cd account product to apply for :param funding_type: string with funding method selected by the applicant to fund new account :param account_owner: string with role of applicant who owns pre-existing bank account :param secondary_application: dictionary with applicant fields of secondary account holder :return: dictionary with successful response details in ['json'] key response details: { "error": "", "code": 200, "method": "GET", "url": "https://...", "headers": { "Location": "https://www.capitalone.com" }, "json": { "applicationId": "adfasdf812381asdf", "applicationStatus": "Approved", "applicationStatusDescription": "The application is approved and account is created", "bankABANumber": "031176110", "accountNumber": "12345678909876" } } ] variable[title] assign[=] binary_operation[constant[%s.account_application] <ast.Mod object at 0x7da2590d6920> name[self].__class__.__name__] from relative_module[copy] import module[deepcopy] variable[input_fields] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c7100>, <ast.Constant object at 0x7da20c6c4f70>, <ast.Constant object at 0x7da20c6c7f70>, <ast.Constant object at 0x7da20c6c7400>, <ast.Constant object at 0x7da20c6c7190>, <ast.Constant object at 0x7da20c6c4b20>, <ast.Constant object at 0x7da20c6c7520>, <ast.Constant object at 0x7da20c6c5ab0>, 
<ast.Constant object at 0x7da20c6c5120>, <ast.Constant object at 0x7da20c6c6e60>, <ast.Constant object at 0x7da20c6c63e0>], [<ast.Name object at 0x7da20c6c6770>, <ast.Name object at 0x7da20c6c6080>, <ast.Name object at 0x7da20c6c5030>, <ast.Name object at 0x7da20c6c73d0>, <ast.Name object at 0x7da20c6c4c10>, <ast.Name object at 0x7da20c6c71c0>, <ast.Name object at 0x7da20c6c6a10>, <ast.Name object at 0x7da20c6c7a30>, <ast.Name object at 0x7da20c6c5810>, <ast.Name object at 0x7da20c6c76d0>, <ast.Name object at 0x7da20c6c7d60>]] for taget[tuple[[<ast.Name object at 0x7da20c6c4b80>, <ast.Name object at 0x7da20c6c6440>]]] in starred[call[name[input_fields].items, parameter[]]] begin[:] variable[object_title] assign[=] binary_operation[constant[%s(%s=%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6c5870>, <ast.Name object at 0x7da20c6c5c00>, <ast.Call object at 0x7da20c6c5150>]]] call[name[self].fields.validate, parameter[name[value], binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> name[key]], name[object_title]]] variable[app_fields] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c5ff0>, <ast.Constant object at 0x7da20c6c6f50>, <ast.Constant object at 0x7da20c6c4160>, <ast.Constant object at 0x7da20c6c6290>, <ast.Constant object at 0x7da20c6c56c0>, <ast.Constant object at 0x7da20c6c7eb0>, <ast.Constant object at 0x7da20c6c6c50>, <ast.Constant object at 0x7da20c6c4850>, <ast.Constant object at 0x7da20c6c4970>, <ast.Constant object at 0x7da20c6c66b0>, <ast.Constant object at 0x7da20c6c7fa0>, <ast.Constant object at 0x7da20c6c60b0>, <ast.Constant object at 0x7da20c6c72e0>, <ast.Constant object at 0x7da20c6c70a0>, <ast.Constant object at 0x7da20c6c4100>, <ast.Constant object at 0x7da20c6c6710>, <ast.Constant object at 0x7da20c6c5ba0>, <ast.Constant object at 0x7da20c6c42e0>, <ast.Constant object at 0x7da20c6c7610>, <ast.Constant object at 0x7da20c6c7f10>], [<ast.Name object at 0x7da20c6c6380>, <ast.Name object at 
0x7da20c6c6e00>, <ast.Name object at 0x7da20c6c5f60>, <ast.Name object at 0x7da20c6c79d0>, <ast.Name object at 0x7da20c6c59f0>, <ast.Name object at 0x7da20c6c4700>, <ast.Name object at 0x7da20c6c4880>, <ast.Name object at 0x7da20c6c6b30>, <ast.Name object at 0x7da20c6c74c0>, <ast.Name object at 0x7da20c6c78b0>, <ast.Name object at 0x7da20c6c6bf0>, <ast.Name object at 0x7da20c6c7b80>, <ast.Name object at 0x7da20c6c5ae0>, <ast.Name object at 0x7da20c6c7dc0>, <ast.Name object at 0x7da20c6c7640>, <ast.Name object at 0x7da20c6c7ac0>, <ast.Name object at 0x7da20c6c7670>, <ast.Name object at 0x7da20c6c78e0>, <ast.Name object at 0x7da20c6c6f20>, <ast.Name object at 0x7da20c6c4190>]] for taget[tuple[[<ast.Name object at 0x7da20c6c6050>, <ast.Name object at 0x7da20c6c5660>]]] in starred[call[name[app_fields].items, parameter[]]] begin[:] variable[object_title] assign[=] binary_operation[constant[%s(%s=%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6c66e0>, <ast.Name object at 0x7da20c6c4d90>, <ast.Call object at 0x7da20c6c4d00>]]] call[name[self].fields.validate, parameter[name[value], binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> name[key]], name[object_title]]] variable[url] assign[=] binary_operation[name[self].deposits_endpoint + constant[account-application]] variable[error_map] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c61d0>, <ast.Constant object at 0x7da20c6c4430>], [<ast.Constant object at 0x7da20c6c6260>, <ast.Constant object at 0x7da20c6c5db0>]] variable[headers_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c6ce0>], [<ast.Name object at 0x7da20c6c63b0>]] variable[applicant_list] assign[=] list[[]] variable[applicants] assign[=] list[[<ast.Constant object at 0x7da20c6c55a0>]] if name[secondary_application] begin[:] call[name[applicants].append, parameter[constant[secondary]]] for taget[name[applicant]] in starred[name[applicants]] begin[:] if compare[name[applicant] equal[==] 
constant[secondary]] begin[:] variable[app_fields] assign[=] name[secondary_application] for taget[tuple[[<ast.Name object at 0x7da20c6c6a70>, <ast.Name object at 0x7da20c6c5000>]]] in starred[call[name[app_fields].items, parameter[]]] begin[:] variable[object_title] assign[=] binary_operation[constant[%s(%s=%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6c5bd0>, <ast.Name object at 0x7da20c6c5960>, <ast.Call object at 0x7da20c6c5360>]]] call[name[self].fields.validate, parameter[name[value], binary_operation[constant[.%s] <ast.Mod object at 0x7da2590d6920> name[key]], name[object_title]]] variable[applicant_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c5450>, <ast.Constant object at 0x7da20c6c58d0>, <ast.Constant object at 0x7da20c6c5270>, <ast.Constant object at 0x7da20c6c5750>, <ast.Constant object at 0x7da20c6c7730>, <ast.Constant object at 0x7da20c6c5090>, <ast.Constant object at 0x7da20c6c42b0>, <ast.Constant object at 0x7da20c6c7a60>], [<ast.Name object at 0x7da20c6c70d0>, <ast.Subscript object at 0x7da20c6c43d0>, <ast.Subscript object at 0x7da20c6c72b0>, <ast.Dict object at 0x7da20c6c5cf0>, <ast.Subscript object at 0x7da20c6c7af0>, <ast.Subscript object at 0x7da20c6c6470>, <ast.Subscript object at 0x7da20c6c7340>, <ast.Subscript object at 0x7da20c6c64d0>]] if call[name[app_fields]][constant[middle_name]] begin[:] call[name[applicant_kwargs]][constant[middleName]] assign[=] call[name[app_fields]][constant[middle_name]] from relative_module[labpack.datasets.iso_3166_2_US] import module[compile_map] variable[state_codes] assign[=] call[name[map_3166_2], parameter[]] if <ast.UnaryOp object at 0x7da20c6c41f0> begin[:] <ast.Raise object at 0x7da20c6c7e20> call[call[name[applicant_kwargs]][constant[homeAddress]]][constant[stateCode]] assign[=] call[name[app_fields]][constant[state_code]] if compare[call[name[len], parameter[call[name[app_fields]][constant[tax_id]]]] less[<] constant[10]] begin[:] variable[tax_id_temp] 
assign[=] call[name[app_fields]][constant[tax_id]] variable[tax_string] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[call[name[tax_id_temp]][<ast.Slice object at 0x7da2047e82e0>] + constant[-]] + call[name[tax_id]][<ast.Slice object at 0x7da2047e9840>]] + constant[-]] + call[name[tax_id]][<ast.Slice object at 0x7da2047e9450>]] call[name[applicant_kwargs]][constant[taxId]] assign[=] name[tax_string] if <ast.UnaryOp object at 0x7da2047e9ea0> begin[:] variable[dob_temp] assign[=] call[name[app_fields]][constant[date_of_birth]] call[name[app_fields]][constant[date_of_birth]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[call[name[dob_temp]][<ast.Slice object at 0x7da2047ea3e0>] + constant[-]] + call[name[dob_temp]][<ast.Slice object at 0x7da2047eb0a0>]] + constant[-]] + call[name[dob_temp]][<ast.Slice object at 0x7da2047ea8f0>]] if <ast.UnaryOp object at 0x7da2047eba30> begin[:] <ast.Raise object at 0x7da2047ebcd0> call[name[applicant_kwargs]][constant[dateOfBirth]] assign[=] call[name[app_fields]][constant[date_of_birth]] from relative_module[labpack.datasets.iso_3166] import module[compile_map] variable[country_codes] assign[=] call[name[map_3166], parameter[]] if <ast.UnaryOp object at 0x7da2047ea470> begin[:] <ast.Raise object at 0x7da2047e8580> call[name[applicant_kwargs]][constant[citizenshipCountry]] assign[=] call[name[app_fields]][constant[citizenship_country]] if call[name[app_fields]][constant[secondary_citizenship_country]] begin[:] if <ast.UnaryOp object at 0x7da2047eaa70> begin[:] <ast.Raise object at 0x7da2047e9f90> call[name[applicant_kwargs]][constant[secondaryCitizenshipCountry]] assign[=] call[name[app_fields]][constant[secondary_citizenship_country]] if call[name[app_fields]][constant[job_title]] begin[:] call[name[applicant_kwargs]][constant[jobTitle]] assign[=] call[name[app_fields]][constant[job_title]] if call[name[app_fields]][constant[annual_income]] begin[:] 
variable[income_category] assign[=] constant[250000] variable[income_list] assign[=] list[[<ast.Dict object at 0x7da204347f70>, <ast.Dict object at 0x7da2043450c0>, <ast.Dict object at 0x7da204347490>, <ast.Dict object at 0x7da204347d30>]] for taget[name[level]] in starred[name[income_list]] begin[:] <ast.Tuple object at 0x7da204344850> assign[=] call[name[next], parameter[call[name[iter], parameter[call[name[level].items, parameter[]]]]]] if compare[call[name[app_fields]][constant[annual_income]] greater[>] call[name[int], parameter[name[key]]]] begin[:] continue call[name[applicant_kwargs]][constant[annualIncome]] assign[=] name[income_category] variable[phone_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da204344280>, <ast.Constant object at 0x7da204344fd0>], [<ast.Subscript object at 0x7da204346920>, <ast.Subscript object at 0x7da204347220>]] if compare[name[phone_type] equal[==] constant[mobile]] begin[:] call[name[applicant_kwargs]][constant[mobilePhoneNumber]] assign[=] name[phone_kwargs] variable[applicant_copy] assign[=] call[name[deepcopy], parameter[name[applicant_kwargs]]] call[name[applicant_list].append, parameter[name[applicant_copy]]] variable[data_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da204344c10>, <ast.Constant object at 0x7da204344100>], [<ast.Name object at 0x7da204346ce0>, <ast.Call object at 0x7da204344b20>]] if compare[name[product_id] equal[==] constant[3500]] begin[:] if <ast.UnaryOp object at 0x7da204347ee0> begin[:] <ast.Raise object at 0x7da2043449a0> variable[funding_details] assign[=] dictionary[[<ast.Constant object at 0x7da204346890>, <ast.Constant object at 0x7da2043479d0>, <ast.Constant object at 0x7da204346980>], [<ast.Name object at 0x7da204347040>, <ast.Name object at 0x7da204346fe0>, <ast.Dict object at 0x7da2043472e0>]] call[name[data_kwargs]][constant[fundingDetails]] assign[=] name[funding_details] variable[term_details] assign[=] dictionary[[<ast.Constant object at 0x7da204344df0>, 
<ast.Constant object at 0x7da204344f40>, <ast.Constant object at 0x7da204345720>], [<ast.Name object at 0x7da204346320>, <ast.Name object at 0x7da2043473d0>, <ast.Name object at 0x7da204347eb0>]] call[name[data_kwargs]][constant[termsAndConditions]] assign[=] name[term_details] variable[details] assign[=] call[name[self]._requests, parameter[name[url]]] return[name[details]]
keyword[def] identifier[account_application] ( identifier[self] , identifier[customer_ip] , identifier[first_name] , identifier[last_name] , identifier[tax_id] , identifier[date_of_birth] , identifier[address_line_1] , identifier[city_name] , identifier[state_code] , identifier[postal_code] , identifier[phone_number] , identifier[email_address] , identifier[citizenship_country] , identifier[employment_status] , identifier[product_id] , identifier[funding_amount] , identifier[account_number] , identifier[routing_number] , identifier[backup_withholding] = keyword[False] , identifier[phone_type] = literal[string] , identifier[accept_tcpa] = keyword[False] , identifier[accept_terms] = keyword[True] , identifier[address_line_2] = literal[string] , identifier[middle_name] = literal[string] , identifier[tax_id_type] = literal[string] , identifier[secondary_citizenship_country] = literal[string] , identifier[job_title] = literal[string] , identifier[annual_income] = literal[int] , identifier[cd_term] = literal[string] , identifier[funding_type] = literal[string] , identifier[account_owner] = literal[string] , identifier[secondary_application] = keyword[None] ): literal[string] identifier[title] = literal[string] % identifier[self] . identifier[__class__] . identifier[__name__] keyword[from] identifier[copy] keyword[import] identifier[deepcopy] identifier[input_fields] ={ literal[string] : identifier[customer_ip] , literal[string] : identifier[product_id] , literal[string] : identifier[cd_term] , literal[string] : identifier[funding_type] , literal[string] : identifier[funding_amount] , literal[string] : identifier[account_number] , literal[string] : identifier[routing_number] , literal[string] : identifier[account_owner] , literal[string] : identifier[accept_terms] , literal[string] : identifier[accept_tcpa] , literal[string] : identifier[secondary_application] } keyword[for] identifier[key] , identifier[value] keyword[in] identifier[input_fields] . 
identifier[items] (): identifier[object_title] = literal[string] %( identifier[title] , identifier[key] , identifier[str] ( identifier[value] )) identifier[self] . identifier[fields] . identifier[validate] ( identifier[value] , literal[string] % identifier[key] , identifier[object_title] ) identifier[app_fields] ={ literal[string] : identifier[address_line_1] , literal[string] : identifier[address_line_2] , literal[string] : identifier[city_name] , literal[string] : identifier[state_code] , literal[string] : identifier[postal_code] , literal[string] : identifier[first_name] , literal[string] : identifier[middle_name] , literal[string] : identifier[last_name] , literal[string] : identifier[tax_id_type] , literal[string] : identifier[tax_id] , literal[string] : identifier[date_of_birth] , literal[string] : identifier[email_address] , literal[string] : identifier[backup_withholding] , literal[string] : identifier[citizenship_country] , literal[string] : identifier[secondary_citizenship_country] , literal[string] : identifier[employment_status] , literal[string] : identifier[job_title] , literal[string] : identifier[annual_income] , literal[string] : identifier[phone_number] , literal[string] : identifier[phone_type] } keyword[for] identifier[key] , identifier[value] keyword[in] identifier[app_fields] . identifier[items] (): identifier[object_title] = literal[string] %( identifier[title] , identifier[key] , identifier[str] ( identifier[value] )) identifier[self] . identifier[fields] . identifier[validate] ( identifier[value] , literal[string] % identifier[key] , identifier[object_title] ) identifier[url] = identifier[self] . 
identifier[deposits_endpoint] + literal[string] identifier[error_map] ={ literal[int] : literal[string] , literal[int] : literal[string] } identifier[headers_kwargs] ={ literal[string] : identifier[customer_ip] } identifier[applicant_list] =[] identifier[applicants] =[ literal[string] ] keyword[if] identifier[secondary_application] : identifier[applicants] . identifier[append] ( literal[string] ) keyword[for] identifier[applicant] keyword[in] identifier[applicants] : keyword[if] identifier[applicant] == literal[string] : identifier[app_fields] = identifier[secondary_application] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[app_fields] . identifier[items] (): identifier[object_title] = literal[string] %( identifier[title] , identifier[key] , identifier[str] ( identifier[value] )) identifier[self] . identifier[fields] . identifier[validate] ( identifier[value] , literal[string] % identifier[key] , identifier[object_title] ) identifier[applicant_kwargs] ={ literal[string] : identifier[applicant] , literal[string] : identifier[app_fields] [ literal[string] ], literal[string] : identifier[app_fields] [ literal[string] ], literal[string] :{ literal[string] : identifier[app_fields] [ literal[string] ], literal[string] : identifier[app_fields] [ literal[string] ], literal[string] : identifier[app_fields] [ literal[string] ] }, literal[string] : identifier[app_fields] [ literal[string] ], literal[string] : identifier[app_fields] [ literal[string] ], literal[string] : identifier[app_fields] [ literal[string] ], literal[string] : identifier[app_fields] [ literal[string] ] } keyword[if] identifier[app_fields] [ literal[string] ]: identifier[applicant_kwargs] [ literal[string] ]= identifier[app_fields] [ literal[string] ] keyword[from] identifier[labpack] . identifier[datasets] . 
identifier[iso_3166_2_US] keyword[import] identifier[compile_map] keyword[as] identifier[map_3166_2] identifier[state_codes] = identifier[map_3166_2] () keyword[if] keyword[not] identifier[app_fields] [ literal[string] ] keyword[in] identifier[state_codes] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[title] , identifier[app_fields] [ literal[string] ])) identifier[applicant_kwargs] [ literal[string] ][ literal[string] ]= identifier[app_fields] [ literal[string] ] keyword[if] identifier[len] ( identifier[app_fields] [ literal[string] ])< literal[int] : identifier[tax_id_temp] = identifier[app_fields] [ literal[string] ] identifier[tax_string] = identifier[tax_id_temp] [ literal[int] : literal[int] ]+ literal[string] + identifier[tax_id] [ literal[int] : literal[int] ]+ literal[string] + identifier[tax_id] [ literal[int] :] keyword[else] : identifier[tax_string] = identifier[tax_id] identifier[applicant_kwargs] [ literal[string] ]= identifier[tax_string] keyword[if] keyword[not] literal[string] keyword[in] identifier[app_fields] [ literal[string] ]: identifier[dob_temp] = identifier[app_fields] [ literal[string] ] identifier[app_fields] [ literal[string] ]= identifier[dob_temp] [ literal[int] : literal[int] ]+ literal[string] + identifier[dob_temp] [ literal[int] : literal[int] ]+ literal[string] + identifier[dob_temp] [ literal[int] : literal[int] ] keyword[if] keyword[not] identifier[len] ( identifier[app_fields] [ literal[string] ])== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[title] , identifier[app_fields] [ literal[string] ])) identifier[applicant_kwargs] [ literal[string] ]= identifier[app_fields] [ literal[string] ] keyword[from] identifier[labpack] . identifier[datasets] . 
identifier[iso_3166] keyword[import] identifier[compile_map] keyword[as] identifier[map_3166] identifier[country_codes] = identifier[map_3166] () keyword[if] keyword[not] identifier[app_fields] [ literal[string] ] keyword[in] identifier[country_codes] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[title] , identifier[app_fields] [ literal[string] ])) identifier[applicant_kwargs] [ literal[string] ]= identifier[app_fields] [ literal[string] ] keyword[if] identifier[app_fields] [ literal[string] ]: keyword[if] keyword[not] identifier[app_fields] [ literal[string] ] keyword[in] identifier[country_codes] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[title] , identifier[app_fields] [ literal[string] ])) identifier[applicant_kwargs] [ literal[string] ]= identifier[app_fields] [ literal[string] ] keyword[if] identifier[app_fields] [ literal[string] ]: identifier[applicant_kwargs] [ literal[string] ]= identifier[app_fields] [ literal[string] ] keyword[if] identifier[app_fields] [ literal[string] ]: identifier[income_category] = literal[int] identifier[income_list] =[{ literal[int] : literal[int] },{ literal[int] : literal[int] },{ literal[int] : literal[int] },{ literal[int] : literal[int] }] keyword[for] identifier[level] keyword[in] identifier[income_list] : identifier[key] , identifier[value] = identifier[next] ( identifier[iter] ( identifier[level] . 
identifier[items] ())) keyword[if] identifier[app_fields] [ literal[string] ]> identifier[int] ( identifier[key] ): keyword[continue] keyword[elif] identifier[int] ( identifier[key] )!= literal[int] : identifier[income_category] = identifier[int] ( identifier[value] ) keyword[break] identifier[applicant_kwargs] [ literal[string] ]= identifier[income_category] identifier[phone_kwargs] ={ literal[string] : identifier[app_fields] [ literal[string] ], literal[string] : identifier[app_fields] [ literal[string] ] } keyword[if] identifier[phone_type] == literal[string] : identifier[applicant_kwargs] [ literal[string] ]= identifier[phone_kwargs] keyword[elif] identifier[phone_type] == literal[string] : identifier[applicant_kwargs] [ literal[string] ]= identifier[phone_kwargs] keyword[elif] identifier[phone_type] == literal[string] : identifier[applicant_kwargs] [ literal[string] ]= identifier[phone_kwargs] identifier[applicant_copy] = identifier[deepcopy] ( identifier[applicant_kwargs] ) identifier[applicant_list] . 
identifier[append] ( identifier[applicant_copy] ) identifier[data_kwargs] ={ literal[string] : identifier[applicant_list] , literal[string] : identifier[str] ( identifier[product_id] ) } keyword[if] identifier[product_id] == literal[string] : keyword[if] keyword[not] identifier[cd_term] : keyword[raise] identifier[IndexError] ( literal[string] ) keyword[else] : identifier[data_kwargs] [ literal[string] ]= identifier[cd_term] identifier[funding_details] ={ literal[string] : identifier[funding_type] , literal[string] : identifier[funding_amount] , literal[string] :{ literal[string] : identifier[account_number] , literal[string] : identifier[routing_number] , literal[string] : identifier[account_owner] } } identifier[data_kwargs] [ literal[string] ]= identifier[funding_details] identifier[term_details] ={ literal[string] : identifier[accept_terms] , literal[string] : identifier[accept_terms] , literal[string] : identifier[accept_terms] } identifier[data_kwargs] [ literal[string] ]= identifier[term_details] identifier[details] = identifier[self] . identifier[_requests] ( identifier[url] , identifier[method] = literal[string] , identifier[headers] = identifier[headers_kwargs] , identifier[data] = identifier[data_kwargs] , identifier[errors] = identifier[error_map] ) keyword[return] identifier[details]
def account_application(self, customer_ip, first_name, last_name, tax_id, date_of_birth, address_line_1, city_name, state_code, postal_code, phone_number, email_address, citizenship_country, employment_status, product_id, funding_amount, account_number, routing_number, backup_withholding=False, phone_type='mobile', accept_tcpa=False, accept_terms=True, address_line_2='', middle_name='', tax_id_type='SSN', secondary_citizenship_country='', job_title='', annual_income=0, cd_term='', funding_type='fundach', account_owner='primary', secondary_application=None): """ a method to submit application for new account :param customer_ip: string with ip address of applicant :param first_name: string with first name of applicant :param last_name: string with last name of applicant :param tax_id: string with tax id number of applicant :param date_of_birth: string with ISO format of date of birth of applicant :param address_line_1: string with first line of street address of applicant :param city_name: string with name of city of address of applicant :param state_code: string with code for the state of address of applicant :param postal_code: string with postal code of address of applicant :param phone_number: string with phone number and area code of applicant :param email_address: string with email address of applicant :param citizenship_country: string with ISO 3166 alpha-3 country code of citizenship of applicant :param employment_status: string with employment status of applicant :param product_id: integer with id of account product to apply for :param funding_amount: float with amount of dollars to initially fund account :param account_number: string with pre-existing bank account number of applicant :param routing_number: string with aba routing number for bank of pre-existing account of applicant :param backup_withholding: [optional] boolean to indicate backup withholding on accounts of applicant :param phone_type: [optional] string with type of phone of applicant :param 
accept_tcpa: boolean to accept to be contacted by citizen one marketing on their phone number :param accept_terms: boolean to accept the terms and conditions associated with new account :param address_line_2: [optional] string with second line of address of applicant :param middle_name: [optional] string with middle name of applicant :param tax_id_type: string with type of tax id of applicant :param secondary_citizenship_country: [optional] string with ISO 3166 alpha-3 country code of secondary citizenship :param job_title: [optional] string with job title of applicant :param annual_income: [optional] integer with dollar value of annual income of applicant :param cd_term: [optional] string with term for the cd account product to apply for :param funding_type: string with funding method selected by the applicant to fund new account :param account_owner: string with role of applicant who owns pre-existing bank account :param secondary_application: dictionary with applicant fields of secondary account holder :return: dictionary with successful response details in ['json'] key response details: { "error": "", "code": 200, "method": "GET", "url": "https://...", "headers": { "Location": "https://www.capitalone.com" }, "json": { "applicationId": "adfasdf812381asdf", "applicationStatus": "Approved", "applicationStatusDescription": "The application is approved and account is created", "bankABANumber": "031176110", "accountNumber": "12345678909876" } } """ title = '%s.account_application' % self.__class__.__name__ from copy import deepcopy # validate general inputs input_fields = {'customer_ip': customer_ip, 'product_id': product_id, 'cd_term': cd_term, 'funding_type': funding_type, 'funding_amount': funding_amount, 'account_number': account_number, 'routing_number': routing_number, 'account_owner': account_owner, 'accept_terms': accept_terms, 'accept_tcpa': accept_tcpa, 'secondary_application': secondary_application} for (key, value) in input_fields.items(): object_title = 
'%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # depends on [control=['for'], data=[]] # validate applicant fields app_fields = {'address_line_1': address_line_1, 'address_line_2': address_line_2, 'city_name': city_name, 'state_code': state_code, 'postal_code': postal_code, 'first_name': first_name, 'middle_name': middle_name, 'last_name': last_name, 'tax_id_type': tax_id_type, 'tax_id': tax_id, 'date_of_birth': date_of_birth, 'email_address': email_address, 'backup_withholding': backup_withholding, 'citizenship_country': citizenship_country, 'secondary_citizenship_country': secondary_citizenship_country, 'employment_status': employment_status, 'job_title': job_title, 'annual_income': annual_income, 'phone_number': phone_number, 'phone_type': phone_type} for (key, value) in app_fields.items(): object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # depends on [control=['for'], data=[]] # construct url url = self.deposits_endpoint + 'account-application' # construct method specific errors error_map = {404: 'Not Found. No products found for the provided productId.', 409: 'The application could not be processed due to a business error. 
Currently, this status is only returned when an existing Capital One customer attempts to open a new account using this API.'} # construct headers headers_kwargs = {'Customer-IP-Address': customer_ip} # construct applicant list applicant_list = [] applicants = ['primary'] if secondary_application: applicants.append('secondary') # depends on [control=['if'], data=[]] # iterate over applicants for applicant in applicants: # substitute in secondary application fields if applicant == 'secondary': app_fields = secondary_application for (key, value) in app_fields.items(): object_title = '%s(%s=%s)' % (title, key, str(value)) self.fields.validate(value, '.%s' % key, object_title) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # construct applicant kwargs applicant_kwargs = {'applicantRole': applicant, 'firstName': app_fields['first_name'], 'lastName': app_fields['last_name'], 'homeAddress': {'addressLine1': app_fields['address_line_1'], 'city': app_fields['city_name'], 'postalCode': app_fields['postal_code']}, 'taxIdType': app_fields['tax_id_type'], 'emailAddress': app_fields['email_address'], 'backupWithholding': app_fields['backup_withholding'], 'employmentStatus': app_fields['employment_status']} # add optional middle name and second address if app_fields['middle_name']: applicant_kwargs['middleName'] = app_fields['middle_name'] # depends on [control=['if'], data=[]] # add state code from labpack.datasets.iso_3166_2_US import compile_map as map_3166_2 state_codes = map_3166_2() if not app_fields['state_code'] in state_codes.keys(): raise ValueError('%s(state_code=%s) must be a valid 3 letter country code.' 
% (title, app_fields['state_code'])) # depends on [control=['if'], data=[]] applicant_kwargs['homeAddress']['stateCode'] = app_fields['state_code'] # add tax id if len(app_fields['tax_id']) < 10: tax_id_temp = app_fields['tax_id'] tax_string = tax_id_temp[0:3] + '-' + tax_id[3:5] + '-' + tax_id[5:] # depends on [control=['if'], data=[]] else: tax_string = tax_id applicant_kwargs['taxId'] = tax_string # add date of birth if not '-' in app_fields['date_of_birth']: dob_temp = app_fields['date_of_birth'] app_fields['date_of_birth'] = dob_temp[0:4] + '-' + dob_temp[4:6] + '-' + dob_temp[6:8] # depends on [control=['if'], data=[]] if not len(app_fields['date_of_birth']) == 10: raise ValueError('%s(date_of_birth=%s) must be in ISO format YYYY-MM-DD or YYYYMMDD' % (title, app_fields['date_of_birth'])) # depends on [control=['if'], data=[]] applicant_kwargs['dateOfBirth'] = app_fields['date_of_birth'] # add citizenship country from labpack.datasets.iso_3166 import compile_map as map_3166 country_codes = map_3166() if not app_fields['citizenship_country'] in country_codes.keys(): raise ValueError('%s(citizenship_country=%s) must be a valid 3 letter country code.' % (title, app_fields['citizenship_country'])) # depends on [control=['if'], data=[]] applicant_kwargs['citizenshipCountry'] = app_fields['citizenship_country'] # add secondary citizenship country if app_fields['secondary_citizenship_country']: if not app_fields['secondary_citizenship_country'] in country_codes.keys(): raise ValueError('%s(secondary_citizenship_country=%s) must be a valid 3 letter country code.' 
% (title, app_fields['secondary_citizenship_country'])) # depends on [control=['if'], data=[]] applicant_kwargs['secondaryCitizenshipCountry'] = app_fields['secondary_citizenship_country'] # depends on [control=['if'], data=[]] # add job title if app_fields['job_title']: applicant_kwargs['jobTitle'] = app_fields['job_title'] # depends on [control=['if'], data=[]] # add annual income field if app_fields['annual_income']: income_category = 250000 income_list = [{50000: 25000}, {100000: 75000}, {150000: 125000}, {250000: 200000}] for level in income_list: (key, value) = next(iter(level.items())) if app_fields['annual_income'] > int(key): continue # depends on [control=['if'], data=[]] elif int(key) != 25000: income_category = int(value) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['level']] applicant_kwargs['annualIncome'] = income_category # depends on [control=['if'], data=[]] # add phone number fields phone_kwargs = {'phoneNumber': app_fields['phone_number'], 'acceptedTcpa': app_fields['accept_tcpa']} if phone_type == 'mobile': applicant_kwargs['mobilePhoneNumber'] = phone_kwargs # depends on [control=['if'], data=[]] elif phone_type == 'home': applicant_kwargs['homePhoneNumber'] = phone_kwargs # depends on [control=['if'], data=[]] elif phone_type == 'work': applicant_kwargs['workPhoneNumber'] = phone_kwargs # depends on [control=['if'], data=[]] # add applicant fields to data kwargs applicant_copy = deepcopy(applicant_kwargs) applicant_list.append(applicant_copy) # depends on [control=['for'], data=['applicant']] # construct data fields data_kwargs = {'applicants': applicant_list, 'productId': str(product_id)} # add cd term if product_id == '3500': if not cd_term: raise IndexError('%s(cd_term=0) must not be empty if product_id=3500') # depends on [control=['if'], data=[]] else: # TODO validate cd terms in account product data_kwargs['cdTerm'] = cd_term # depends on [control=['if'], data=[]] # add funding details 
funding_details = {'fundingType': funding_type, 'fundingAmount': funding_amount, 'externalAccountDetails': {'accountNumber': account_number, 'bankABANumber': routing_number, 'accountOwnership': account_owner}} data_kwargs['fundingDetails'] = funding_details # add terms and conditions term_details = {'acceptAccountDisclosures': accept_terms, 'acceptPaperlessAgreement': accept_terms, 'acceptFraudProtection': accept_terms} data_kwargs['termsAndConditions'] = term_details # send request details = self._requests(url, method='POST', headers=headers_kwargs, data=data_kwargs, errors=error_map) return details
def add_connection_throttle(self, loadbalancer, maxConnectionRate=None, maxConnections=None, minConnections=None, rateInterval=None): """ Updates the connection throttling information for the load balancer with the supplied values. At least one of the parameters must be supplied. """ return loadbalancer.add_connection_throttle( maxConnectionRate=maxConnectionRate, maxConnections=maxConnections, minConnections=minConnections, rateInterval=rateInterval)
def function[add_connection_throttle, parameter[self, loadbalancer, maxConnectionRate, maxConnections, minConnections, rateInterval]]: constant[ Updates the connection throttling information for the load balancer with the supplied values. At least one of the parameters must be supplied. ] return[call[name[loadbalancer].add_connection_throttle, parameter[]]]
keyword[def] identifier[add_connection_throttle] ( identifier[self] , identifier[loadbalancer] , identifier[maxConnectionRate] = keyword[None] , identifier[maxConnections] = keyword[None] , identifier[minConnections] = keyword[None] , identifier[rateInterval] = keyword[None] ): literal[string] keyword[return] identifier[loadbalancer] . identifier[add_connection_throttle] ( identifier[maxConnectionRate] = identifier[maxConnectionRate] , identifier[maxConnections] = identifier[maxConnections] , identifier[minConnections] = identifier[minConnections] , identifier[rateInterval] = identifier[rateInterval] )
def add_connection_throttle(self, loadbalancer, maxConnectionRate=None, maxConnections=None, minConnections=None, rateInterval=None): """ Updates the connection throttling information for the load balancer with the supplied values. At least one of the parameters must be supplied. """ return loadbalancer.add_connection_throttle(maxConnectionRate=maxConnectionRate, maxConnections=maxConnections, minConnections=minConnections, rateInterval=rateInterval)
def _filter(self, dict, keep): """ Remove any keys not in 'keep' """ if not keep: return dict result = {} for key, value in dict.iteritems(): if key in keep: result[key] = value return result
def function[_filter, parameter[self, dict, keep]]: constant[ Remove any keys not in 'keep' ] if <ast.UnaryOp object at 0x7da1b27e35b0> begin[:] return[name[dict]] variable[result] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b27e0820>, <ast.Name object at 0x7da1b27e05e0>]]] in starred[call[name[dict].iteritems, parameter[]]] begin[:] if compare[name[key] in name[keep]] begin[:] call[name[result]][name[key]] assign[=] name[value] return[name[result]]
keyword[def] identifier[_filter] ( identifier[self] , identifier[dict] , identifier[keep] ): literal[string] keyword[if] keyword[not] identifier[keep] : keyword[return] identifier[dict] identifier[result] ={} keyword[for] identifier[key] , identifier[value] keyword[in] identifier[dict] . identifier[iteritems] (): keyword[if] identifier[key] keyword[in] identifier[keep] : identifier[result] [ identifier[key] ]= identifier[value] keyword[return] identifier[result]
def _filter(self, dict, keep): """ Remove any keys not in 'keep' """ if not keep: return dict # depends on [control=['if'], data=[]] result = {} for (key, value) in dict.iteritems(): if key in keep: result[key] = value # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=[]] return result
def CheckOutput(*popenargs, **kwargs): """ Run command with arguments and return its output as a byte string. Backported from Python 2.7 as it's implemented as pure python on stdlib. """ process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) output, _ = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] error = subprocess.CalledProcessError(retcode, cmd) error.output = output raise error return retcode, output
def function[CheckOutput, parameter[]]: constant[ Run command with arguments and return its output as a byte string. Backported from Python 2.7 as it's implemented as pure python on stdlib. ] variable[process] assign[=] call[name[subprocess].Popen, parameter[<ast.Starred object at 0x7da204621210>]] <ast.Tuple object at 0x7da204621840> assign[=] call[name[process].communicate, parameter[]] variable[retcode] assign[=] call[name[process].poll, parameter[]] if name[retcode] begin[:] variable[cmd] assign[=] call[name[kwargs].get, parameter[constant[args]]] if compare[name[cmd] is constant[None]] begin[:] variable[cmd] assign[=] call[name[popenargs]][constant[0]] variable[error] assign[=] call[name[subprocess].CalledProcessError, parameter[name[retcode], name[cmd]]] name[error].output assign[=] name[output] <ast.Raise object at 0x7da204623490> return[tuple[[<ast.Name object at 0x7da204621cc0>, <ast.Name object at 0x7da2046225c0>]]]
keyword[def] identifier[CheckOutput] (* identifier[popenargs] ,** identifier[kwargs] ): literal[string] identifier[process] = identifier[subprocess] . identifier[Popen] ( identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,* identifier[popenargs] ,** identifier[kwargs] ) identifier[output] , identifier[_] = identifier[process] . identifier[communicate] () identifier[retcode] = identifier[process] . identifier[poll] () keyword[if] identifier[retcode] : identifier[cmd] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[if] identifier[cmd] keyword[is] keyword[None] : identifier[cmd] = identifier[popenargs] [ literal[int] ] identifier[error] = identifier[subprocess] . identifier[CalledProcessError] ( identifier[retcode] , identifier[cmd] ) identifier[error] . identifier[output] = identifier[output] keyword[raise] identifier[error] keyword[return] identifier[retcode] , identifier[output]
def CheckOutput(*popenargs, **kwargs): """ Run command with arguments and return its output as a byte string. Backported from Python 2.7 as it's implemented as pure python on stdlib. """ process = subprocess.Popen(*popenargs, stdout=subprocess.PIPE, **kwargs) (output, _) = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get('args') if cmd is None: cmd = popenargs[0] # depends on [control=['if'], data=['cmd']] error = subprocess.CalledProcessError(retcode, cmd) error.output = output raise error # depends on [control=['if'], data=[]] return (retcode, output)
def score_n1(matrix, matrix_size): """\ Implements the penalty score feature 1. ISO/IEC 18004:2015(E) -- 7.8.3 Evaluation of data masking results - Table 11 (page 54) ============================================ ======================== ====== Feature Evaluation condition Points ============================================ ======================== ====== Adjacent modules in row/column in same color No. of modules = (5 + i) N1 + i ============================================ ======================== ====== N1 = 3 :param matrix: The matrix to evaluate :param matrix_size: The width (or height) of the matrix. :return int: The penalty score (feature 1) of the matrix. """ score = 0 for i in range(matrix_size): prev_bit_row, prev_bit_col = -1, -1 row_counter, col_counter = 0, 0 for j in range(matrix_size): # Row-wise bit = matrix[i][j] if bit == prev_bit_row: row_counter += 1 else: if row_counter >= 5: score += row_counter - 2 # N1 == 3 row_counter = 1 prev_bit_row = bit # Col-wise bit = matrix[j][i] if bit == prev_bit_col: col_counter += 1 else: if col_counter >= 5: score += col_counter - 2 # N1 == 3 col_counter = 1 prev_bit_col = bit if row_counter >= 5: score += row_counter - 2 # N1 == 3 if col_counter >= 5: score += col_counter - 2 # N1 == 3 return score
def function[score_n1, parameter[matrix, matrix_size]]: constant[ Implements the penalty score feature 1. ISO/IEC 18004:2015(E) -- 7.8.3 Evaluation of data masking results - Table 11 (page 54) ============================================ ======================== ====== Feature Evaluation condition Points ============================================ ======================== ====== Adjacent modules in row/column in same color No. of modules = (5 + i) N1 + i ============================================ ======================== ====== N1 = 3 :param matrix: The matrix to evaluate :param matrix_size: The width (or height) of the matrix. :return int: The penalty score (feature 1) of the matrix. ] variable[score] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[name[matrix_size]]]] begin[:] <ast.Tuple object at 0x7da204623bb0> assign[=] tuple[[<ast.UnaryOp object at 0x7da204623760>, <ast.UnaryOp object at 0x7da2046210c0>]] <ast.Tuple object at 0x7da204622c50> assign[=] tuple[[<ast.Constant object at 0x7da204621780>, <ast.Constant object at 0x7da204620070>]] for taget[name[j]] in starred[call[name[range], parameter[name[matrix_size]]]] begin[:] variable[bit] assign[=] call[call[name[matrix]][name[i]]][name[j]] if compare[name[bit] equal[==] name[prev_bit_row]] begin[:] <ast.AugAssign object at 0x7da204623d90> variable[bit] assign[=] call[call[name[matrix]][name[j]]][name[i]] if compare[name[bit] equal[==] name[prev_bit_col]] begin[:] <ast.AugAssign object at 0x7da204622350> if compare[name[row_counter] greater_or_equal[>=] constant[5]] begin[:] <ast.AugAssign object at 0x7da2046223b0> if compare[name[col_counter] greater_or_equal[>=] constant[5]] begin[:] <ast.AugAssign object at 0x7da204620880> return[name[score]]
keyword[def] identifier[score_n1] ( identifier[matrix] , identifier[matrix_size] ): literal[string] identifier[score] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[matrix_size] ): identifier[prev_bit_row] , identifier[prev_bit_col] =- literal[int] ,- literal[int] identifier[row_counter] , identifier[col_counter] = literal[int] , literal[int] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[matrix_size] ): identifier[bit] = identifier[matrix] [ identifier[i] ][ identifier[j] ] keyword[if] identifier[bit] == identifier[prev_bit_row] : identifier[row_counter] += literal[int] keyword[else] : keyword[if] identifier[row_counter] >= literal[int] : identifier[score] += identifier[row_counter] - literal[int] identifier[row_counter] = literal[int] identifier[prev_bit_row] = identifier[bit] identifier[bit] = identifier[matrix] [ identifier[j] ][ identifier[i] ] keyword[if] identifier[bit] == identifier[prev_bit_col] : identifier[col_counter] += literal[int] keyword[else] : keyword[if] identifier[col_counter] >= literal[int] : identifier[score] += identifier[col_counter] - literal[int] identifier[col_counter] = literal[int] identifier[prev_bit_col] = identifier[bit] keyword[if] identifier[row_counter] >= literal[int] : identifier[score] += identifier[row_counter] - literal[int] keyword[if] identifier[col_counter] >= literal[int] : identifier[score] += identifier[col_counter] - literal[int] keyword[return] identifier[score]
def score_n1(matrix, matrix_size): """ Implements the penalty score feature 1. ISO/IEC 18004:2015(E) -- 7.8.3 Evaluation of data masking results - Table 11 (page 54) ============================================ ======================== ====== Feature Evaluation condition Points ============================================ ======================== ====== Adjacent modules in row/column in same color No. of modules = (5 + i) N1 + i ============================================ ======================== ====== N1 = 3 :param matrix: The matrix to evaluate :param matrix_size: The width (or height) of the matrix. :return int: The penalty score (feature 1) of the matrix. """ score = 0 for i in range(matrix_size): (prev_bit_row, prev_bit_col) = (-1, -1) (row_counter, col_counter) = (0, 0) for j in range(matrix_size): # Row-wise bit = matrix[i][j] if bit == prev_bit_row: row_counter += 1 # depends on [control=['if'], data=[]] else: if row_counter >= 5: score += row_counter - 2 # N1 == 3 # depends on [control=['if'], data=['row_counter']] row_counter = 1 prev_bit_row = bit # Col-wise bit = matrix[j][i] if bit == prev_bit_col: col_counter += 1 # depends on [control=['if'], data=[]] else: if col_counter >= 5: score += col_counter - 2 # N1 == 3 # depends on [control=['if'], data=['col_counter']] col_counter = 1 prev_bit_col = bit # depends on [control=['for'], data=['j']] if row_counter >= 5: score += row_counter - 2 # N1 == 3 # depends on [control=['if'], data=['row_counter']] if col_counter >= 5: score += col_counter - 2 # N1 == 3 # depends on [control=['if'], data=['col_counter']] # depends on [control=['for'], data=['i']] return score
def parse_xml(data, handle_units): """Parse XML data returned by NCSS.""" root = ET.fromstring(data) return squish(parse_xml_dataset(root, handle_units))
def function[parse_xml, parameter[data, handle_units]]: constant[Parse XML data returned by NCSS.] variable[root] assign[=] call[name[ET].fromstring, parameter[name[data]]] return[call[name[squish], parameter[call[name[parse_xml_dataset], parameter[name[root], name[handle_units]]]]]]
keyword[def] identifier[parse_xml] ( identifier[data] , identifier[handle_units] ): literal[string] identifier[root] = identifier[ET] . identifier[fromstring] ( identifier[data] ) keyword[return] identifier[squish] ( identifier[parse_xml_dataset] ( identifier[root] , identifier[handle_units] ))
def parse_xml(data, handle_units): """Parse XML data returned by NCSS.""" root = ET.fromstring(data) return squish(parse_xml_dataset(root, handle_units))
def check_nocycles(Adj, verbosity=2): """\ Checks that there are no cycles in graph described by adjacancy matrix. Parameters ---------- Adj (np.array): adjancancy matrix of dimension (dim, dim) Returns ------- True if there is no cycle, False otherwise. """ dim = Adj.shape[0] for g in range(dim): v = np.zeros(dim) v[g] = 1 for i in range(dim): v = Adj.dot(v) if v[g] > 1e-10: if verbosity > 2: settings.m(0,Adj) settings.m(0,'contains a cycle of length',i+1, 'starting from node',g, '-> reject') return False return True
def function[check_nocycles, parameter[Adj, verbosity]]: constant[ Checks that there are no cycles in graph described by adjacancy matrix. Parameters ---------- Adj (np.array): adjancancy matrix of dimension (dim, dim) Returns ------- True if there is no cycle, False otherwise. ] variable[dim] assign[=] call[name[Adj].shape][constant[0]] for taget[name[g]] in starred[call[name[range], parameter[name[dim]]]] begin[:] variable[v] assign[=] call[name[np].zeros, parameter[name[dim]]] call[name[v]][name[g]] assign[=] constant[1] for taget[name[i]] in starred[call[name[range], parameter[name[dim]]]] begin[:] variable[v] assign[=] call[name[Adj].dot, parameter[name[v]]] if compare[call[name[v]][name[g]] greater[>] constant[1e-10]] begin[:] if compare[name[verbosity] greater[>] constant[2]] begin[:] call[name[settings].m, parameter[constant[0], name[Adj]]] call[name[settings].m, parameter[constant[0], constant[contains a cycle of length], binary_operation[name[i] + constant[1]], constant[starting from node], name[g], constant[-> reject]]] return[constant[False]] return[constant[True]]
keyword[def] identifier[check_nocycles] ( identifier[Adj] , identifier[verbosity] = literal[int] ): literal[string] identifier[dim] = identifier[Adj] . identifier[shape] [ literal[int] ] keyword[for] identifier[g] keyword[in] identifier[range] ( identifier[dim] ): identifier[v] = identifier[np] . identifier[zeros] ( identifier[dim] ) identifier[v] [ identifier[g] ]= literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[dim] ): identifier[v] = identifier[Adj] . identifier[dot] ( identifier[v] ) keyword[if] identifier[v] [ identifier[g] ]> literal[int] : keyword[if] identifier[verbosity] > literal[int] : identifier[settings] . identifier[m] ( literal[int] , identifier[Adj] ) identifier[settings] . identifier[m] ( literal[int] , literal[string] , identifier[i] + literal[int] , literal[string] , identifier[g] , literal[string] ) keyword[return] keyword[False] keyword[return] keyword[True]
def check_nocycles(Adj, verbosity=2): """ Checks that there are no cycles in graph described by adjacancy matrix. Parameters ---------- Adj (np.array): adjancancy matrix of dimension (dim, dim) Returns ------- True if there is no cycle, False otherwise. """ dim = Adj.shape[0] for g in range(dim): v = np.zeros(dim) v[g] = 1 for i in range(dim): v = Adj.dot(v) if v[g] > 1e-10: if verbosity > 2: settings.m(0, Adj) settings.m(0, 'contains a cycle of length', i + 1, 'starting from node', g, '-> reject') # depends on [control=['if'], data=[]] return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['g']] return True
def domain_name(): """Return a random domain name. Lowercased result of :py:func:`~forgery_py.forgery.name.company_name()` plus :py:func:`~top_level_domain()`. """ result = random.choice(get_dictionary('company_names')).strip() result += '.' + top_level_domain() return result.lower()
def function[domain_name, parameter[]]: constant[Return a random domain name. Lowercased result of :py:func:`~forgery_py.forgery.name.company_name()` plus :py:func:`~top_level_domain()`. ] variable[result] assign[=] call[call[name[random].choice, parameter[call[name[get_dictionary], parameter[constant[company_names]]]]].strip, parameter[]] <ast.AugAssign object at 0x7da1affc03a0> return[call[name[result].lower, parameter[]]]
keyword[def] identifier[domain_name] (): literal[string] identifier[result] = identifier[random] . identifier[choice] ( identifier[get_dictionary] ( literal[string] )). identifier[strip] () identifier[result] += literal[string] + identifier[top_level_domain] () keyword[return] identifier[result] . identifier[lower] ()
def domain_name(): """Return a random domain name. Lowercased result of :py:func:`~forgery_py.forgery.name.company_name()` plus :py:func:`~top_level_domain()`. """ result = random.choice(get_dictionary('company_names')).strip() result += '.' + top_level_domain() return result.lower()
def popitem(self): """Remove and return a random `(key, value)` pair.""" try: key = self.__choice(list(self)) except IndexError: raise KeyError('%s is empty' % self.__class__.__name__) else: return (key, self.pop(key))
def function[popitem, parameter[self]]: constant[Remove and return a random `(key, value)` pair.] <ast.Try object at 0x7da20c7c8640>
keyword[def] identifier[popitem] ( identifier[self] ): literal[string] keyword[try] : identifier[key] = identifier[self] . identifier[__choice] ( identifier[list] ( identifier[self] )) keyword[except] identifier[IndexError] : keyword[raise] identifier[KeyError] ( literal[string] % identifier[self] . identifier[__class__] . identifier[__name__] ) keyword[else] : keyword[return] ( identifier[key] , identifier[self] . identifier[pop] ( identifier[key] ))
def popitem(self): """Remove and return a random `(key, value)` pair.""" try: key = self.__choice(list(self)) # depends on [control=['try'], data=[]] except IndexError: raise KeyError('%s is empty' % self.__class__.__name__) # depends on [control=['except'], data=[]] else: return (key, self.pop(key))
def apply(self, root): """ Apply the import (rule) to the specified schema. If the schema does not already contain an import for the I{namespace} specified here, it is added. @param root: A schema root. @type root: L{Element} """ if not self.filter.match(root, self.ns): return if self.exists(root): return node = Element('import', ns=self.xsdns) node.set('namespace', self.ns) if self.location is not None: node.set('schemaLocation', self.location) log.debug('inserting: %s', node) root.insert(node)
def function[apply, parameter[self, root]]: constant[ Apply the import (rule) to the specified schema. If the schema does not already contain an import for the I{namespace} specified here, it is added. @param root: A schema root. @type root: L{Element} ] if <ast.UnaryOp object at 0x7da2041da950> begin[:] return[None] if call[name[self].exists, parameter[name[root]]] begin[:] return[None] variable[node] assign[=] call[name[Element], parameter[constant[import]]] call[name[node].set, parameter[constant[namespace], name[self].ns]] if compare[name[self].location is_not constant[None]] begin[:] call[name[node].set, parameter[constant[schemaLocation], name[self].location]] call[name[log].debug, parameter[constant[inserting: %s], name[node]]] call[name[root].insert, parameter[name[node]]]
keyword[def] identifier[apply] ( identifier[self] , identifier[root] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[filter] . identifier[match] ( identifier[root] , identifier[self] . identifier[ns] ): keyword[return] keyword[if] identifier[self] . identifier[exists] ( identifier[root] ): keyword[return] identifier[node] = identifier[Element] ( literal[string] , identifier[ns] = identifier[self] . identifier[xsdns] ) identifier[node] . identifier[set] ( literal[string] , identifier[self] . identifier[ns] ) keyword[if] identifier[self] . identifier[location] keyword[is] keyword[not] keyword[None] : identifier[node] . identifier[set] ( literal[string] , identifier[self] . identifier[location] ) identifier[log] . identifier[debug] ( literal[string] , identifier[node] ) identifier[root] . identifier[insert] ( identifier[node] )
def apply(self, root): """ Apply the import (rule) to the specified schema. If the schema does not already contain an import for the I{namespace} specified here, it is added. @param root: A schema root. @type root: L{Element} """ if not self.filter.match(root, self.ns): return # depends on [control=['if'], data=[]] if self.exists(root): return # depends on [control=['if'], data=[]] node = Element('import', ns=self.xsdns) node.set('namespace', self.ns) if self.location is not None: node.set('schemaLocation', self.location) # depends on [control=['if'], data=[]] log.debug('inserting: %s', node) root.insert(node)
def fix_variables(self, fixed): """Fix the value of the variables and remove it from a binary quadratic model. Args: fixed (dict): A dictionary of variable assignments. Examples: >>> bqm = dimod.BinaryQuadraticModel({'a': -.5, 'b': 0., 'c': 5}, {('a', 'b'): -1}, 0.0, dimod.SPIN) >>> bqm.fix_variables({'a': -1, 'b': +1}) """ for v, val in fixed.items(): self.fix_variable(v, val)
def function[fix_variables, parameter[self, fixed]]: constant[Fix the value of the variables and remove it from a binary quadratic model. Args: fixed (dict): A dictionary of variable assignments. Examples: >>> bqm = dimod.BinaryQuadraticModel({'a': -.5, 'b': 0., 'c': 5}, {('a', 'b'): -1}, 0.0, dimod.SPIN) >>> bqm.fix_variables({'a': -1, 'b': +1}) ] for taget[tuple[[<ast.Name object at 0x7da1b0763190>, <ast.Name object at 0x7da1b0762020>]]] in starred[call[name[fixed].items, parameter[]]] begin[:] call[name[self].fix_variable, parameter[name[v], name[val]]]
keyword[def] identifier[fix_variables] ( identifier[self] , identifier[fixed] ): literal[string] keyword[for] identifier[v] , identifier[val] keyword[in] identifier[fixed] . identifier[items] (): identifier[self] . identifier[fix_variable] ( identifier[v] , identifier[val] )
def fix_variables(self, fixed): """Fix the value of the variables and remove it from a binary quadratic model. Args: fixed (dict): A dictionary of variable assignments. Examples: >>> bqm = dimod.BinaryQuadraticModel({'a': -.5, 'b': 0., 'c': 5}, {('a', 'b'): -1}, 0.0, dimod.SPIN) >>> bqm.fix_variables({'a': -1, 'b': +1}) """ for (v, val) in fixed.items(): self.fix_variable(v, val) # depends on [control=['for'], data=[]]
def _get_sessionless_launch_url(self, context, context_id, tool_id): """ Get a sessionless launch url for an external tool. https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.generate_sessionless_launch """ url = context.format(context_id) + "/external_tools/sessionless_launch" params = {"id": tool_id} return self._get_resource(url, params)
def function[_get_sessionless_launch_url, parameter[self, context, context_id, tool_id]]: constant[ Get a sessionless launch url for an external tool. https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.generate_sessionless_launch ] variable[url] assign[=] binary_operation[call[name[context].format, parameter[name[context_id]]] + constant[/external_tools/sessionless_launch]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b020aa40>], [<ast.Name object at 0x7da1b02099c0>]] return[call[name[self]._get_resource, parameter[name[url], name[params]]]]
keyword[def] identifier[_get_sessionless_launch_url] ( identifier[self] , identifier[context] , identifier[context_id] , identifier[tool_id] ): literal[string] identifier[url] = identifier[context] . identifier[format] ( identifier[context_id] )+ literal[string] identifier[params] ={ literal[string] : identifier[tool_id] } keyword[return] identifier[self] . identifier[_get_resource] ( identifier[url] , identifier[params] )
def _get_sessionless_launch_url(self, context, context_id, tool_id): """ Get a sessionless launch url for an external tool. https://canvas.instructure.com/doc/api/external_tools.html#method.external_tools.generate_sessionless_launch """ url = context.format(context_id) + '/external_tools/sessionless_launch' params = {'id': tool_id} return self._get_resource(url, params)
def generate_new_bracket(self): """generate a new bracket""" logger.debug( 'start to create a new SuccessiveHalving iteration, self.curr_s=%d', self.curr_s) if self.curr_s < 0: logger.info("s < 0, Finish this round of Hyperband in BOHB. Generate new round") self.curr_s = self.s_max self.brackets[self.curr_s] = Bracket(s=self.curr_s, s_max=self.s_max, eta=self.eta, max_budget=self.max_budget, optimize_mode=self.optimize_mode) next_n, next_r = self.brackets[self.curr_s].get_n_r() logger.debug( 'new SuccessiveHalving iteration, next_n=%d, next_r=%d', next_n, next_r) # rewrite with TPE generated_hyper_configs = self.brackets[self.curr_s].get_hyperparameter_configurations( next_n, next_r, self.cg) self.generated_hyper_configs = generated_hyper_configs.copy()
def function[generate_new_bracket, parameter[self]]: constant[generate a new bracket] call[name[logger].debug, parameter[constant[start to create a new SuccessiveHalving iteration, self.curr_s=%d], name[self].curr_s]] if compare[name[self].curr_s less[<] constant[0]] begin[:] call[name[logger].info, parameter[constant[s < 0, Finish this round of Hyperband in BOHB. Generate new round]]] name[self].curr_s assign[=] name[self].s_max call[name[self].brackets][name[self].curr_s] assign[=] call[name[Bracket], parameter[]] <ast.Tuple object at 0x7da2054a6800> assign[=] call[call[name[self].brackets][name[self].curr_s].get_n_r, parameter[]] call[name[logger].debug, parameter[constant[new SuccessiveHalving iteration, next_n=%d, next_r=%d], name[next_n], name[next_r]]] variable[generated_hyper_configs] assign[=] call[call[name[self].brackets][name[self].curr_s].get_hyperparameter_configurations, parameter[name[next_n], name[next_r], name[self].cg]] name[self].generated_hyper_configs assign[=] call[name[generated_hyper_configs].copy, parameter[]]
keyword[def] identifier[generate_new_bracket] ( identifier[self] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[curr_s] ) keyword[if] identifier[self] . identifier[curr_s] < literal[int] : identifier[logger] . identifier[info] ( literal[string] ) identifier[self] . identifier[curr_s] = identifier[self] . identifier[s_max] identifier[self] . identifier[brackets] [ identifier[self] . identifier[curr_s] ]= identifier[Bracket] ( identifier[s] = identifier[self] . identifier[curr_s] , identifier[s_max] = identifier[self] . identifier[s_max] , identifier[eta] = identifier[self] . identifier[eta] , identifier[max_budget] = identifier[self] . identifier[max_budget] , identifier[optimize_mode] = identifier[self] . identifier[optimize_mode] ) identifier[next_n] , identifier[next_r] = identifier[self] . identifier[brackets] [ identifier[self] . identifier[curr_s] ]. identifier[get_n_r] () identifier[logger] . identifier[debug] ( literal[string] , identifier[next_n] , identifier[next_r] ) identifier[generated_hyper_configs] = identifier[self] . identifier[brackets] [ identifier[self] . identifier[curr_s] ]. identifier[get_hyperparameter_configurations] ( identifier[next_n] , identifier[next_r] , identifier[self] . identifier[cg] ) identifier[self] . identifier[generated_hyper_configs] = identifier[generated_hyper_configs] . identifier[copy] ()
def generate_new_bracket(self): """generate a new bracket""" logger.debug('start to create a new SuccessiveHalving iteration, self.curr_s=%d', self.curr_s) if self.curr_s < 0: logger.info('s < 0, Finish this round of Hyperband in BOHB. Generate new round') self.curr_s = self.s_max # depends on [control=['if'], data=[]] self.brackets[self.curr_s] = Bracket(s=self.curr_s, s_max=self.s_max, eta=self.eta, max_budget=self.max_budget, optimize_mode=self.optimize_mode) (next_n, next_r) = self.brackets[self.curr_s].get_n_r() logger.debug('new SuccessiveHalving iteration, next_n=%d, next_r=%d', next_n, next_r) # rewrite with TPE generated_hyper_configs = self.brackets[self.curr_s].get_hyperparameter_configurations(next_n, next_r, self.cg) self.generated_hyper_configs = generated_hyper_configs.copy()
def prj_view_user(self, *args, **kwargs): """View the, in the user table view selected, user. :returns: None :rtype: None :raises: None """ if not self.cur_prj: return i = self.prj_user_tablev.currentIndex() item = i.internalPointer() if item: user = item.internal_data() self.view_user(user)
def function[prj_view_user, parameter[self]]: constant[View the, in the user table view selected, user. :returns: None :rtype: None :raises: None ] if <ast.UnaryOp object at 0x7da18f722350> begin[:] return[None] variable[i] assign[=] call[name[self].prj_user_tablev.currentIndex, parameter[]] variable[item] assign[=] call[name[i].internalPointer, parameter[]] if name[item] begin[:] variable[user] assign[=] call[name[item].internal_data, parameter[]] call[name[self].view_user, parameter[name[user]]]
keyword[def] identifier[prj_view_user] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[cur_prj] : keyword[return] identifier[i] = identifier[self] . identifier[prj_user_tablev] . identifier[currentIndex] () identifier[item] = identifier[i] . identifier[internalPointer] () keyword[if] identifier[item] : identifier[user] = identifier[item] . identifier[internal_data] () identifier[self] . identifier[view_user] ( identifier[user] )
def prj_view_user(self, *args, **kwargs): """View the, in the user table view selected, user. :returns: None :rtype: None :raises: None """ if not self.cur_prj: return # depends on [control=['if'], data=[]] i = self.prj_user_tablev.currentIndex() item = i.internalPointer() if item: user = item.internal_data() self.view_user(user) # depends on [control=['if'], data=[]]
def last_ehlo_response(self, response: SMTPResponse) -> None: """ When setting the last EHLO response, parse the message for supported extensions and auth methods. """ extensions, auth_methods = parse_esmtp_extensions(response.message) self._last_ehlo_response = response self.esmtp_extensions = extensions self.server_auth_methods = auth_methods self.supports_esmtp = True
def function[last_ehlo_response, parameter[self, response]]: constant[ When setting the last EHLO response, parse the message for supported extensions and auth methods. ] <ast.Tuple object at 0x7da204347e20> assign[=] call[name[parse_esmtp_extensions], parameter[name[response].message]] name[self]._last_ehlo_response assign[=] name[response] name[self].esmtp_extensions assign[=] name[extensions] name[self].server_auth_methods assign[=] name[auth_methods] name[self].supports_esmtp assign[=] constant[True]
keyword[def] identifier[last_ehlo_response] ( identifier[self] , identifier[response] : identifier[SMTPResponse] )-> keyword[None] : literal[string] identifier[extensions] , identifier[auth_methods] = identifier[parse_esmtp_extensions] ( identifier[response] . identifier[message] ) identifier[self] . identifier[_last_ehlo_response] = identifier[response] identifier[self] . identifier[esmtp_extensions] = identifier[extensions] identifier[self] . identifier[server_auth_methods] = identifier[auth_methods] identifier[self] . identifier[supports_esmtp] = keyword[True]
def last_ehlo_response(self, response: SMTPResponse) -> None: """ When setting the last EHLO response, parse the message for supported extensions and auth methods. """ (extensions, auth_methods) = parse_esmtp_extensions(response.message) self._last_ehlo_response = response self.esmtp_extensions = extensions self.server_auth_methods = auth_methods self.supports_esmtp = True
def words(self): """Return a list of word tokens. This excludes punctuation characters. If you want to include punctuation characters, access the ``tokens`` property. :returns: A :class:`WordList <WordList>` of word tokens. """ return WordList( word_tokenize(self.raw, self.tokenizer, include_punc=False))
def function[words, parameter[self]]: constant[Return a list of word tokens. This excludes punctuation characters. If you want to include punctuation characters, access the ``tokens`` property. :returns: A :class:`WordList <WordList>` of word tokens. ] return[call[name[WordList], parameter[call[name[word_tokenize], parameter[name[self].raw, name[self].tokenizer]]]]]
keyword[def] identifier[words] ( identifier[self] ): literal[string] keyword[return] identifier[WordList] ( identifier[word_tokenize] ( identifier[self] . identifier[raw] , identifier[self] . identifier[tokenizer] , identifier[include_punc] = keyword[False] ))
def words(self): """Return a list of word tokens. This excludes punctuation characters. If you want to include punctuation characters, access the ``tokens`` property. :returns: A :class:`WordList <WordList>` of word tokens. """ return WordList(word_tokenize(self.raw, self.tokenizer, include_punc=False))
def connect(self, server): "Connects to a server and return a connection id." if 'connections' not in session: session['connections'] = {} session.save() conns = session['connections'] id = str(len(conns)) conn = Connection(server) conns[id] = conn yield request.environ['cogen.core'].events.AddCoro(conn.pull) yield id
def function[connect, parameter[self, server]]: constant[Connects to a server and return a connection id.] if compare[constant[connections] <ast.NotIn object at 0x7da2590d7190> name[session]] begin[:] call[name[session]][constant[connections]] assign[=] dictionary[[], []] call[name[session].save, parameter[]] variable[conns] assign[=] call[name[session]][constant[connections]] variable[id] assign[=] call[name[str], parameter[call[name[len], parameter[name[conns]]]]] variable[conn] assign[=] call[name[Connection], parameter[name[server]]] call[name[conns]][name[id]] assign[=] name[conn] <ast.Yield object at 0x7da18f00e770> <ast.Yield object at 0x7da18f00ebf0>
keyword[def] identifier[connect] ( identifier[self] , identifier[server] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[session] : identifier[session] [ literal[string] ]={} identifier[session] . identifier[save] () identifier[conns] = identifier[session] [ literal[string] ] identifier[id] = identifier[str] ( identifier[len] ( identifier[conns] )) identifier[conn] = identifier[Connection] ( identifier[server] ) identifier[conns] [ identifier[id] ]= identifier[conn] keyword[yield] identifier[request] . identifier[environ] [ literal[string] ]. identifier[events] . identifier[AddCoro] ( identifier[conn] . identifier[pull] ) keyword[yield] identifier[id]
def connect(self, server): """Connects to a server and return a connection id.""" if 'connections' not in session: session['connections'] = {} session.save() # depends on [control=['if'], data=['session']] conns = session['connections'] id = str(len(conns)) conn = Connection(server) conns[id] = conn yield request.environ['cogen.core'].events.AddCoro(conn.pull) yield id
def from_rdata_list(name, ttl, rdatas): """Create an RRset with the specified name and TTL, and with the specified list of rdata objects. @rtype: dns.rrset.RRset object """ if isinstance(name, (str, unicode)): name = dns.name.from_text(name, None) if len(rdatas) == 0: raise ValueError("rdata list must not be empty") r = None for rd in rdatas: if r is None: r = RRset(name, rd.rdclass, rd.rdtype) r.update_ttl(ttl) first_time = False r.add(rd) return r
def function[from_rdata_list, parameter[name, ttl, rdatas]]: constant[Create an RRset with the specified name and TTL, and with the specified list of rdata objects. @rtype: dns.rrset.RRset object ] if call[name[isinstance], parameter[name[name], tuple[[<ast.Name object at 0x7da1b2345b10>, <ast.Name object at 0x7da1b2345f90>]]]] begin[:] variable[name] assign[=] call[name[dns].name.from_text, parameter[name[name], constant[None]]] if compare[call[name[len], parameter[name[rdatas]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b23460e0> variable[r] assign[=] constant[None] for taget[name[rd]] in starred[name[rdatas]] begin[:] if compare[name[r] is constant[None]] begin[:] variable[r] assign[=] call[name[RRset], parameter[name[name], name[rd].rdclass, name[rd].rdtype]] call[name[r].update_ttl, parameter[name[ttl]]] variable[first_time] assign[=] constant[False] call[name[r].add, parameter[name[rd]]] return[name[r]]
keyword[def] identifier[from_rdata_list] ( identifier[name] , identifier[ttl] , identifier[rdatas] ): literal[string] keyword[if] identifier[isinstance] ( identifier[name] ,( identifier[str] , identifier[unicode] )): identifier[name] = identifier[dns] . identifier[name] . identifier[from_text] ( identifier[name] , keyword[None] ) keyword[if] identifier[len] ( identifier[rdatas] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[r] = keyword[None] keyword[for] identifier[rd] keyword[in] identifier[rdatas] : keyword[if] identifier[r] keyword[is] keyword[None] : identifier[r] = identifier[RRset] ( identifier[name] , identifier[rd] . identifier[rdclass] , identifier[rd] . identifier[rdtype] ) identifier[r] . identifier[update_ttl] ( identifier[ttl] ) identifier[first_time] = keyword[False] identifier[r] . identifier[add] ( identifier[rd] ) keyword[return] identifier[r]
def from_rdata_list(name, ttl, rdatas): """Create an RRset with the specified name and TTL, and with the specified list of rdata objects. @rtype: dns.rrset.RRset object """ if isinstance(name, (str, unicode)): name = dns.name.from_text(name, None) # depends on [control=['if'], data=[]] if len(rdatas) == 0: raise ValueError('rdata list must not be empty') # depends on [control=['if'], data=[]] r = None for rd in rdatas: if r is None: r = RRset(name, rd.rdclass, rd.rdtype) r.update_ttl(ttl) first_time = False # depends on [control=['if'], data=['r']] r.add(rd) # depends on [control=['for'], data=['rd']] return r
def __draw_constant_line(self, value_label_style): "Draw a constant line on the y-axis with the label" value, label, style = value_label_style start = self.transform_output_coordinates((0, value))[1] stop = self.graph_width path = etree.SubElement(self.graph, 'path', { 'd': 'M 0 %(start)s h%(stop)s' % locals(), 'class': 'constantLine'}) if style: path.set('style', style) text = etree.SubElement(self.graph, 'text', { 'x': str(2), 'y': str(start - 2), 'class': 'constantLine'}) text.text = label
def function[__draw_constant_line, parameter[self, value_label_style]]: constant[Draw a constant line on the y-axis with the label] <ast.Tuple object at 0x7da1b0215e70> assign[=] name[value_label_style] variable[start] assign[=] call[call[name[self].transform_output_coordinates, parameter[tuple[[<ast.Constant object at 0x7da1b0217910>, <ast.Name object at 0x7da1b02177c0>]]]]][constant[1]] variable[stop] assign[=] name[self].graph_width variable[path] assign[=] call[name[etree].SubElement, parameter[name[self].graph, constant[path], dictionary[[<ast.Constant object at 0x7da1b0214880>, <ast.Constant object at 0x7da1b0216050>], [<ast.BinOp object at 0x7da1b0216a40>, <ast.Constant object at 0x7da1b0216c20>]]]] if name[style] begin[:] call[name[path].set, parameter[constant[style], name[style]]] variable[text] assign[=] call[name[etree].SubElement, parameter[name[self].graph, constant[text], dictionary[[<ast.Constant object at 0x7da1b0214910>, <ast.Constant object at 0x7da1b0215f60>, <ast.Constant object at 0x7da1b02160e0>], [<ast.Call object at 0x7da1b0216c80>, <ast.Call object at 0x7da1b0214ca0>, <ast.Constant object at 0x7da1b02174f0>]]]] name[text].text assign[=] name[label]
keyword[def] identifier[__draw_constant_line] ( identifier[self] , identifier[value_label_style] ): literal[string] identifier[value] , identifier[label] , identifier[style] = identifier[value_label_style] identifier[start] = identifier[self] . identifier[transform_output_coordinates] (( literal[int] , identifier[value] ))[ literal[int] ] identifier[stop] = identifier[self] . identifier[graph_width] identifier[path] = identifier[etree] . identifier[SubElement] ( identifier[self] . identifier[graph] , literal[string] ,{ literal[string] : literal[string] % identifier[locals] (), literal[string] : literal[string] }) keyword[if] identifier[style] : identifier[path] . identifier[set] ( literal[string] , identifier[style] ) identifier[text] = identifier[etree] . identifier[SubElement] ( identifier[self] . identifier[graph] , literal[string] ,{ literal[string] : identifier[str] ( literal[int] ), literal[string] : identifier[str] ( identifier[start] - literal[int] ), literal[string] : literal[string] }) identifier[text] . identifier[text] = identifier[label]
def __draw_constant_line(self, value_label_style): """Draw a constant line on the y-axis with the label""" (value, label, style) = value_label_style start = self.transform_output_coordinates((0, value))[1] stop = self.graph_width path = etree.SubElement(self.graph, 'path', {'d': 'M 0 %(start)s h%(stop)s' % locals(), 'class': 'constantLine'}) if style: path.set('style', style) # depends on [control=['if'], data=[]] text = etree.SubElement(self.graph, 'text', {'x': str(2), 'y': str(start - 2), 'class': 'constantLine'}) text.text = label
def create_config(self, env, conf): """ Set conf to env under service. pass None to env for root. """ if not isinstance(conf, collections.Mapping): raise ValueError("conf must be a collections.Mapping") self.zk.ensure_path(self.view_path) self._create( self._get_env_path(env), conf ) self._update_view(env)
def function[create_config, parameter[self, env, conf]]: constant[ Set conf to env under service. pass None to env for root. ] if <ast.UnaryOp object at 0x7da18bc72290> begin[:] <ast.Raise object at 0x7da18bc73760> call[name[self].zk.ensure_path, parameter[name[self].view_path]] call[name[self]._create, parameter[call[name[self]._get_env_path, parameter[name[env]]], name[conf]]] call[name[self]._update_view, parameter[name[env]]]
keyword[def] identifier[create_config] ( identifier[self] , identifier[env] , identifier[conf] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[conf] , identifier[collections] . identifier[Mapping] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[zk] . identifier[ensure_path] ( identifier[self] . identifier[view_path] ) identifier[self] . identifier[_create] ( identifier[self] . identifier[_get_env_path] ( identifier[env] ), identifier[conf] ) identifier[self] . identifier[_update_view] ( identifier[env] )
def create_config(self, env, conf): """ Set conf to env under service. pass None to env for root. """ if not isinstance(conf, collections.Mapping): raise ValueError('conf must be a collections.Mapping') # depends on [control=['if'], data=[]] self.zk.ensure_path(self.view_path) self._create(self._get_env_path(env), conf) self._update_view(env)
def __list_supybot_archives(self): """List the filepath of the archives stored in dirpath""" archives = [] for root, _, files in os.walk(self.dirpath): for filename in files: location = os.path.join(root, filename) archives.append(location) return archives
def function[__list_supybot_archives, parameter[self]]: constant[List the filepath of the archives stored in dirpath] variable[archives] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b0315db0>, <ast.Name object at 0x7da1b0317c70>, <ast.Name object at 0x7da1b02857e0>]]] in starred[call[name[os].walk, parameter[name[self].dirpath]]] begin[:] for taget[name[filename]] in starred[name[files]] begin[:] variable[location] assign[=] call[name[os].path.join, parameter[name[root], name[filename]]] call[name[archives].append, parameter[name[location]]] return[name[archives]]
keyword[def] identifier[__list_supybot_archives] ( identifier[self] ): literal[string] identifier[archives] =[] keyword[for] identifier[root] , identifier[_] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[self] . identifier[dirpath] ): keyword[for] identifier[filename] keyword[in] identifier[files] : identifier[location] = identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[filename] ) identifier[archives] . identifier[append] ( identifier[location] ) keyword[return] identifier[archives]
def __list_supybot_archives(self): """List the filepath of the archives stored in dirpath""" archives = [] for (root, _, files) in os.walk(self.dirpath): for filename in files: location = os.path.join(root, filename) archives.append(location) # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=[]] return archives
def nullval(cls): """Create a new instance where all of the values are 0""" d = dict(cls.__dict__.items()) for k in d: d[k] = 0 d['sl'] = cls.sl d[cls.level] = 0 return cls(**d)
def function[nullval, parameter[cls]]: constant[Create a new instance where all of the values are 0] variable[d] assign[=] call[name[dict], parameter[call[name[cls].__dict__.items, parameter[]]]] for taget[name[k]] in starred[name[d]] begin[:] call[name[d]][name[k]] assign[=] constant[0] call[name[d]][constant[sl]] assign[=] name[cls].sl call[name[d]][name[cls].level] assign[=] constant[0] return[call[name[cls], parameter[]]]
keyword[def] identifier[nullval] ( identifier[cls] ): literal[string] identifier[d] = identifier[dict] ( identifier[cls] . identifier[__dict__] . identifier[items] ()) keyword[for] identifier[k] keyword[in] identifier[d] : identifier[d] [ identifier[k] ]= literal[int] identifier[d] [ literal[string] ]= identifier[cls] . identifier[sl] identifier[d] [ identifier[cls] . identifier[level] ]= literal[int] keyword[return] identifier[cls] (** identifier[d] )
def nullval(cls): """Create a new instance where all of the values are 0""" d = dict(cls.__dict__.items()) for k in d: d[k] = 0 # depends on [control=['for'], data=['k']] d['sl'] = cls.sl d[cls.level] = 0 return cls(**d)
def quartus_add_interface_port(self, buff: List[str], intfName: str, signal, logicName: str, packager: "IpCorePackager"): """ Add subinterface to Quartus interface :param buff: line buffer for output :param intfName: name of top interface :param signal: subinterface to create port for :param logicName: name of port in Quartus """ d = signal._direction if d == INTF_DIRECTION.MASTER: dir_ = "Output" elif d == INTF_DIRECTION.SLAVE: dir_ = "Input" else: raise ValueError(d) _, width, _ = packager.getTypeWidth(packager.getInterfaceType(signal), do_eval=True) buff.append("add_interface_port %s %s %s %s %s" % ( intfName, packager.getInterfacePhysicalName(signal), logicName, dir_, width ))
def function[quartus_add_interface_port, parameter[self, buff, intfName, signal, logicName, packager]]: constant[ Add subinterface to Quartus interface :param buff: line buffer for output :param intfName: name of top interface :param signal: subinterface to create port for :param logicName: name of port in Quartus ] variable[d] assign[=] name[signal]._direction if compare[name[d] equal[==] name[INTF_DIRECTION].MASTER] begin[:] variable[dir_] assign[=] constant[Output] <ast.Tuple object at 0x7da1b0926890> assign[=] call[name[packager].getTypeWidth, parameter[call[name[packager].getInterfaceType, parameter[name[signal]]]]] call[name[buff].append, parameter[binary_operation[constant[add_interface_port %s %s %s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0926260>, <ast.Call object at 0x7da1b0925840>, <ast.Name object at 0x7da1b0926b60>, <ast.Name object at 0x7da1b0926bc0>, <ast.Name object at 0x7da1b09248e0>]]]]]
keyword[def] identifier[quartus_add_interface_port] ( identifier[self] , identifier[buff] : identifier[List] [ identifier[str] ], identifier[intfName] : identifier[str] , identifier[signal] , identifier[logicName] : identifier[str] , identifier[packager] : literal[string] ): literal[string] identifier[d] = identifier[signal] . identifier[_direction] keyword[if] identifier[d] == identifier[INTF_DIRECTION] . identifier[MASTER] : identifier[dir_] = literal[string] keyword[elif] identifier[d] == identifier[INTF_DIRECTION] . identifier[SLAVE] : identifier[dir_] = literal[string] keyword[else] : keyword[raise] identifier[ValueError] ( identifier[d] ) identifier[_] , identifier[width] , identifier[_] = identifier[packager] . identifier[getTypeWidth] ( identifier[packager] . identifier[getInterfaceType] ( identifier[signal] ), identifier[do_eval] = keyword[True] ) identifier[buff] . identifier[append] ( literal[string] %( identifier[intfName] , identifier[packager] . identifier[getInterfacePhysicalName] ( identifier[signal] ), identifier[logicName] , identifier[dir_] , identifier[width] ))
def quartus_add_interface_port(self, buff: List[str], intfName: str, signal, logicName: str, packager: 'IpCorePackager'): """ Add subinterface to Quartus interface :param buff: line buffer for output :param intfName: name of top interface :param signal: subinterface to create port for :param logicName: name of port in Quartus """ d = signal._direction if d == INTF_DIRECTION.MASTER: dir_ = 'Output' # depends on [control=['if'], data=[]] elif d == INTF_DIRECTION.SLAVE: dir_ = 'Input' # depends on [control=['if'], data=[]] else: raise ValueError(d) (_, width, _) = packager.getTypeWidth(packager.getInterfaceType(signal), do_eval=True) buff.append('add_interface_port %s %s %s %s %s' % (intfName, packager.getInterfacePhysicalName(signal), logicName, dir_, width))
def list_topics(self, request): """ List all topics in a logstore. Unsuccessful opertaion will cause an LogException. :type request: ListTopicsRequest :param request: the ListTopics request parameters class. :return: ListTopicsResponse :raise: LogException """ headers = {} params = {} if request.get_token() is not None: params['token'] = request.get_token() if request.get_line() is not None: params['line'] = request.get_line() params['type'] = 'topic' logstore = request.get_logstore() project = request.get_project() resource = "/logstores/" + logstore (resp, header) = self._send("GET", project, None, resource, params, headers) return ListTopicsResponse(resp, header)
def function[list_topics, parameter[self, request]]: constant[ List all topics in a logstore. Unsuccessful opertaion will cause an LogException. :type request: ListTopicsRequest :param request: the ListTopics request parameters class. :return: ListTopicsResponse :raise: LogException ] variable[headers] assign[=] dictionary[[], []] variable[params] assign[=] dictionary[[], []] if compare[call[name[request].get_token, parameter[]] is_not constant[None]] begin[:] call[name[params]][constant[token]] assign[=] call[name[request].get_token, parameter[]] if compare[call[name[request].get_line, parameter[]] is_not constant[None]] begin[:] call[name[params]][constant[line]] assign[=] call[name[request].get_line, parameter[]] call[name[params]][constant[type]] assign[=] constant[topic] variable[logstore] assign[=] call[name[request].get_logstore, parameter[]] variable[project] assign[=] call[name[request].get_project, parameter[]] variable[resource] assign[=] binary_operation[constant[/logstores/] + name[logstore]] <ast.Tuple object at 0x7da1b086f8e0> assign[=] call[name[self]._send, parameter[constant[GET], name[project], constant[None], name[resource], name[params], name[headers]]] return[call[name[ListTopicsResponse], parameter[name[resp], name[header]]]]
keyword[def] identifier[list_topics] ( identifier[self] , identifier[request] ): literal[string] identifier[headers] ={} identifier[params] ={} keyword[if] identifier[request] . identifier[get_token] () keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[request] . identifier[get_token] () keyword[if] identifier[request] . identifier[get_line] () keyword[is] keyword[not] keyword[None] : identifier[params] [ literal[string] ]= identifier[request] . identifier[get_line] () identifier[params] [ literal[string] ]= literal[string] identifier[logstore] = identifier[request] . identifier[get_logstore] () identifier[project] = identifier[request] . identifier[get_project] () identifier[resource] = literal[string] + identifier[logstore] ( identifier[resp] , identifier[header] )= identifier[self] . identifier[_send] ( literal[string] , identifier[project] , keyword[None] , identifier[resource] , identifier[params] , identifier[headers] ) keyword[return] identifier[ListTopicsResponse] ( identifier[resp] , identifier[header] )
def list_topics(self, request): """ List all topics in a logstore. Unsuccessful opertaion will cause an LogException. :type request: ListTopicsRequest :param request: the ListTopics request parameters class. :return: ListTopicsResponse :raise: LogException """ headers = {} params = {} if request.get_token() is not None: params['token'] = request.get_token() # depends on [control=['if'], data=[]] if request.get_line() is not None: params['line'] = request.get_line() # depends on [control=['if'], data=[]] params['type'] = 'topic' logstore = request.get_logstore() project = request.get_project() resource = '/logstores/' + logstore (resp, header) = self._send('GET', project, None, resource, params, headers) return ListTopicsResponse(resp, header)
def _search_tree_backwards(self, template, parent_lanes): """Searches the process tree backwards in search of a provided process The search takes into consideration the provided parent lanes and searches only those Parameters ---------- template : str Name of the process template attribute being searched parent_lanes : list List of integers with the parent lanes to be searched Returns ------- bool Returns True when the template is found. Otherwise returns False. """ for p in self.processes[::-1]: # Ignore process in different lanes if p.lane not in parent_lanes: continue # template found if p.template == template: return True return False
def function[_search_tree_backwards, parameter[self, template, parent_lanes]]: constant[Searches the process tree backwards in search of a provided process The search takes into consideration the provided parent lanes and searches only those Parameters ---------- template : str Name of the process template attribute being searched parent_lanes : list List of integers with the parent lanes to be searched Returns ------- bool Returns True when the template is found. Otherwise returns False. ] for taget[name[p]] in starred[call[name[self].processes][<ast.Slice object at 0x7da1b023c190>]] begin[:] if compare[name[p].lane <ast.NotIn object at 0x7da2590d7190> name[parent_lanes]] begin[:] continue if compare[name[p].template equal[==] name[template]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[_search_tree_backwards] ( identifier[self] , identifier[template] , identifier[parent_lanes] ): literal[string] keyword[for] identifier[p] keyword[in] identifier[self] . identifier[processes] [::- literal[int] ]: keyword[if] identifier[p] . identifier[lane] keyword[not] keyword[in] identifier[parent_lanes] : keyword[continue] keyword[if] identifier[p] . identifier[template] == identifier[template] : keyword[return] keyword[True] keyword[return] keyword[False]
def _search_tree_backwards(self, template, parent_lanes): """Searches the process tree backwards in search of a provided process The search takes into consideration the provided parent lanes and searches only those Parameters ---------- template : str Name of the process template attribute being searched parent_lanes : list List of integers with the parent lanes to be searched Returns ------- bool Returns True when the template is found. Otherwise returns False. """ for p in self.processes[::-1]: # Ignore process in different lanes if p.lane not in parent_lanes: continue # depends on [control=['if'], data=[]] # template found if p.template == template: return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']] return False
def peek(self, length): """ Returns up to length bytes from the current memory segment """ t = self.current_position + length if not self.current_segment.inrange(t): raise Exception('Would read over segment boundaries!') return self.current_segment.data[self.current_position - self.current_segment.start_address :t - self.current_segment.start_address]
def function[peek, parameter[self, length]]: constant[ Returns up to length bytes from the current memory segment ] variable[t] assign[=] binary_operation[name[self].current_position + name[length]] if <ast.UnaryOp object at 0x7da1b102a2c0> begin[:] <ast.Raise object at 0x7da1b1028d60> return[call[name[self].current_segment.data][<ast.Slice object at 0x7da1b10290f0>]]
keyword[def] identifier[peek] ( identifier[self] , identifier[length] ): literal[string] identifier[t] = identifier[self] . identifier[current_position] + identifier[length] keyword[if] keyword[not] identifier[self] . identifier[current_segment] . identifier[inrange] ( identifier[t] ): keyword[raise] identifier[Exception] ( literal[string] ) keyword[return] identifier[self] . identifier[current_segment] . identifier[data] [ identifier[self] . identifier[current_position] - identifier[self] . identifier[current_segment] . identifier[start_address] : identifier[t] - identifier[self] . identifier[current_segment] . identifier[start_address] ]
def peek(self, length): """ Returns up to length bytes from the current memory segment """ t = self.current_position + length if not self.current_segment.inrange(t): raise Exception('Would read over segment boundaries!') # depends on [control=['if'], data=[]] return self.current_segment.data[self.current_position - self.current_segment.start_address:t - self.current_segment.start_address]
def _init( self, default_prefix='_', fext=TMPL_FN_EXT, req_tmpl_name=REQ_TMPL_NAME, text_prefix=REQUIREJS_TEXT_PREFIX, auto_reload=False, *a, **kw): """ Arguments: registry_name The name of this registry. """ self.default_prefix = default_prefix self.molds = {} self.tracked_entry_points = {} self.fext = fext self.req_tmpl_name = req_tmpl_name self.text_prefix = text_prefix self.auto_reload = auto_reload
def function[_init, parameter[self, default_prefix, fext, req_tmpl_name, text_prefix, auto_reload]]: constant[ Arguments: registry_name The name of this registry. ] name[self].default_prefix assign[=] name[default_prefix] name[self].molds assign[=] dictionary[[], []] name[self].tracked_entry_points assign[=] dictionary[[], []] name[self].fext assign[=] name[fext] name[self].req_tmpl_name assign[=] name[req_tmpl_name] name[self].text_prefix assign[=] name[text_prefix] name[self].auto_reload assign[=] name[auto_reload]
keyword[def] identifier[_init] ( identifier[self] , identifier[default_prefix] = literal[string] , identifier[fext] = identifier[TMPL_FN_EXT] , identifier[req_tmpl_name] = identifier[REQ_TMPL_NAME] , identifier[text_prefix] = identifier[REQUIREJS_TEXT_PREFIX] , identifier[auto_reload] = keyword[False] , * identifier[a] ,** identifier[kw] ): literal[string] identifier[self] . identifier[default_prefix] = identifier[default_prefix] identifier[self] . identifier[molds] ={} identifier[self] . identifier[tracked_entry_points] ={} identifier[self] . identifier[fext] = identifier[fext] identifier[self] . identifier[req_tmpl_name] = identifier[req_tmpl_name] identifier[self] . identifier[text_prefix] = identifier[text_prefix] identifier[self] . identifier[auto_reload] = identifier[auto_reload]
def _init(self, default_prefix='_', fext=TMPL_FN_EXT, req_tmpl_name=REQ_TMPL_NAME, text_prefix=REQUIREJS_TEXT_PREFIX, auto_reload=False, *a, **kw): """ Arguments: registry_name The name of this registry. """ self.default_prefix = default_prefix self.molds = {} self.tracked_entry_points = {} self.fext = fext self.req_tmpl_name = req_tmpl_name self.text_prefix = text_prefix self.auto_reload = auto_reload
def build(self): """Do the query.""" result = [] for entry in self.sequence: ignore = False for filter_function in self.filter_functions: if not filter_function(entry): ignore = True break if not ignore: value = entry for transform_function in self.transform_functions: value = transform_function(value) result.append(value) return result
def function[build, parameter[self]]: constant[Do the query.] variable[result] assign[=] list[[]] for taget[name[entry]] in starred[name[self].sequence] begin[:] variable[ignore] assign[=] constant[False] for taget[name[filter_function]] in starred[name[self].filter_functions] begin[:] if <ast.UnaryOp object at 0x7da18bccae00> begin[:] variable[ignore] assign[=] constant[True] break if <ast.UnaryOp object at 0x7da18bcc9a50> begin[:] variable[value] assign[=] name[entry] for taget[name[transform_function]] in starred[name[self].transform_functions] begin[:] variable[value] assign[=] call[name[transform_function], parameter[name[value]]] call[name[result].append, parameter[name[value]]] return[name[result]]
keyword[def] identifier[build] ( identifier[self] ): literal[string] identifier[result] =[] keyword[for] identifier[entry] keyword[in] identifier[self] . identifier[sequence] : identifier[ignore] = keyword[False] keyword[for] identifier[filter_function] keyword[in] identifier[self] . identifier[filter_functions] : keyword[if] keyword[not] identifier[filter_function] ( identifier[entry] ): identifier[ignore] = keyword[True] keyword[break] keyword[if] keyword[not] identifier[ignore] : identifier[value] = identifier[entry] keyword[for] identifier[transform_function] keyword[in] identifier[self] . identifier[transform_functions] : identifier[value] = identifier[transform_function] ( identifier[value] ) identifier[result] . identifier[append] ( identifier[value] ) keyword[return] identifier[result]
def build(self): """Do the query.""" result = [] for entry in self.sequence: ignore = False for filter_function in self.filter_functions: if not filter_function(entry): ignore = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filter_function']] if not ignore: value = entry for transform_function in self.transform_functions: value = transform_function(value) # depends on [control=['for'], data=['transform_function']] result.append(value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['entry']] return result
def load_requirements(self): """If there are python library requirements set, append the python dir to the path.""" for module_name, pip_name in iteritems(self.metadata.requirements): extant = self.dataset.config.requirements[module_name].url force = (extant and extant != pip_name) self._library.install_packages(module_name, pip_name, force=force) self.dataset.config.requirements[module_name].url = pip_name python_dir = self._library.filesystem.python() sys.path.append(python_dir)
def function[load_requirements, parameter[self]]: constant[If there are python library requirements set, append the python dir to the path.] for taget[tuple[[<ast.Name object at 0x7da18c4cd660>, <ast.Name object at 0x7da18c4ceb00>]]] in starred[call[name[iteritems], parameter[name[self].metadata.requirements]]] begin[:] variable[extant] assign[=] call[name[self].dataset.config.requirements][name[module_name]].url variable[force] assign[=] <ast.BoolOp object at 0x7da20c6a9180> call[name[self]._library.install_packages, parameter[name[module_name], name[pip_name]]] call[name[self].dataset.config.requirements][name[module_name]].url assign[=] name[pip_name] variable[python_dir] assign[=] call[name[self]._library.filesystem.python, parameter[]] call[name[sys].path.append, parameter[name[python_dir]]]
keyword[def] identifier[load_requirements] ( identifier[self] ): literal[string] keyword[for] identifier[module_name] , identifier[pip_name] keyword[in] identifier[iteritems] ( identifier[self] . identifier[metadata] . identifier[requirements] ): identifier[extant] = identifier[self] . identifier[dataset] . identifier[config] . identifier[requirements] [ identifier[module_name] ]. identifier[url] identifier[force] =( identifier[extant] keyword[and] identifier[extant] != identifier[pip_name] ) identifier[self] . identifier[_library] . identifier[install_packages] ( identifier[module_name] , identifier[pip_name] , identifier[force] = identifier[force] ) identifier[self] . identifier[dataset] . identifier[config] . identifier[requirements] [ identifier[module_name] ]. identifier[url] = identifier[pip_name] identifier[python_dir] = identifier[self] . identifier[_library] . identifier[filesystem] . identifier[python] () identifier[sys] . identifier[path] . identifier[append] ( identifier[python_dir] )
def load_requirements(self): """If there are python library requirements set, append the python dir to the path.""" for (module_name, pip_name) in iteritems(self.metadata.requirements): extant = self.dataset.config.requirements[module_name].url force = extant and extant != pip_name self._library.install_packages(module_name, pip_name, force=force) self.dataset.config.requirements[module_name].url = pip_name # depends on [control=['for'], data=[]] python_dir = self._library.filesystem.python() sys.path.append(python_dir)
def image_search(auth=None, **kwargs): ''' Search for images CLI Example: .. code-block:: bash salt '*' glanceng.image_search name=image1 salt '*' glanceng.image_search ''' cloud = get_operator_cloud(auth) kwargs = _clean_kwargs(**kwargs) return cloud.search_images(**kwargs)
def function[image_search, parameter[auth]]: constant[ Search for images CLI Example: .. code-block:: bash salt '*' glanceng.image_search name=image1 salt '*' glanceng.image_search ] variable[cloud] assign[=] call[name[get_operator_cloud], parameter[name[auth]]] variable[kwargs] assign[=] call[name[_clean_kwargs], parameter[]] return[call[name[cloud].search_images, parameter[]]]
keyword[def] identifier[image_search] ( identifier[auth] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[cloud] = identifier[get_operator_cloud] ( identifier[auth] ) identifier[kwargs] = identifier[_clean_kwargs] (** identifier[kwargs] ) keyword[return] identifier[cloud] . identifier[search_images] (** identifier[kwargs] )
def image_search(auth=None, **kwargs): """ Search for images CLI Example: .. code-block:: bash salt '*' glanceng.image_search name=image1 salt '*' glanceng.image_search """ cloud = get_operator_cloud(auth) kwargs = _clean_kwargs(**kwargs) return cloud.search_images(**kwargs)
def getTmpFilename(self, tmp_dir=None, prefix='tmp', suffix='.txt', include_class_id=False, result_constructor=FilePath): """ Return a temp filename tmp_dir: directory where temporary files will be stored prefix: text to append to start of file name suffix: text to append to end of file name include_class_id: if True, will append a class identifier (built from the class name) to the filename following prefix. This is False by default b/c there is some string processing overhead in getting the class name. This will probably be most useful for testing: if temp files are being left behind by tests, you can turn this on in here (temporarily) to find out which tests are leaving the temp files. result_constructor: the constructor used to build the result (default: cogent.app.parameters.FilePath). Note that joining FilePath objects with one another or with strings, you must use the + operator. If this causes trouble, you can pass str as the the result_constructor. """ # check not none if not tmp_dir: tmp_dir = self.TmpDir # if not current directory, append "/" if not already on path elif not tmp_dir.endswith("/"): tmp_dir += "/" if include_class_id: # Append the classname to the prefix from the class name # so any problematic temp files can be associated with # the class that created them. This should be especially # useful for testing, but is turned off by default to # avoid the string-parsing overhead. class_id = str(self.__class__()) prefix = ''.join([prefix, class_id[class_id.rindex('.') + 1: class_id.index(' ')]]) try: mkdir(tmp_dir) except OSError: # Directory already exists pass # note: it is OK to join FilePath objects with + return result_constructor(tmp_dir) + result_constructor(prefix) + \ result_constructor(''.join([choice(_all_chars) for i in range(self.TmpNameLen)])) +\ result_constructor(suffix)
def function[getTmpFilename, parameter[self, tmp_dir, prefix, suffix, include_class_id, result_constructor]]: constant[ Return a temp filename tmp_dir: directory where temporary files will be stored prefix: text to append to start of file name suffix: text to append to end of file name include_class_id: if True, will append a class identifier (built from the class name) to the filename following prefix. This is False by default b/c there is some string processing overhead in getting the class name. This will probably be most useful for testing: if temp files are being left behind by tests, you can turn this on in here (temporarily) to find out which tests are leaving the temp files. result_constructor: the constructor used to build the result (default: cogent.app.parameters.FilePath). Note that joining FilePath objects with one another or with strings, you must use the + operator. If this causes trouble, you can pass str as the the result_constructor. ] if <ast.UnaryOp object at 0x7da1b0b72470> begin[:] variable[tmp_dir] assign[=] name[self].TmpDir if name[include_class_id] begin[:] variable[class_id] assign[=] call[name[str], parameter[call[name[self].__class__, parameter[]]]] variable[prefix] assign[=] call[constant[].join, parameter[list[[<ast.Name object at 0x7da1b0a30b50>, <ast.Subscript object at 0x7da1b0a30310>]]]] <ast.Try object at 0x7da1b0a6b9a0> return[binary_operation[binary_operation[binary_operation[call[name[result_constructor], parameter[name[tmp_dir]]] + call[name[result_constructor], parameter[name[prefix]]]] + call[name[result_constructor], parameter[call[constant[].join, parameter[<ast.ListComp object at 0x7da1b0a6c0d0>]]]]] + call[name[result_constructor], parameter[name[suffix]]]]]
keyword[def] identifier[getTmpFilename] ( identifier[self] , identifier[tmp_dir] = keyword[None] , identifier[prefix] = literal[string] , identifier[suffix] = literal[string] , identifier[include_class_id] = keyword[False] , identifier[result_constructor] = identifier[FilePath] ): literal[string] keyword[if] keyword[not] identifier[tmp_dir] : identifier[tmp_dir] = identifier[self] . identifier[TmpDir] keyword[elif] keyword[not] identifier[tmp_dir] . identifier[endswith] ( literal[string] ): identifier[tmp_dir] += literal[string] keyword[if] identifier[include_class_id] : identifier[class_id] = identifier[str] ( identifier[self] . identifier[__class__] ()) identifier[prefix] = literal[string] . identifier[join] ([ identifier[prefix] , identifier[class_id] [ identifier[class_id] . identifier[rindex] ( literal[string] )+ literal[int] : identifier[class_id] . identifier[index] ( literal[string] )]]) keyword[try] : identifier[mkdir] ( identifier[tmp_dir] ) keyword[except] identifier[OSError] : keyword[pass] keyword[return] identifier[result_constructor] ( identifier[tmp_dir] )+ identifier[result_constructor] ( identifier[prefix] )+ identifier[result_constructor] ( literal[string] . identifier[join] ([ identifier[choice] ( identifier[_all_chars] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[TmpNameLen] )]))+ identifier[result_constructor] ( identifier[suffix] )
def getTmpFilename(self, tmp_dir=None, prefix='tmp', suffix='.txt', include_class_id=False, result_constructor=FilePath): """ Return a temp filename tmp_dir: directory where temporary files will be stored prefix: text to append to start of file name suffix: text to append to end of file name include_class_id: if True, will append a class identifier (built from the class name) to the filename following prefix. This is False by default b/c there is some string processing overhead in getting the class name. This will probably be most useful for testing: if temp files are being left behind by tests, you can turn this on in here (temporarily) to find out which tests are leaving the temp files. result_constructor: the constructor used to build the result (default: cogent.app.parameters.FilePath). Note that joining FilePath objects with one another or with strings, you must use the + operator. If this causes trouble, you can pass str as the the result_constructor. """ # check not none if not tmp_dir: tmp_dir = self.TmpDir # depends on [control=['if'], data=[]] # if not current directory, append "/" if not already on path elif not tmp_dir.endswith('/'): tmp_dir += '/' # depends on [control=['if'], data=[]] if include_class_id: # Append the classname to the prefix from the class name # so any problematic temp files can be associated with # the class that created them. This should be especially # useful for testing, but is turned off by default to # avoid the string-parsing overhead. 
class_id = str(self.__class__()) prefix = ''.join([prefix, class_id[class_id.rindex('.') + 1:class_id.index(' ')]]) # depends on [control=['if'], data=[]] try: mkdir(tmp_dir) # depends on [control=['try'], data=[]] except OSError: # Directory already exists pass # depends on [control=['except'], data=[]] # note: it is OK to join FilePath objects with + return result_constructor(tmp_dir) + result_constructor(prefix) + result_constructor(''.join([choice(_all_chars) for i in range(self.TmpNameLen)])) + result_constructor(suffix)
def run(self, args): """Runs the information command. Args: self (InfoCommand): the ``InfoCommand`` instance args (Namespace): the arguments passed on the command-line Returns: ``None`` """ jlink = self.create_jlink(args) if args.product: print('Product: %s' % jlink.product_name) manufacturer = 'SEGGER' if jlink.oem is None else jlink.oem print('Manufacturer: %s' % manufacturer) print('Hardware Version: %s' % jlink.hardware_version) print('Firmware: %s' % jlink.firmware_version) print('DLL Version: %s' % jlink.version) print('Features: %s' % ', '.join(jlink.features)) elif args.jtag: status = jlink.hardware_status print('TCK Pin Status: %d' % status.tck) print('TDI Pin Status: %d' % status.tdi) print('TDO Pin Status: %d' % status.tdo) print('TMS Pin Status: %d' % status.tms) print('TRES Pin Status: %d' % status.tres) print('TRST Pin Status: %d' % status.trst)
def function[run, parameter[self, args]]: constant[Runs the information command. Args: self (InfoCommand): the ``InfoCommand`` instance args (Namespace): the arguments passed on the command-line Returns: ``None`` ] variable[jlink] assign[=] call[name[self].create_jlink, parameter[name[args]]] if name[args].product begin[:] call[name[print], parameter[binary_operation[constant[Product: %s] <ast.Mod object at 0x7da2590d6920> name[jlink].product_name]]] variable[manufacturer] assign[=] <ast.IfExp object at 0x7da1b1720940> call[name[print], parameter[binary_operation[constant[Manufacturer: %s] <ast.Mod object at 0x7da2590d6920> name[manufacturer]]]] call[name[print], parameter[binary_operation[constant[Hardware Version: %s] <ast.Mod object at 0x7da2590d6920> name[jlink].hardware_version]]] call[name[print], parameter[binary_operation[constant[Firmware: %s] <ast.Mod object at 0x7da2590d6920> name[jlink].firmware_version]]] call[name[print], parameter[binary_operation[constant[DLL Version: %s] <ast.Mod object at 0x7da2590d6920> name[jlink].version]]] call[name[print], parameter[binary_operation[constant[Features: %s] <ast.Mod object at 0x7da2590d6920> call[constant[, ].join, parameter[name[jlink].features]]]]]
keyword[def] identifier[run] ( identifier[self] , identifier[args] ): literal[string] identifier[jlink] = identifier[self] . identifier[create_jlink] ( identifier[args] ) keyword[if] identifier[args] . identifier[product] : identifier[print] ( literal[string] % identifier[jlink] . identifier[product_name] ) identifier[manufacturer] = literal[string] keyword[if] identifier[jlink] . identifier[oem] keyword[is] keyword[None] keyword[else] identifier[jlink] . identifier[oem] identifier[print] ( literal[string] % identifier[manufacturer] ) identifier[print] ( literal[string] % identifier[jlink] . identifier[hardware_version] ) identifier[print] ( literal[string] % identifier[jlink] . identifier[firmware_version] ) identifier[print] ( literal[string] % identifier[jlink] . identifier[version] ) identifier[print] ( literal[string] % literal[string] . identifier[join] ( identifier[jlink] . identifier[features] )) keyword[elif] identifier[args] . identifier[jtag] : identifier[status] = identifier[jlink] . identifier[hardware_status] identifier[print] ( literal[string] % identifier[status] . identifier[tck] ) identifier[print] ( literal[string] % identifier[status] . identifier[tdi] ) identifier[print] ( literal[string] % identifier[status] . identifier[tdo] ) identifier[print] ( literal[string] % identifier[status] . identifier[tms] ) identifier[print] ( literal[string] % identifier[status] . identifier[tres] ) identifier[print] ( literal[string] % identifier[status] . identifier[trst] )
def run(self, args): """Runs the information command. Args: self (InfoCommand): the ``InfoCommand`` instance args (Namespace): the arguments passed on the command-line Returns: ``None`` """ jlink = self.create_jlink(args) if args.product: print('Product: %s' % jlink.product_name) manufacturer = 'SEGGER' if jlink.oem is None else jlink.oem print('Manufacturer: %s' % manufacturer) print('Hardware Version: %s' % jlink.hardware_version) print('Firmware: %s' % jlink.firmware_version) print('DLL Version: %s' % jlink.version) print('Features: %s' % ', '.join(jlink.features)) # depends on [control=['if'], data=[]] elif args.jtag: status = jlink.hardware_status print('TCK Pin Status: %d' % status.tck) print('TDI Pin Status: %d' % status.tdi) print('TDO Pin Status: %d' % status.tdo) print('TMS Pin Status: %d' % status.tms) print('TRES Pin Status: %d' % status.tres) print('TRST Pin Status: %d' % status.trst) # depends on [control=['if'], data=[]]
def table_from_gwf(filename, name, columns=None, selection=None): """Read a Table from FrEvent structures in a GWF file (or files) Parameters ---------- filename : `str` path of GWF file to read name : `str` name associated with the `FrEvent` structures columns : `list` of `str` list of column names to read selection : `str`, `list` of `str` one or more column selection strings to apply, e.g. ``'snr>6'`` """ # open frame file if isinstance(filename, FILE_LIKE): filename = filename.name stream = io_gwf.open_gwf(filename) # parse selections and map to column indices if selection is None: selection = [] selection = parse_column_filters(selection) # read events row by row data = [] i = 0 while True: try: frevent = stream.ReadFrEvent(i, name) except IndexError: break i += 1 # read first event to get column names if columns is None: columns = _columns_from_frevent(frevent) # read row with filter row = _row_from_frevent(frevent, columns, selection) if row is not None: # if passed selection data.append(row) return Table(rows=data, names=columns)
def function[table_from_gwf, parameter[filename, name, columns, selection]]: constant[Read a Table from FrEvent structures in a GWF file (or files) Parameters ---------- filename : `str` path of GWF file to read name : `str` name associated with the `FrEvent` structures columns : `list` of `str` list of column names to read selection : `str`, `list` of `str` one or more column selection strings to apply, e.g. ``'snr>6'`` ] if call[name[isinstance], parameter[name[filename], name[FILE_LIKE]]] begin[:] variable[filename] assign[=] name[filename].name variable[stream] assign[=] call[name[io_gwf].open_gwf, parameter[name[filename]]] if compare[name[selection] is constant[None]] begin[:] variable[selection] assign[=] list[[]] variable[selection] assign[=] call[name[parse_column_filters], parameter[name[selection]]] variable[data] assign[=] list[[]] variable[i] assign[=] constant[0] while constant[True] begin[:] <ast.Try object at 0x7da18f09d570> <ast.AugAssign object at 0x7da18f09ead0> if compare[name[columns] is constant[None]] begin[:] variable[columns] assign[=] call[name[_columns_from_frevent], parameter[name[frevent]]] variable[row] assign[=] call[name[_row_from_frevent], parameter[name[frevent], name[columns], name[selection]]] if compare[name[row] is_not constant[None]] begin[:] call[name[data].append, parameter[name[row]]] return[call[name[Table], parameter[]]]
keyword[def] identifier[table_from_gwf] ( identifier[filename] , identifier[name] , identifier[columns] = keyword[None] , identifier[selection] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[filename] , identifier[FILE_LIKE] ): identifier[filename] = identifier[filename] . identifier[name] identifier[stream] = identifier[io_gwf] . identifier[open_gwf] ( identifier[filename] ) keyword[if] identifier[selection] keyword[is] keyword[None] : identifier[selection] =[] identifier[selection] = identifier[parse_column_filters] ( identifier[selection] ) identifier[data] =[] identifier[i] = literal[int] keyword[while] keyword[True] : keyword[try] : identifier[frevent] = identifier[stream] . identifier[ReadFrEvent] ( identifier[i] , identifier[name] ) keyword[except] identifier[IndexError] : keyword[break] identifier[i] += literal[int] keyword[if] identifier[columns] keyword[is] keyword[None] : identifier[columns] = identifier[_columns_from_frevent] ( identifier[frevent] ) identifier[row] = identifier[_row_from_frevent] ( identifier[frevent] , identifier[columns] , identifier[selection] ) keyword[if] identifier[row] keyword[is] keyword[not] keyword[None] : identifier[data] . identifier[append] ( identifier[row] ) keyword[return] identifier[Table] ( identifier[rows] = identifier[data] , identifier[names] = identifier[columns] )
def table_from_gwf(filename, name, columns=None, selection=None): """Read a Table from FrEvent structures in a GWF file (or files) Parameters ---------- filename : `str` path of GWF file to read name : `str` name associated with the `FrEvent` structures columns : `list` of `str` list of column names to read selection : `str`, `list` of `str` one or more column selection strings to apply, e.g. ``'snr>6'`` """ # open frame file if isinstance(filename, FILE_LIKE): filename = filename.name # depends on [control=['if'], data=[]] stream = io_gwf.open_gwf(filename) # parse selections and map to column indices if selection is None: selection = [] # depends on [control=['if'], data=['selection']] selection = parse_column_filters(selection) # read events row by row data = [] i = 0 while True: try: frevent = stream.ReadFrEvent(i, name) # depends on [control=['try'], data=[]] except IndexError: break # depends on [control=['except'], data=[]] i += 1 # read first event to get column names if columns is None: columns = _columns_from_frevent(frevent) # depends on [control=['if'], data=['columns']] # read row with filter row = _row_from_frevent(frevent, columns, selection) if row is not None: # if passed selection data.append(row) # depends on [control=['if'], data=['row']] # depends on [control=['while'], data=[]] return Table(rows=data, names=columns)
def create_rule(self, txtrule=None, regex=None, extension=None, cmd=None, codes=[0, None], recurse=True): ''' Adds a set of rules to the extraction rule list. @txtrule - Rule string, or list of rule strings, in the format <regular expression>:<file extension>[:<command to run>] @regex - If rule string is not specified, this is the regular expression string to use. @extension - If rule string is not specified, this is the file extension to use. @cmd - If rule string is not specified, this is the command to run. Alternatively a callable object may be specified, which will be passed one argument: the path to the file to extract. @codes - A list of valid return codes for the extractor. @recurse - If False, extracted directories will not be recursed into when the matryoshka option is enabled. Returns None. ''' rules = [] created_rules = [] match = False r = { 'extension': '', 'cmd': '', 'regex': None, 'codes': codes, 'recurse': recurse, } # Process single explicitly specified rule if not txtrule and regex and extension: r['extension'] = extension r['regex'] = re.compile(regex) if cmd: r['cmd'] = cmd return [r] # Process rule string, or list of rule strings if not isinstance(txtrule, type([])): rules = [txtrule] else: rules = txtrule for rule in rules: r['cmd'] = '' r['extension'] = '' try: values = self._parse_rule(rule) match = values[0] r['regex'] = re.compile(values[0]) r['extension'] = values[1] r['cmd'] = values[2] r['codes'] = values[3] r['recurse'] = values[4] except KeyboardInterrupt as e: raise e except Exception: pass # Verify that the match string was retrieved. if match: created_rules.append(r) return created_rules
def function[create_rule, parameter[self, txtrule, regex, extension, cmd, codes, recurse]]: constant[ Adds a set of rules to the extraction rule list. @txtrule - Rule string, or list of rule strings, in the format <regular expression>:<file extension>[:<command to run>] @regex - If rule string is not specified, this is the regular expression string to use. @extension - If rule string is not specified, this is the file extension to use. @cmd - If rule string is not specified, this is the command to run. Alternatively a callable object may be specified, which will be passed one argument: the path to the file to extract. @codes - A list of valid return codes for the extractor. @recurse - If False, extracted directories will not be recursed into when the matryoshka option is enabled. Returns None. ] variable[rules] assign[=] list[[]] variable[created_rules] assign[=] list[[]] variable[match] assign[=] constant[False] variable[r] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c8a410>, <ast.Constant object at 0x7da1b1c8a320>, <ast.Constant object at 0x7da1b1c8a4a0>, <ast.Constant object at 0x7da1b1c89f30>, <ast.Constant object at 0x7da1b1c8a350>], [<ast.Constant object at 0x7da1b1c8a950>, <ast.Constant object at 0x7da1b1c8a8f0>, <ast.Constant object at 0x7da1b1c8ba90>, <ast.Name object at 0x7da1b1c8bac0>, <ast.Name object at 0x7da1b1c89930>]] if <ast.BoolOp object at 0x7da1b1c8b130> begin[:] call[name[r]][constant[extension]] assign[=] name[extension] call[name[r]][constant[regex]] assign[=] call[name[re].compile, parameter[name[regex]]] if name[cmd] begin[:] call[name[r]][constant[cmd]] assign[=] name[cmd] return[list[[<ast.Name object at 0x7da1b1c89960>]]] if <ast.UnaryOp object at 0x7da1b1c893f0> begin[:] variable[rules] assign[=] list[[<ast.Name object at 0x7da1b1c897e0>]] for taget[name[rule]] in starred[name[rules]] begin[:] call[name[r]][constant[cmd]] assign[=] constant[] call[name[r]][constant[extension]] assign[=] constant[] <ast.Try object at 
0x7da1b1c8a230> if name[match] begin[:] call[name[created_rules].append, parameter[name[r]]] return[name[created_rules]]
keyword[def] identifier[create_rule] ( identifier[self] , identifier[txtrule] = keyword[None] , identifier[regex] = keyword[None] , identifier[extension] = keyword[None] , identifier[cmd] = keyword[None] , identifier[codes] =[ literal[int] , keyword[None] ], identifier[recurse] = keyword[True] ): literal[string] identifier[rules] =[] identifier[created_rules] =[] identifier[match] = keyword[False] identifier[r] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : identifier[codes] , literal[string] : identifier[recurse] , } keyword[if] keyword[not] identifier[txtrule] keyword[and] identifier[regex] keyword[and] identifier[extension] : identifier[r] [ literal[string] ]= identifier[extension] identifier[r] [ literal[string] ]= identifier[re] . identifier[compile] ( identifier[regex] ) keyword[if] identifier[cmd] : identifier[r] [ literal[string] ]= identifier[cmd] keyword[return] [ identifier[r] ] keyword[if] keyword[not] identifier[isinstance] ( identifier[txtrule] , identifier[type] ([])): identifier[rules] =[ identifier[txtrule] ] keyword[else] : identifier[rules] = identifier[txtrule] keyword[for] identifier[rule] keyword[in] identifier[rules] : identifier[r] [ literal[string] ]= literal[string] identifier[r] [ literal[string] ]= literal[string] keyword[try] : identifier[values] = identifier[self] . identifier[_parse_rule] ( identifier[rule] ) identifier[match] = identifier[values] [ literal[int] ] identifier[r] [ literal[string] ]= identifier[re] . 
identifier[compile] ( identifier[values] [ literal[int] ]) identifier[r] [ literal[string] ]= identifier[values] [ literal[int] ] identifier[r] [ literal[string] ]= identifier[values] [ literal[int] ] identifier[r] [ literal[string] ]= identifier[values] [ literal[int] ] identifier[r] [ literal[string] ]= identifier[values] [ literal[int] ] keyword[except] identifier[KeyboardInterrupt] keyword[as] identifier[e] : keyword[raise] identifier[e] keyword[except] identifier[Exception] : keyword[pass] keyword[if] identifier[match] : identifier[created_rules] . identifier[append] ( identifier[r] ) keyword[return] identifier[created_rules]
def create_rule(self, txtrule=None, regex=None, extension=None, cmd=None, codes=[0, None], recurse=True): """ Adds a set of rules to the extraction rule list. @txtrule - Rule string, or list of rule strings, in the format <regular expression>:<file extension>[:<command to run>] @regex - If rule string is not specified, this is the regular expression string to use. @extension - If rule string is not specified, this is the file extension to use. @cmd - If rule string is not specified, this is the command to run. Alternatively a callable object may be specified, which will be passed one argument: the path to the file to extract. @codes - A list of valid return codes for the extractor. @recurse - If False, extracted directories will not be recursed into when the matryoshka option is enabled. Returns None. """ rules = [] created_rules = [] match = False r = {'extension': '', 'cmd': '', 'regex': None, 'codes': codes, 'recurse': recurse} # Process single explicitly specified rule if not txtrule and regex and extension: r['extension'] = extension r['regex'] = re.compile(regex) if cmd: r['cmd'] = cmd # depends on [control=['if'], data=[]] return [r] # depends on [control=['if'], data=[]] # Process rule string, or list of rule strings if not isinstance(txtrule, type([])): rules = [txtrule] # depends on [control=['if'], data=[]] else: rules = txtrule for rule in rules: r['cmd'] = '' r['extension'] = '' try: values = self._parse_rule(rule) match = values[0] r['regex'] = re.compile(values[0]) r['extension'] = values[1] r['cmd'] = values[2] r['codes'] = values[3] r['recurse'] = values[4] # depends on [control=['try'], data=[]] except KeyboardInterrupt as e: raise e # depends on [control=['except'], data=['e']] except Exception: pass # depends on [control=['except'], data=[]] # Verify that the match string was retrieved. if match: created_rules.append(r) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rule']] return created_rules
def info(self, callback=None, **kwargs): """ Get the basic info from the current cluster. """ self.client.fetch( self.mk_req('', method='GET', **kwargs), callback = callback )
def function[info, parameter[self, callback]]: constant[ Get the basic info from the current cluster. ] call[name[self].client.fetch, parameter[call[name[self].mk_req, parameter[constant[]]]]]
keyword[def] identifier[info] ( identifier[self] , identifier[callback] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[client] . identifier[fetch] ( identifier[self] . identifier[mk_req] ( literal[string] , identifier[method] = literal[string] ,** identifier[kwargs] ), identifier[callback] = identifier[callback] )
def info(self, callback=None, **kwargs): """ Get the basic info from the current cluster. """ self.client.fetch(self.mk_req('', method='GET', **kwargs), callback=callback)
def new_connection_status(self, conn_status): """Handle reception of a new ConnectionStatus.""" if conn_status.status == CONNECTION_STATUS_CONNECTED: self._mz.update_members() if (conn_status.status == CONNECTION_STATUS_DISCONNECTED or conn_status.status == CONNECTION_STATUS_LOST): self._mz.reset_members()
def function[new_connection_status, parameter[self, conn_status]]: constant[Handle reception of a new ConnectionStatus.] if compare[name[conn_status].status equal[==] name[CONNECTION_STATUS_CONNECTED]] begin[:] call[name[self]._mz.update_members, parameter[]] if <ast.BoolOp object at 0x7da18f722f50> begin[:] call[name[self]._mz.reset_members, parameter[]]
keyword[def] identifier[new_connection_status] ( identifier[self] , identifier[conn_status] ): literal[string] keyword[if] identifier[conn_status] . identifier[status] == identifier[CONNECTION_STATUS_CONNECTED] : identifier[self] . identifier[_mz] . identifier[update_members] () keyword[if] ( identifier[conn_status] . identifier[status] == identifier[CONNECTION_STATUS_DISCONNECTED] keyword[or] identifier[conn_status] . identifier[status] == identifier[CONNECTION_STATUS_LOST] ): identifier[self] . identifier[_mz] . identifier[reset_members] ()
def new_connection_status(self, conn_status): """Handle reception of a new ConnectionStatus.""" if conn_status.status == CONNECTION_STATUS_CONNECTED: self._mz.update_members() # depends on [control=['if'], data=[]] if conn_status.status == CONNECTION_STATUS_DISCONNECTED or conn_status.status == CONNECTION_STATUS_LOST: self._mz.reset_members() # depends on [control=['if'], data=[]]
def is_repetition(self, count: int = 3) -> bool: """ Checks if the current position has repeated 3 (or a given number of) times. Unlike :func:`~chess.Board.can_claim_threefold_repetition()`, this does not consider a repetition that can be played on the next move. Note that checking this can be slow: In the worst case the entire game has to be replayed because there is no incremental transposition table. """ transposition_key = self._transposition_key() switchyard = [] try: while True: if count <= 1: return True if not self.move_stack: break move = self.pop() switchyard.append(move) if self.is_irreversible(move): break if self._transposition_key() == transposition_key: count -= 1 finally: while switchyard: self.push(switchyard.pop()) return False
def function[is_repetition, parameter[self, count]]: constant[ Checks if the current position has repeated 3 (or a given number of) times. Unlike :func:`~chess.Board.can_claim_threefold_repetition()`, this does not consider a repetition that can be played on the next move. Note that checking this can be slow: In the worst case the entire game has to be replayed because there is no incremental transposition table. ] variable[transposition_key] assign[=] call[name[self]._transposition_key, parameter[]] variable[switchyard] assign[=] list[[]] <ast.Try object at 0x7da1b1781a50> return[constant[False]]
keyword[def] identifier[is_repetition] ( identifier[self] , identifier[count] : identifier[int] = literal[int] )-> identifier[bool] : literal[string] identifier[transposition_key] = identifier[self] . identifier[_transposition_key] () identifier[switchyard] =[] keyword[try] : keyword[while] keyword[True] : keyword[if] identifier[count] <= literal[int] : keyword[return] keyword[True] keyword[if] keyword[not] identifier[self] . identifier[move_stack] : keyword[break] identifier[move] = identifier[self] . identifier[pop] () identifier[switchyard] . identifier[append] ( identifier[move] ) keyword[if] identifier[self] . identifier[is_irreversible] ( identifier[move] ): keyword[break] keyword[if] identifier[self] . identifier[_transposition_key] ()== identifier[transposition_key] : identifier[count] -= literal[int] keyword[finally] : keyword[while] identifier[switchyard] : identifier[self] . identifier[push] ( identifier[switchyard] . identifier[pop] ()) keyword[return] keyword[False]
def is_repetition(self, count: int=3) -> bool: """ Checks if the current position has repeated 3 (or a given number of) times. Unlike :func:`~chess.Board.can_claim_threefold_repetition()`, this does not consider a repetition that can be played on the next move. Note that checking this can be slow: In the worst case the entire game has to be replayed because there is no incremental transposition table. """ transposition_key = self._transposition_key() switchyard = [] try: while True: if count <= 1: return True # depends on [control=['if'], data=[]] if not self.move_stack: break # depends on [control=['if'], data=[]] move = self.pop() switchyard.append(move) if self.is_irreversible(move): break # depends on [control=['if'], data=[]] if self._transposition_key() == transposition_key: count -= 1 # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] finally: while switchyard: self.push(switchyard.pop()) # depends on [control=['while'], data=[]] return False
def open(path, mode=None, ac_parser=None, **options): """ Open given configuration file with appropriate open flag. :param path: Configuration file path :param mode: Can be 'r' and 'rb' for reading (default) or 'w', 'wb' for writing. Please note that even if you specify 'r' or 'w', it will be changed to 'rb' or 'wb' if selected backend, xml and configobj for example, for given config file prefer that. :param options: Optional keyword arguments passed to the internal file opening APIs of each backends such like 'buffering' optional parameter passed to builtin 'open' function. :return: A file object or None on any errors :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ psr = find(path, forced_type=ac_parser) if mode is not None and mode.startswith('w'): return psr.wopen(path, **options) return psr.ropen(path, **options)
def function[open, parameter[path, mode, ac_parser]]: constant[ Open given configuration file with appropriate open flag. :param path: Configuration file path :param mode: Can be 'r' and 'rb' for reading (default) or 'w', 'wb' for writing. Please note that even if you specify 'r' or 'w', it will be changed to 'rb' or 'wb' if selected backend, xml and configobj for example, for given config file prefer that. :param options: Optional keyword arguments passed to the internal file opening APIs of each backends such like 'buffering' optional parameter passed to builtin 'open' function. :return: A file object or None on any errors :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError ] variable[psr] assign[=] call[name[find], parameter[name[path]]] if <ast.BoolOp object at 0x7da207f01cf0> begin[:] return[call[name[psr].wopen, parameter[name[path]]]] return[call[name[psr].ropen, parameter[name[path]]]]
keyword[def] identifier[open] ( identifier[path] , identifier[mode] = keyword[None] , identifier[ac_parser] = keyword[None] ,** identifier[options] ): literal[string] identifier[psr] = identifier[find] ( identifier[path] , identifier[forced_type] = identifier[ac_parser] ) keyword[if] identifier[mode] keyword[is] keyword[not] keyword[None] keyword[and] identifier[mode] . identifier[startswith] ( literal[string] ): keyword[return] identifier[psr] . identifier[wopen] ( identifier[path] ,** identifier[options] ) keyword[return] identifier[psr] . identifier[ropen] ( identifier[path] ,** identifier[options] )
def open(path, mode=None, ac_parser=None, **options): """ Open given configuration file with appropriate open flag. :param path: Configuration file path :param mode: Can be 'r' and 'rb' for reading (default) or 'w', 'wb' for writing. Please note that even if you specify 'r' or 'w', it will be changed to 'rb' or 'wb' if selected backend, xml and configobj for example, for given config file prefer that. :param options: Optional keyword arguments passed to the internal file opening APIs of each backends such like 'buffering' optional parameter passed to builtin 'open' function. :return: A file object or None on any errors :raises: ValueError, UnknownProcessorTypeError, UnknownFileTypeError """ psr = find(path, forced_type=ac_parser) if mode is not None and mode.startswith('w'): return psr.wopen(path, **options) # depends on [control=['if'], data=[]] return psr.ropen(path, **options)
def _encode_to_binary_string(message, on, off): """ >>> message = "SOS" >>> _encode_to_binary_string(message, on='1', off='0') '101010001110111011100010101' >>> message = " SOS" >>> _encode_to_binary_string(message, on='1', off='0') '0000000101010001110111011100010101' """ def to_string(i, s): if i == 0 and s == off: return off * 4 return s return ''.join(to_string(i, s) for i, s in enumerate(_encode_binary(message, on=on, off=off)))
def function[_encode_to_binary_string, parameter[message, on, off]]: constant[ >>> message = "SOS" >>> _encode_to_binary_string(message, on='1', off='0') '101010001110111011100010101' >>> message = " SOS" >>> _encode_to_binary_string(message, on='1', off='0') '0000000101010001110111011100010101' ] def function[to_string, parameter[i, s]]: if <ast.BoolOp object at 0x7da20c6aa8c0> begin[:] return[binary_operation[name[off] * constant[4]]] return[name[s]] return[call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da20c6a8640>]]]
keyword[def] identifier[_encode_to_binary_string] ( identifier[message] , identifier[on] , identifier[off] ): literal[string] keyword[def] identifier[to_string] ( identifier[i] , identifier[s] ): keyword[if] identifier[i] == literal[int] keyword[and] identifier[s] == identifier[off] : keyword[return] identifier[off] * literal[int] keyword[return] identifier[s] keyword[return] literal[string] . identifier[join] ( identifier[to_string] ( identifier[i] , identifier[s] ) keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[_encode_binary] ( identifier[message] , identifier[on] = identifier[on] , identifier[off] = identifier[off] )))
def _encode_to_binary_string(message, on, off): """ >>> message = "SOS" >>> _encode_to_binary_string(message, on='1', off='0') '101010001110111011100010101' >>> message = " SOS" >>> _encode_to_binary_string(message, on='1', off='0') '0000000101010001110111011100010101' """ def to_string(i, s): if i == 0 and s == off: return off * 4 # depends on [control=['if'], data=[]] return s return ''.join((to_string(i, s) for (i, s) in enumerate(_encode_binary(message, on=on, off=off))))
def project_from_files( files, func_wrapper=_astroid_wrapper, project_name="no name", black_list=("CVS",) ): """return a Project from a list of files or modules""" # build the project representation astroid_manager = manager.AstroidManager() project = Project(project_name) for something in files: if not os.path.exists(something): fpath = modutils.file_from_modpath(something.split(".")) elif os.path.isdir(something): fpath = os.path.join(something, "__init__.py") else: fpath = something ast = func_wrapper(astroid_manager.ast_from_file, fpath) if ast is None: continue # XXX why is first file defining the project.path ? project.path = project.path or ast.file project.add_module(ast) base_name = ast.name # recurse in package except if __init__ was explicitly given if ast.package and something.find("__init__") == -1: # recurse on others packages / modules if this is a package for fpath in modutils.get_module_files( os.path.dirname(ast.file), black_list ): ast = func_wrapper(astroid_manager.ast_from_file, fpath) if ast is None or ast.name == base_name: continue project.add_module(ast) return project
def function[project_from_files, parameter[files, func_wrapper, project_name, black_list]]: constant[return a Project from a list of files or modules] variable[astroid_manager] assign[=] call[name[manager].AstroidManager, parameter[]] variable[project] assign[=] call[name[Project], parameter[name[project_name]]] for taget[name[something]] in starred[name[files]] begin[:] if <ast.UnaryOp object at 0x7da1b0351ed0> begin[:] variable[fpath] assign[=] call[name[modutils].file_from_modpath, parameter[call[name[something].split, parameter[constant[.]]]]] variable[ast] assign[=] call[name[func_wrapper], parameter[name[astroid_manager].ast_from_file, name[fpath]]] if compare[name[ast] is constant[None]] begin[:] continue name[project].path assign[=] <ast.BoolOp object at 0x7da1b0351d50> call[name[project].add_module, parameter[name[ast]]] variable[base_name] assign[=] name[ast].name if <ast.BoolOp object at 0x7da1b02f1990> begin[:] for taget[name[fpath]] in starred[call[name[modutils].get_module_files, parameter[call[name[os].path.dirname, parameter[name[ast].file]], name[black_list]]]] begin[:] variable[ast] assign[=] call[name[func_wrapper], parameter[name[astroid_manager].ast_from_file, name[fpath]]] if <ast.BoolOp object at 0x7da1b02f32b0> begin[:] continue call[name[project].add_module, parameter[name[ast]]] return[name[project]]
keyword[def] identifier[project_from_files] ( identifier[files] , identifier[func_wrapper] = identifier[_astroid_wrapper] , identifier[project_name] = literal[string] , identifier[black_list] =( literal[string] ,) ): literal[string] identifier[astroid_manager] = identifier[manager] . identifier[AstroidManager] () identifier[project] = identifier[Project] ( identifier[project_name] ) keyword[for] identifier[something] keyword[in] identifier[files] : keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[something] ): identifier[fpath] = identifier[modutils] . identifier[file_from_modpath] ( identifier[something] . identifier[split] ( literal[string] )) keyword[elif] identifier[os] . identifier[path] . identifier[isdir] ( identifier[something] ): identifier[fpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[something] , literal[string] ) keyword[else] : identifier[fpath] = identifier[something] identifier[ast] = identifier[func_wrapper] ( identifier[astroid_manager] . identifier[ast_from_file] , identifier[fpath] ) keyword[if] identifier[ast] keyword[is] keyword[None] : keyword[continue] identifier[project] . identifier[path] = identifier[project] . identifier[path] keyword[or] identifier[ast] . identifier[file] identifier[project] . identifier[add_module] ( identifier[ast] ) identifier[base_name] = identifier[ast] . identifier[name] keyword[if] identifier[ast] . identifier[package] keyword[and] identifier[something] . identifier[find] ( literal[string] )==- literal[int] : keyword[for] identifier[fpath] keyword[in] identifier[modutils] . identifier[get_module_files] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[ast] . identifier[file] ), identifier[black_list] ): identifier[ast] = identifier[func_wrapper] ( identifier[astroid_manager] . identifier[ast_from_file] , identifier[fpath] ) keyword[if] identifier[ast] keyword[is] keyword[None] keyword[or] identifier[ast] . 
identifier[name] == identifier[base_name] : keyword[continue] identifier[project] . identifier[add_module] ( identifier[ast] ) keyword[return] identifier[project]
def project_from_files(files, func_wrapper=_astroid_wrapper, project_name='no name', black_list=('CVS',)): """return a Project from a list of files or modules""" # build the project representation astroid_manager = manager.AstroidManager() project = Project(project_name) for something in files: if not os.path.exists(something): fpath = modutils.file_from_modpath(something.split('.')) # depends on [control=['if'], data=[]] elif os.path.isdir(something): fpath = os.path.join(something, '__init__.py') # depends on [control=['if'], data=[]] else: fpath = something ast = func_wrapper(astroid_manager.ast_from_file, fpath) if ast is None: continue # depends on [control=['if'], data=[]] # XXX why is first file defining the project.path ? project.path = project.path or ast.file project.add_module(ast) base_name = ast.name # recurse in package except if __init__ was explicitly given if ast.package and something.find('__init__') == -1: # recurse on others packages / modules if this is a package for fpath in modutils.get_module_files(os.path.dirname(ast.file), black_list): ast = func_wrapper(astroid_manager.ast_from_file, fpath) if ast is None or ast.name == base_name: continue # depends on [control=['if'], data=[]] project.add_module(ast) # depends on [control=['for'], data=['fpath']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['something']] return project
def _sub_latlon(self, other): ''' Called when subtracting a LatLon object from self ''' inv = self._pyproj_inv(other) heading = inv['heading_reverse'] distance = inv['distance'] return GeoVector(initial_heading = heading, distance = distance)
def function[_sub_latlon, parameter[self, other]]: constant[ Called when subtracting a LatLon object from self ] variable[inv] assign[=] call[name[self]._pyproj_inv, parameter[name[other]]] variable[heading] assign[=] call[name[inv]][constant[heading_reverse]] variable[distance] assign[=] call[name[inv]][constant[distance]] return[call[name[GeoVector], parameter[]]]
keyword[def] identifier[_sub_latlon] ( identifier[self] , identifier[other] ): literal[string] identifier[inv] = identifier[self] . identifier[_pyproj_inv] ( identifier[other] ) identifier[heading] = identifier[inv] [ literal[string] ] identifier[distance] = identifier[inv] [ literal[string] ] keyword[return] identifier[GeoVector] ( identifier[initial_heading] = identifier[heading] , identifier[distance] = identifier[distance] )
def _sub_latlon(self, other): """ Called when subtracting a LatLon object from self """ inv = self._pyproj_inv(other) heading = inv['heading_reverse'] distance = inv['distance'] return GeoVector(initial_heading=heading, distance=distance)
def send(self, tids, session, **kwargs): '''taobao.topats.delivery.send 异步批量物流发货api 使用指南:http://open.taobao.com/dev/index.php/ATS%E4%BD%BF%E7%94%A8%E6%8C%87%E5%8D%97 - 1.提供异步批量物流发货功能 - 2.一次最多发货40个订单 - 3.提交任务会进行初步任务校验,如果成功会返回任务号和创建时间,如果失败就报错 - 4.可以接收淘宝发出的任务完成消息,也可以过一段时间来取结果。获取结果接口为taobao.topats.result.get - 5.此api执行完成发送的通知消息格式为{"task":{"task_id":123456,"created":"2010-8-19"}}''' request = TOPRequest('taobao.postage.add') request['tids'] = tids for k, v in kwargs.iteritems(): if k not in ('company_codes', 'out_sids', 'seller_name', 'seller_area_id', 'seller_address', 'seller_zip', 'seller_phone', 'seller_mobile', 'order_types', 'memos') and v==None: continue request[k] = v self.create(self.execute(request, session)) return self.task
def function[send, parameter[self, tids, session]]: constant[taobao.topats.delivery.send 异步批量物流发货api 使用指南:http://open.taobao.com/dev/index.php/ATS%E4%BD%BF%E7%94%A8%E6%8C%87%E5%8D%97 - 1.提供异步批量物流发货功能 - 2.一次最多发货40个订单 - 3.提交任务会进行初步任务校验,如果成功会返回任务号和创建时间,如果失败就报错 - 4.可以接收淘宝发出的任务完成消息,也可以过一段时间来取结果。获取结果接口为taobao.topats.result.get - 5.此api执行完成发送的通知消息格式为{"task":{"task_id":123456,"created":"2010-8-19"}}] variable[request] assign[=] call[name[TOPRequest], parameter[constant[taobao.postage.add]]] call[name[request]][constant[tids]] assign[=] name[tids] for taget[tuple[[<ast.Name object at 0x7da1b25f95d0>, <ast.Name object at 0x7da1b25f9420>]]] in starred[call[name[kwargs].iteritems, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b25f97b0> begin[:] continue call[name[request]][name[k]] assign[=] name[v] call[name[self].create, parameter[call[name[self].execute, parameter[name[request], name[session]]]]] return[name[self].task]
keyword[def] identifier[send] ( identifier[self] , identifier[tids] , identifier[session] ,** identifier[kwargs] ): literal[string] identifier[request] = identifier[TOPRequest] ( literal[string] ) identifier[request] [ literal[string] ]= identifier[tids] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[iteritems] (): keyword[if] identifier[k] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ) keyword[and] identifier[v] == keyword[None] : keyword[continue] identifier[request] [ identifier[k] ]= identifier[v] identifier[self] . identifier[create] ( identifier[self] . identifier[execute] ( identifier[request] , identifier[session] )) keyword[return] identifier[self] . identifier[task]
def send(self, tids, session, **kwargs): """taobao.topats.delivery.send 异步批量物流发货api 使用指南:http://open.taobao.com/dev/index.php/ATS%E4%BD%BF%E7%94%A8%E6%8C%87%E5%8D%97 - 1.提供异步批量物流发货功能 - 2.一次最多发货40个订单 - 3.提交任务会进行初步任务校验,如果成功会返回任务号和创建时间,如果失败就报错 - 4.可以接收淘宝发出的任务完成消息,也可以过一段时间来取结果。获取结果接口为taobao.topats.result.get - 5.此api执行完成发送的通知消息格式为{"task":{"task_id":123456,"created":"2010-8-19"}}""" request = TOPRequest('taobao.postage.add') request['tids'] = tids for (k, v) in kwargs.iteritems(): if k not in ('company_codes', 'out_sids', 'seller_name', 'seller_area_id', 'seller_address', 'seller_zip', 'seller_phone', 'seller_mobile', 'order_types', 'memos') and v == None: continue # depends on [control=['if'], data=[]] request[k] = v # depends on [control=['for'], data=[]] self.create(self.execute(request, session)) return self.task
def write_env_vars(env_vars=None): # type: (dict) -> None """Write the dictionary env_vars in the system, as environment variables. Args: env_vars (): Returns: """ env_vars = env_vars or {} env_vars['PYTHONPATH'] = ':'.join(sys.path) for name, value in env_vars.items(): os.environ[name] = value
def function[write_env_vars, parameter[env_vars]]: constant[Write the dictionary env_vars in the system, as environment variables. Args: env_vars (): Returns: ] variable[env_vars] assign[=] <ast.BoolOp object at 0x7da1b16bd420> call[name[env_vars]][constant[PYTHONPATH]] assign[=] call[constant[:].join, parameter[name[sys].path]] for taget[tuple[[<ast.Name object at 0x7da1b16bcfa0>, <ast.Name object at 0x7da1b16be980>]]] in starred[call[name[env_vars].items, parameter[]]] begin[:] call[name[os].environ][name[name]] assign[=] name[value]
keyword[def] identifier[write_env_vars] ( identifier[env_vars] = keyword[None] ): literal[string] identifier[env_vars] = identifier[env_vars] keyword[or] {} identifier[env_vars] [ literal[string] ]= literal[string] . identifier[join] ( identifier[sys] . identifier[path] ) keyword[for] identifier[name] , identifier[value] keyword[in] identifier[env_vars] . identifier[items] (): identifier[os] . identifier[environ] [ identifier[name] ]= identifier[value]
def write_env_vars(env_vars=None): # type: (dict) -> None 'Write the dictionary env_vars in the system, as environment variables.\n\n Args:\n env_vars ():\n\n Returns:\n\n ' env_vars = env_vars or {} env_vars['PYTHONPATH'] = ':'.join(sys.path) for (name, value) in env_vars.items(): os.environ[name] = value # depends on [control=['for'], data=[]]
def peak_memory_usage(): """Return peak memory usage in MB""" if sys.platform.startswith('win'): p = psutil.Process() return p.memory_info().peak_wset / 1024 / 1024 mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss factor_mb = 1 / 1024 if sys.platform == 'darwin': factor_mb = 1 / (1024 * 1024) return mem * factor_mb
def function[peak_memory_usage, parameter[]]: constant[Return peak memory usage in MB] if call[name[sys].platform.startswith, parameter[constant[win]]] begin[:] variable[p] assign[=] call[name[psutil].Process, parameter[]] return[binary_operation[binary_operation[call[name[p].memory_info, parameter[]].peak_wset / constant[1024]] / constant[1024]]] variable[mem] assign[=] call[name[resource].getrusage, parameter[name[resource].RUSAGE_SELF]].ru_maxrss variable[factor_mb] assign[=] binary_operation[constant[1] / constant[1024]] if compare[name[sys].platform equal[==] constant[darwin]] begin[:] variable[factor_mb] assign[=] binary_operation[constant[1] / binary_operation[constant[1024] * constant[1024]]] return[binary_operation[name[mem] * name[factor_mb]]]
keyword[def] identifier[peak_memory_usage] (): literal[string] keyword[if] identifier[sys] . identifier[platform] . identifier[startswith] ( literal[string] ): identifier[p] = identifier[psutil] . identifier[Process] () keyword[return] identifier[p] . identifier[memory_info] (). identifier[peak_wset] / literal[int] / literal[int] identifier[mem] = identifier[resource] . identifier[getrusage] ( identifier[resource] . identifier[RUSAGE_SELF] ). identifier[ru_maxrss] identifier[factor_mb] = literal[int] / literal[int] keyword[if] identifier[sys] . identifier[platform] == literal[string] : identifier[factor_mb] = literal[int] /( literal[int] * literal[int] ) keyword[return] identifier[mem] * identifier[factor_mb]
def peak_memory_usage(): """Return peak memory usage in MB""" if sys.platform.startswith('win'): p = psutil.Process() return p.memory_info().peak_wset / 1024 / 1024 # depends on [control=['if'], data=[]] mem = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss factor_mb = 1 / 1024 if sys.platform == 'darwin': factor_mb = 1 / (1024 * 1024) # depends on [control=['if'], data=[]] return mem * factor_mb
def list_nsgs_all(access_token, subscription_id):
    '''List all network security groups in a subscription.

    Args:
        access_token (str): a valid Azure Authentication token.
        subscription_id (str): Azure subscription id.

    Returns:
        HTTP response. JSON body of all network security groups in a
        subscription.
    '''
    # Fix: resource type segment was miscased as 'networkSEcurityGroups';
    # ARM matches case-insensitively so behavior is unchanged, but the
    # canonical casing is 'networkSecurityGroups'.
    endpoint = ''.join([get_rm_endpoint(),
                        '/subscriptions/', subscription_id,
                        '/providers/Microsoft.Network/',
                        'networkSecurityGroups?api-version=', NETWORK_API])
    return do_get(endpoint, access_token)
def function[list_nsgs_all, parameter[access_token, subscription_id]]: constant[List all network security groups in a subscription. Args: access_token (str): a valid Azure Authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body of all network security groups in a subscription. ] variable[endpoint] assign[=] call[constant[].join, parameter[list[[<ast.Call object at 0x7da1b04ca2c0>, <ast.Constant object at 0x7da1b05698d0>, <ast.Name object at 0x7da1b0568430>, <ast.Constant object at 0x7da1b056a6b0>, <ast.Constant object at 0x7da1b05698a0>, <ast.Name object at 0x7da1b056b8b0>]]]] return[call[name[do_get], parameter[name[endpoint], name[access_token]]]]
keyword[def] identifier[list_nsgs_all] ( identifier[access_token] , identifier[subscription_id] ): literal[string] identifier[endpoint] = literal[string] . identifier[join] ([ identifier[get_rm_endpoint] (), literal[string] , identifier[subscription_id] , literal[string] , literal[string] , identifier[NETWORK_API] ]) keyword[return] identifier[do_get] ( identifier[endpoint] , identifier[access_token] )
def list_nsgs_all(access_token, subscription_id): """List all network security groups in a subscription. Args: access_token (str): a valid Azure Authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body of all network security groups in a subscription. """ endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/providers/Microsoft.Network/', 'networkSEcurityGroups?api-version=', NETWORK_API]) return do_get(endpoint, access_token)
def phrase_replace(self, replace_dict):
    """
    Replace phrases with single token, mapping defined in replace_dict
    """
    def _collapse(token_seq):
        # Pad with spaces so each phrase can be matched on word boundaries.
        joined = ' ' + ' '.join(token_seq)
        for phrase, replacement in replace_dict.items():
            joined = joined.replace(" " + phrase + " ", " " + replacement + " ")
        return joined.split()

    self.stems = [_collapse(tokens) for tokens in self.stems]
def function[phrase_replace, parameter[self, replace_dict]]: constant[ Replace phrases with single token, mapping defined in replace_dict ] def function[r, parameter[tokens]]: variable[text] assign[=] binary_operation[constant[ ] + call[constant[ ].join, parameter[name[tokens]]]] for taget[tuple[[<ast.Name object at 0x7da1b0608fd0>, <ast.Name object at 0x7da1b0608b80>]]] in starred[call[name[replace_dict].items, parameter[]]] begin[:] variable[text] assign[=] call[name[text].replace, parameter[binary_operation[binary_operation[constant[ ] + name[k]] + constant[ ]], binary_operation[binary_operation[constant[ ] + name[v]] + constant[ ]]]] return[call[name[text].split, parameter[]]] name[self].stems assign[=] call[name[list], parameter[call[name[map], parameter[name[r], name[self].stems]]]]
keyword[def] identifier[phrase_replace] ( identifier[self] , identifier[replace_dict] ): literal[string] keyword[def] identifier[r] ( identifier[tokens] ): identifier[text] = literal[string] + literal[string] . identifier[join] ( identifier[tokens] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[replace_dict] . identifier[items] (): identifier[text] = identifier[text] . identifier[replace] ( literal[string] + identifier[k] + literal[string] , literal[string] + identifier[v] + literal[string] ) keyword[return] identifier[text] . identifier[split] () identifier[self] . identifier[stems] = identifier[list] ( identifier[map] ( identifier[r] , identifier[self] . identifier[stems] ))
def phrase_replace(self, replace_dict): """ Replace phrases with single token, mapping defined in replace_dict """ def r(tokens): text = ' ' + ' '.join(tokens) for (k, v) in replace_dict.items(): text = text.replace(' ' + k + ' ', ' ' + v + ' ') # depends on [control=['for'], data=[]] return text.split() self.stems = list(map(r, self.stems))
def get_default_field_names(self, declared_fields, model_info):
    """
    Return the default list of field names that will be used if the
    `Meta.fields` option is not specified.
    """
    # Primary key first, then declared serializer fields, then model
    # fields, then forward relations (dict iteration preserves order).
    names = [model_info.pk.name]
    names.extend(declared_fields)
    names.extend(model_info.fields)
    names.extend(model_info.forward_relations)
    return names
def function[get_default_field_names, parameter[self, declared_fields, model_info]]: constant[ Return the default list of field names that will be used if the `Meta.fields` option is not specified. ] return[binary_operation[binary_operation[binary_operation[list[[<ast.Attribute object at 0x7da18dc9ba90>]] + call[name[list], parameter[call[name[declared_fields].keys, parameter[]]]]] + call[name[list], parameter[call[name[model_info].fields.keys, parameter[]]]]] + call[name[list], parameter[call[name[model_info].forward_relations.keys, parameter[]]]]]]
keyword[def] identifier[get_default_field_names] ( identifier[self] , identifier[declared_fields] , identifier[model_info] ): literal[string] keyword[return] ( [ identifier[model_info] . identifier[pk] . identifier[name] ]+ identifier[list] ( identifier[declared_fields] . identifier[keys] ())+ identifier[list] ( identifier[model_info] . identifier[fields] . identifier[keys] ())+ identifier[list] ( identifier[model_info] . identifier[forward_relations] . identifier[keys] ()) )
def get_default_field_names(self, declared_fields, model_info): """ Return the default list of field names that will be used if the `Meta.fields` option is not specified. """ return [model_info.pk.name] + list(declared_fields.keys()) + list(model_info.fields.keys()) + list(model_info.forward_relations.keys())
def p_args(self, p):
    """args : LPAR pos_args_list COMMA kw_args RPAR
            | LPAR pos_args_list RPAR
            | LPAR kw_args RPAR
            | LPAR RPAR
            | empty"""
    # p[0] is always a (positional_args, keyword_args) pair.
    if len(p) <= 3:
        # 'LPAR RPAR' or 'empty': no arguments at all.
        p[0] = ([], {})
    elif p[3] == ',':
        # Both positional and keyword arguments present.
        p[0] = (p[2], p[4])
    elif isinstance(p[2], dict):
        # Keyword arguments only.
        p[0] = ([], p[2])
    else:
        # Positional arguments only.
        p[0] = (p[2], {})
def function[p_args, parameter[self, p]]: constant[args : LPAR pos_args_list COMMA kw_args RPAR | LPAR pos_args_list RPAR | LPAR kw_args RPAR | LPAR RPAR | empty] if compare[call[name[len], parameter[name[p]]] greater[>] constant[3]] begin[:] if compare[call[name[p]][constant[3]] equal[==] constant[,]] begin[:] call[name[p]][constant[0]] assign[=] tuple[[<ast.Subscript object at 0x7da18f58e920>, <ast.Subscript object at 0x7da18f58ce20>]]
keyword[def] identifier[p_args] ( identifier[self] , identifier[p] ): literal[string] keyword[if] identifier[len] ( identifier[p] )> literal[int] : keyword[if] identifier[p] [ literal[int] ]== literal[string] : identifier[p] [ literal[int] ]=( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]) keyword[elif] identifier[isinstance] ( identifier[p] [ literal[int] ], identifier[dict] ): identifier[p] [ literal[int] ]=([], identifier[p] [ literal[int] ]) keyword[else] : identifier[p] [ literal[int] ]=( identifier[p] [ literal[int] ],{}) keyword[else] : identifier[p] [ literal[int] ]=([],{})
def p_args(self, p): """args : LPAR pos_args_list COMMA kw_args RPAR | LPAR pos_args_list RPAR | LPAR kw_args RPAR | LPAR RPAR | empty""" if len(p) > 3: if p[3] == ',': p[0] = (p[2], p[4]) # depends on [control=['if'], data=[]] elif isinstance(p[2], dict): p[0] = ([], p[2]) # depends on [control=['if'], data=[]] else: p[0] = (p[2], {}) # depends on [control=['if'], data=[]] else: p[0] = ([], {})
def main():
    '''Main routine: create or delete an Azure key vault from the CLI.

    Parses command-line arguments, verifies we are running in Azure cloud
    shell, obtains credentials, and performs the requested operation.
    '''
    # validate command line arguments
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('--add', '-a', action='store_true', default=False,
                            help='add a key vault')
    arg_parser.add_argument('--delete', '-d', action='store_true', default=False,
                            help='delete a key vault')
    arg_parser.add_argument('--name', '-n', required=True, action='store',
                            help='Name')
    arg_parser.add_argument('--rgname', '-g', required=True, action='store',
                            help='Resource Group Name')
    arg_parser.add_argument('--location', '-l', required=True, action='store',
                            help='Location, e.g. eastus')
    arg_parser.add_argument('--verbose', '-v', action='store_true', default=False,
                            help='Print operational details')
    args = arg_parser.parse_args()

    name = args.name
    rgname = args.rgname
    location = args.location

    # exactly one of --add / --delete must be specified
    if args.add is True and args.delete is True:
        sys.exit('Specify --add or --delete, not both.')
    if args.add is False and args.delete is False:
        sys.exit('No operation specified, use --add or --delete.')

    # require Azure cloud shell with an MSI endpoint available
    # (fix: the original bound os.environ['MSI_ENDPOINT'] to an unused local)
    if 'ACC_CLOUD' not in os.environ or 'MSI_ENDPOINT' not in os.environ:
        sys.exit('Not running in cloud shell or MSI_ENDPOINT not set')

    # get Azure auth token and subscription from the CLI profile
    # (fix: comments/messages previously claimed the MSI endpoint was used,
    # but the code calls the *_from_cli helpers)
    if args.verbose is True:
        print('Getting Azure token from CLI..')
    access_token = azurerm.get_access_token_from_cli()

    if args.verbose is True:
        print('Getting Azure subscription ID from CLI..')
    subscription_id = azurerm.get_subscription_from_cli()

    # execute specified operation
    if args.add is True:  # create a key vault
        # get Azure tenant ID
        if args.verbose is True:
            print('Getting list of tenant IDs...')
        tenants = azurerm.list_tenants(access_token)
        tenant_id = tenants['value'][0]['tenantId']
        if args.verbose is True:
            print('My tenantId = ' + tenant_id)

        # get Graph object ID for the signed-in identity
        if args.verbose is True:
            print('Querying graph...')
        object_id = azurerm.get_object_id_from_graph()
        if args.verbose is True:
            print('My object ID = ' + object_id)

        # create key vault
        ret = azurerm.create_keyvault(access_token, subscription_id, rgname,
                                      name, location, tenant_id=tenant_id,
                                      object_id=object_id)
        if ret.status_code == 200:
            # fix: 'Successsfully' typo in user-facing message
            print('Successfully created key vault: ' + name)
            print('Vault URI: ' + ret.json()['properties']['vaultUri'])
        else:
            print('Return code ' + str(ret.status_code) +
                  ' from create_keyvault().')
            print(ret.text)
    else:  # delete named key vault
        ret = azurerm.delete_keyvault(access_token, subscription_id, rgname,
                                      name)
        if ret.status_code == 200:
            # fix: 'Successsfully' typo in user-facing message
            print('Successfully deleted key vault: ' + name)
        else:
            print('Return code ' + str(ret.status_code) +
                  ' from delete_keyvault().')
            print(ret.text)
def function[main, parameter[]]: constant[Main routine.] variable[arg_parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[arg_parser].add_argument, parameter[constant[--add], constant[-a]]] call[name[arg_parser].add_argument, parameter[constant[--delete], constant[-d]]] call[name[arg_parser].add_argument, parameter[constant[--name], constant[-n]]] call[name[arg_parser].add_argument, parameter[constant[--rgname], constant[-g]]] call[name[arg_parser].add_argument, parameter[constant[--location], constant[-l]]] call[name[arg_parser].add_argument, parameter[constant[--verbose], constant[-v]]] variable[args] assign[=] call[name[arg_parser].parse_args, parameter[]] variable[name] assign[=] name[args].name variable[rgname] assign[=] name[args].rgname variable[location] assign[=] name[args].location if <ast.BoolOp object at 0x7da1b0579000> begin[:] call[name[sys].exit, parameter[constant[Specify --add or --delete, not both.]]] if <ast.BoolOp object at 0x7da1b057a260> begin[:] call[name[sys].exit, parameter[constant[No operation specified, use --add or --delete.]]] if <ast.BoolOp object at 0x7da1b057ab30> begin[:] variable[endpoint] assign[=] call[name[os].environ][constant[MSI_ENDPOINT]] if compare[name[args].verbose is constant[True]] begin[:] call[name[print], parameter[constant[Getting Azure token from MSI endpoint..]]] variable[access_token] assign[=] call[name[azurerm].get_access_token_from_cli, parameter[]] if compare[name[args].verbose is constant[True]] begin[:] call[name[print], parameter[constant[Getting Azure subscription ID from MSI endpoint..]]] variable[subscription_id] assign[=] call[name[azurerm].get_subscription_from_cli, parameter[]] if compare[name[args].add is constant[True]] begin[:] if compare[name[args].verbose is constant[True]] begin[:] call[name[print], parameter[constant[Getting list of tenant IDs...]]] variable[tenants] assign[=] call[name[azurerm].list_tenants, parameter[name[access_token]]] variable[tenant_id] 
assign[=] call[call[call[name[tenants]][constant[value]]][constant[0]]][constant[tenantId]] if compare[name[args].verbose is constant[True]] begin[:] call[name[print], parameter[binary_operation[constant[My tenantId = ] + name[tenant_id]]]] if compare[name[args].verbose is constant[True]] begin[:] call[name[print], parameter[constant[Querying graph...]]] variable[object_id] assign[=] call[name[azurerm].get_object_id_from_graph, parameter[]] if compare[name[args].verbose is constant[True]] begin[:] call[name[print], parameter[binary_operation[constant[My object ID = ] + name[object_id]]]] variable[ret] assign[=] call[name[azurerm].create_keyvault, parameter[name[access_token], name[subscription_id], name[rgname], name[name], name[location]]] if compare[name[ret].status_code equal[==] constant[200]] begin[:] call[name[print], parameter[binary_operation[constant[Successsfully created key vault: ] + name[name]]]] call[name[print], parameter[binary_operation[constant[Vault URI: ] + call[call[call[name[ret].json, parameter[]]][constant[properties]]][constant[vaultUri]]]]]
keyword[def] identifier[main] (): literal[string] identifier[arg_parser] = identifier[argparse] . identifier[ArgumentParser] () identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[required] = keyword[True] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[required] = keyword[True] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[required] = keyword[True] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[arg_parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[args] = identifier[arg_parser] . identifier[parse_args] () identifier[name] = identifier[args] . identifier[name] identifier[rgname] = identifier[args] . identifier[rgname] identifier[location] = identifier[args] . identifier[location] keyword[if] identifier[args] . identifier[add] keyword[is] keyword[True] keyword[and] identifier[args] . identifier[delete] keyword[is] keyword[True] : identifier[sys] . identifier[exit] ( literal[string] ) keyword[if] identifier[args] . identifier[add] keyword[is] keyword[False] keyword[and] identifier[args] . identifier[delete] keyword[is] keyword[False] : identifier[sys] . 
identifier[exit] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[os] . identifier[environ] keyword[and] literal[string] keyword[in] identifier[os] . identifier[environ] : identifier[endpoint] = identifier[os] . identifier[environ] [ literal[string] ] keyword[else] : identifier[sys] . identifier[exit] ( literal[string] ) keyword[if] identifier[args] . identifier[verbose] keyword[is] keyword[True] : identifier[print] ( literal[string] ) identifier[access_token] = identifier[azurerm] . identifier[get_access_token_from_cli] () keyword[if] identifier[args] . identifier[verbose] keyword[is] keyword[True] : identifier[print] ( literal[string] ) identifier[subscription_id] = identifier[azurerm] . identifier[get_subscription_from_cli] () keyword[if] identifier[args] . identifier[add] keyword[is] keyword[True] : keyword[if] identifier[args] . identifier[verbose] keyword[is] keyword[True] : identifier[print] ( literal[string] ) identifier[tenants] = identifier[azurerm] . identifier[list_tenants] ( identifier[access_token] ) identifier[tenant_id] = identifier[tenants] [ literal[string] ][ literal[int] ][ literal[string] ] keyword[if] identifier[args] . identifier[verbose] keyword[is] keyword[True] : identifier[print] ( literal[string] + identifier[tenant_id] ) keyword[if] identifier[args] . identifier[verbose] keyword[is] keyword[True] : identifier[print] ( literal[string] ) identifier[object_id] = identifier[azurerm] . identifier[get_object_id_from_graph] () keyword[if] identifier[args] . identifier[verbose] keyword[is] keyword[True] : identifier[print] ( literal[string] + identifier[object_id] ) identifier[ret] = identifier[azurerm] . identifier[create_keyvault] ( identifier[access_token] , identifier[subscription_id] , identifier[rgname] , identifier[name] , identifier[location] , identifier[tenant_id] = identifier[tenant_id] , identifier[object_id] = identifier[object_id] ) keyword[if] identifier[ret] . 
identifier[status_code] == literal[int] : identifier[print] ( literal[string] + identifier[name] ) identifier[print] ( literal[string] + identifier[ret] . identifier[json] ()[ literal[string] ][ literal[string] ]) keyword[else] : identifier[print] ( literal[string] + identifier[str] ( identifier[ret] . identifier[status_code] )+ literal[string] ) identifier[print] ( identifier[ret] . identifier[text] ) keyword[else] : identifier[ret] = identifier[azurerm] . identifier[delete_keyvault] ( identifier[access_token] , identifier[subscription_id] , identifier[rgname] , identifier[name] ) keyword[if] identifier[ret] . identifier[status_code] == literal[int] : identifier[print] ( literal[string] + identifier[name] ) keyword[else] : identifier[print] ( literal[string] + identifier[str] ( identifier[ret] . identifier[status_code] )+ literal[string] ) identifier[print] ( identifier[ret] . identifier[text] )
def main(): """Main routine.""" # validate command line arguments arg_parser = argparse.ArgumentParser() arg_parser.add_argument('--add', '-a', action='store_true', default=False, help='add a key vault') arg_parser.add_argument('--delete', '-d', action='store_true', default=False, help='delete a key vault') arg_parser.add_argument('--name', '-n', required=True, action='store', help='Name') arg_parser.add_argument('--rgname', '-g', required=True, action='store', help='Resource Group Name') arg_parser.add_argument('--location', '-l', required=True, action='store', help='Location, e.g. eastus') arg_parser.add_argument('--verbose', '-v', action='store_true', default=False, help='Print operational details') args = arg_parser.parse_args() name = args.name rgname = args.rgname location = args.location if args.add is True and args.delete is True: sys.exit('Specify --add or --delete, not both.') # depends on [control=['if'], data=[]] if args.add is False and args.delete is False: sys.exit('No operation specified, use --add or --delete.') # depends on [control=['if'], data=[]] if 'ACC_CLOUD' in os.environ and 'MSI_ENDPOINT' in os.environ: endpoint = os.environ['MSI_ENDPOINT'] # depends on [control=['if'], data=[]] else: sys.exit('Not running in cloud shell or MSI_ENDPOINT not set') # get Azure auth token if args.verbose is True: print('Getting Azure token from MSI endpoint..') # depends on [control=['if'], data=[]] access_token = azurerm.get_access_token_from_cli() if args.verbose is True: print('Getting Azure subscription ID from MSI endpoint..') # depends on [control=['if'], data=[]] subscription_id = azurerm.get_subscription_from_cli() # execute specified operation if args.add is True: # create a key vault # get Azure tenant ID if args.verbose is True: print('Getting list of tenant IDs...') # depends on [control=['if'], data=[]] tenants = azurerm.list_tenants(access_token) tenant_id = tenants['value'][0]['tenantId'] if args.verbose is True: print('My tenantId = ' + 
tenant_id) # depends on [control=['if'], data=[]] # get Graph object ID if args.verbose is True: print('Querying graph...') # depends on [control=['if'], data=[]] object_id = azurerm.get_object_id_from_graph() if args.verbose is True: print('My object ID = ' + object_id) # depends on [control=['if'], data=[]] # create key vault ret = azurerm.create_keyvault(access_token, subscription_id, rgname, name, location, tenant_id=tenant_id, object_id=object_id) if ret.status_code == 200: print('Successsfully created key vault: ' + name) print('Vault URI: ' + ret.json()['properties']['vaultUri']) # depends on [control=['if'], data=[]] else: print('Return code ' + str(ret.status_code) + ' from create_keyvault().') print(ret.text) # depends on [control=['if'], data=[]] else: # else delete named key vault ret = azurerm.delete_keyvault(access_token, subscription_id, rgname, name) if ret.status_code == 200: print('Successsfully deleted key vault: ' + name) # depends on [control=['if'], data=[]] else: print('Return code ' + str(ret.status_code) + ' from delete_keyvault().') print(ret.text)
def generate_raml_docs(module, fields, shared_types, user=None, title="My API",
        version="v1", api_root="api", base_uri="http://mysite.com/{version}"):
    """Return a RAML file of a Pale module's documentation as a string.

    The user argument is optional. If included, it expects the user to be
    an object with an "is_admin" boolean attribute. Any endpoint protected
    with a "@requires_permission" decorator will require
    user.is_admin == True to display documentation on that endpoint.

    The arguments for 'title', 'version', and 'base_uri' are added to the
    RAML header info.
    """
    # Accumulate the RAML document in memory; returned as one string.
    output = StringIO()

    # Add the RAML header info
    output.write('#%RAML 1.0 \n')
    output.write('title: ' + title + ' \n')
    output.write('baseUri: ' + base_uri + ' \n')
    output.write('version: ' + version + '\n')
    output.write('mediaType: application/json\n\n')

    output.write('documentation:\n')
    output.write(' - title: Welcome\n')
    output.write(' content: |\n')
    output.write("""\
    Welcome to the Loudr API Docs.\n
    You'll find comprehensive documentation on our endpoints and resources here.
    """)

    output.write("\n###############\n# Resource Types:\n###############\n\n")
    output.write('types:\n')

    # Collect every field class from every submodule of `fields`; these are
    # the "basic" Pale types documented first.
    basic_fields = []
    for field_module in inspect.getmembers(fields, inspect.ismodule):
        for field_class in inspect.getmembers(field_module[1], inspect.isclass):
            basic_fields.append(field_class[1])
    pale_basic_types = generate_basic_type_docs(basic_fields, {})
    output.write("\n# Pale Basic Types:\n\n")
    output.write(pale_basic_types[0])

    # Shared types are documented next; generate_basic_type_docs is seeded
    # with the type map built for the basic types (pale_basic_types[1]).
    shared_fields = []
    for shared_type in shared_types:
        for field_class in inspect.getmembers(shared_type, inspect.isclass):
            shared_fields.append(field_class[1])
    pale_shared_types = generate_basic_type_docs(shared_fields,
                                                 pale_basic_types[1])
    output.write("\n# Pale Shared Types:\n\n")
    output.write(pale_shared_types[0])

    # Resource types declared by the API module itself.
    raml_resource_types = generate_raml_resource_types(module)
    output.write("\n# API Resource Types:\n\n")
    output.write(raml_resource_types)

    # Endpoint documentation; `user` gates admin-only endpoints (see docstring).
    raml_resources = generate_raml_resources(module, api_root, user)
    output.write("\n\n###############\n# API Endpoints:\n###############\n\n")
    output.write(raml_resources)

    raml_docs = output.getvalue()
    output.close()
    return raml_docs
def function[generate_raml_docs, parameter[module, fields, shared_types, user, title, version, api_root, base_uri]]: constant[Return a RAML file of a Pale module's documentation as a string. The user argument is optional. If included, it expects the user to be an object with an "is_admin" boolean attribute. Any endpoint protected with a "@requires_permission" decorator will require user.is_admin == True to display documentation on that endpoint. The arguments for 'title', 'version', and 'base_uri' are added to the RAML header info. ] variable[output] assign[=] call[name[StringIO], parameter[]] call[name[output].write, parameter[constant[#%RAML 1.0 ]]] call[name[output].write, parameter[binary_operation[binary_operation[constant[title: ] + name[title]] + constant[ ]]]] call[name[output].write, parameter[binary_operation[binary_operation[constant[baseUri: ] + name[base_uri]] + constant[ ]]]] call[name[output].write, parameter[binary_operation[binary_operation[constant[version: ] + name[version]] + constant[ ]]]] call[name[output].write, parameter[constant[mediaType: application/json ]]] call[name[output].write, parameter[constant[documentation: ]]] call[name[output].write, parameter[constant[ - title: Welcome ]]] call[name[output].write, parameter[constant[ content: | ]]] call[name[output].write, parameter[constant[ Welcome to the Loudr API Docs. You'll find comprehensive documentation on our endpoints and resources here. 
]]] call[name[output].write, parameter[constant[ ############### # Resource Types: ############### ]]] call[name[output].write, parameter[constant[types: ]]] variable[basic_fields] assign[=] list[[]] for taget[name[field_module]] in starred[call[name[inspect].getmembers, parameter[name[fields], name[inspect].ismodule]]] begin[:] for taget[name[field_class]] in starred[call[name[inspect].getmembers, parameter[call[name[field_module]][constant[1]], name[inspect].isclass]]] begin[:] call[name[basic_fields].append, parameter[call[name[field_class]][constant[1]]]] variable[pale_basic_types] assign[=] call[name[generate_basic_type_docs], parameter[name[basic_fields], dictionary[[], []]]] call[name[output].write, parameter[constant[ # Pale Basic Types: ]]] call[name[output].write, parameter[call[name[pale_basic_types]][constant[0]]]] variable[shared_fields] assign[=] list[[]] for taget[name[shared_type]] in starred[name[shared_types]] begin[:] for taget[name[field_class]] in starred[call[name[inspect].getmembers, parameter[name[shared_type], name[inspect].isclass]]] begin[:] call[name[shared_fields].append, parameter[call[name[field_class]][constant[1]]]] variable[pale_shared_types] assign[=] call[name[generate_basic_type_docs], parameter[name[shared_fields], call[name[pale_basic_types]][constant[1]]]] call[name[output].write, parameter[constant[ # Pale Shared Types: ]]] call[name[output].write, parameter[call[name[pale_shared_types]][constant[0]]]] variable[raml_resource_types] assign[=] call[name[generate_raml_resource_types], parameter[name[module]]] call[name[output].write, parameter[constant[ # API Resource Types: ]]] call[name[output].write, parameter[name[raml_resource_types]]] variable[raml_resources] assign[=] call[name[generate_raml_resources], parameter[name[module], name[api_root], name[user]]] call[name[output].write, parameter[constant[ ############### # API Endpoints: ############### ]]] call[name[output].write, parameter[name[raml_resources]]] 
variable[raml_docs] assign[=] call[name[output].getvalue, parameter[]] call[name[output].close, parameter[]] return[name[raml_docs]]
keyword[def] identifier[generate_raml_docs] ( identifier[module] , identifier[fields] , identifier[shared_types] , identifier[user] = keyword[None] , identifier[title] = literal[string] , identifier[version] = literal[string] , identifier[api_root] = literal[string] , identifier[base_uri] = literal[string] ): literal[string] identifier[output] = identifier[StringIO] () identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( literal[string] + identifier[title] + literal[string] ) identifier[output] . identifier[write] ( literal[string] + identifier[base_uri] + literal[string] ) identifier[output] . identifier[write] ( literal[string] + identifier[version] + literal[string] ) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( literal[string] ) identifier[basic_fields] =[] keyword[for] identifier[field_module] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[fields] , identifier[inspect] . identifier[ismodule] ): keyword[for] identifier[field_class] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[field_module] [ literal[int] ], identifier[inspect] . identifier[isclass] ): identifier[basic_fields] . identifier[append] ( identifier[field_class] [ literal[int] ]) identifier[pale_basic_types] = identifier[generate_basic_type_docs] ( identifier[basic_fields] ,{}) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . 
identifier[write] ( identifier[pale_basic_types] [ literal[int] ]) identifier[shared_fields] =[] keyword[for] identifier[shared_type] keyword[in] identifier[shared_types] : keyword[for] identifier[field_class] keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[shared_type] , identifier[inspect] . identifier[isclass] ): identifier[shared_fields] . identifier[append] ( identifier[field_class] [ literal[int] ]) identifier[pale_shared_types] = identifier[generate_basic_type_docs] ( identifier[shared_fields] , identifier[pale_basic_types] [ literal[int] ]) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( identifier[pale_shared_types] [ literal[int] ]) identifier[raml_resource_types] = identifier[generate_raml_resource_types] ( identifier[module] ) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( identifier[raml_resource_types] ) identifier[raml_resources] = identifier[generate_raml_resources] ( identifier[module] , identifier[api_root] , identifier[user] ) identifier[output] . identifier[write] ( literal[string] ) identifier[output] . identifier[write] ( identifier[raml_resources] ) identifier[raml_docs] = identifier[output] . identifier[getvalue] () identifier[output] . identifier[close] () keyword[return] identifier[raml_docs]
def generate_raml_docs(module, fields, shared_types, user=None, title='My API', version='v1', api_root='api', base_uri='http://mysite.com/{version}'): """Return a RAML file of a Pale module's documentation as a string. The user argument is optional. If included, it expects the user to be an object with an "is_admin" boolean attribute. Any endpoint protected with a "@requires_permission" decorator will require user.is_admin == True to display documentation on that endpoint. The arguments for 'title', 'version', and 'base_uri' are added to the RAML header info. """ output = StringIO() # Add the RAML header info output.write('#%RAML 1.0 \n') output.write('title: ' + title + ' \n') output.write('baseUri: ' + base_uri + ' \n') output.write('version: ' + version + '\n') output.write('mediaType: application/json\n\n') output.write('documentation:\n') output.write(' - title: Welcome\n') output.write(' content: |\n') output.write(" Welcome to the Loudr API Docs.\n\n You'll find comprehensive documentation on our endpoints and resources here.\n ") output.write('\n###############\n# Resource Types:\n###############\n\n') output.write('types:\n') basic_fields = [] for field_module in inspect.getmembers(fields, inspect.ismodule): for field_class in inspect.getmembers(field_module[1], inspect.isclass): basic_fields.append(field_class[1]) # depends on [control=['for'], data=['field_class']] # depends on [control=['for'], data=['field_module']] pale_basic_types = generate_basic_type_docs(basic_fields, {}) output.write('\n# Pale Basic Types:\n\n') output.write(pale_basic_types[0]) shared_fields = [] for shared_type in shared_types: for field_class in inspect.getmembers(shared_type, inspect.isclass): shared_fields.append(field_class[1]) # depends on [control=['for'], data=['field_class']] # depends on [control=['for'], data=['shared_type']] pale_shared_types = generate_basic_type_docs(shared_fields, pale_basic_types[1]) output.write('\n# Pale Shared Types:\n\n') 
output.write(pale_shared_types[0]) raml_resource_types = generate_raml_resource_types(module) output.write('\n# API Resource Types:\n\n') output.write(raml_resource_types) raml_resources = generate_raml_resources(module, api_root, user) output.write('\n\n###############\n# API Endpoints:\n###############\n\n') output.write(raml_resources) raml_docs = output.getvalue() output.close() return raml_docs
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None): """ Decorator for views that checks that the user is logged in, redirecting to the log in page if necessary. """ def decorator(view_func): @functools.wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if is_authenticated(request.user): return view_func(request, *args, **kwargs) return handle_redirect_to_login( request, redirect_field_name=redirect_field_name, login_url=login_url ) return _wrapped_view if func: return decorator(func) return decorator
def function[login_required, parameter[func, redirect_field_name, login_url]]: constant[ Decorator for views that checks that the user is logged in, redirecting to the log in page if necessary. ] def function[decorator, parameter[view_func]]: def function[_wrapped_view, parameter[request]]: if call[name[is_authenticated], parameter[name[request].user]] begin[:] return[call[name[view_func], parameter[name[request], <ast.Starred object at 0x7da20c796020>]]] return[call[name[handle_redirect_to_login], parameter[name[request]]]] return[name[_wrapped_view]] if name[func] begin[:] return[call[name[decorator], parameter[name[func]]]] return[name[decorator]]
keyword[def] identifier[login_required] ( identifier[func] = keyword[None] , identifier[redirect_field_name] = identifier[REDIRECT_FIELD_NAME] , identifier[login_url] = keyword[None] ): literal[string] keyword[def] identifier[decorator] ( identifier[view_func] ): @ identifier[functools] . identifier[wraps] ( identifier[view_func] , identifier[assigned] = identifier[available_attrs] ( identifier[view_func] )) keyword[def] identifier[_wrapped_view] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ): keyword[if] identifier[is_authenticated] ( identifier[request] . identifier[user] ): keyword[return] identifier[view_func] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[handle_redirect_to_login] ( identifier[request] , identifier[redirect_field_name] = identifier[redirect_field_name] , identifier[login_url] = identifier[login_url] ) keyword[return] identifier[_wrapped_view] keyword[if] identifier[func] : keyword[return] identifier[decorator] ( identifier[func] ) keyword[return] identifier[decorator]
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None): """ Decorator for views that checks that the user is logged in, redirecting to the log in page if necessary. """ def decorator(view_func): @functools.wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): if is_authenticated(request.user): return view_func(request, *args, **kwargs) # depends on [control=['if'], data=[]] return handle_redirect_to_login(request, redirect_field_name=redirect_field_name, login_url=login_url) return _wrapped_view if func: return decorator(func) # depends on [control=['if'], data=[]] return decorator
def get_params(): """ get the cmdline params """ parser = argparse.ArgumentParser() parser.add_argument("--connect-timeout", type=float, default=10.0, help="ZK connect timeout") parser.add_argument("--run-once", type=str, default="", help="Run a command non-interactively and exit") parser.add_argument("--run-from-stdin", action="store_true", default=False, help="Read cmds from stdin, run them and exit") parser.add_argument("--sync-connect", action="store_true", default=False, help="Connect synchronously.") parser.add_argument("--readonly", action="store_true", default=False, help="Enable readonly.") parser.add_argument("--tunnel", type=str, help="Create a ssh tunnel via this host", default=None) parser.add_argument("--version", action="store_true", default=False, help="Display version and exit.") parser.add_argument("hosts", nargs="*", help="ZK hosts to connect") params = parser.parse_args() return CLIParams( params.connect_timeout, params.run_once, params.run_from_stdin, params.sync_connect, params.hosts, params.readonly, params.tunnel, params.version )
def function[get_params, parameter[]]: constant[ get the cmdline params ] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[--connect-timeout]]] call[name[parser].add_argument, parameter[constant[--run-once]]] call[name[parser].add_argument, parameter[constant[--run-from-stdin]]] call[name[parser].add_argument, parameter[constant[--sync-connect]]] call[name[parser].add_argument, parameter[constant[--readonly]]] call[name[parser].add_argument, parameter[constant[--tunnel]]] call[name[parser].add_argument, parameter[constant[--version]]] call[name[parser].add_argument, parameter[constant[hosts]]] variable[params] assign[=] call[name[parser].parse_args, parameter[]] return[call[name[CLIParams], parameter[name[params].connect_timeout, name[params].run_once, name[params].run_from_stdin, name[params].sync_connect, name[params].hosts, name[params].readonly, name[params].tunnel, name[params].version]]]
keyword[def] identifier[get_params] (): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] () identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[float] , identifier[default] = literal[int] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[default] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] , identifier[default] = keyword[None] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[nargs] = literal[string] , identifier[help] = literal[string] ) identifier[params] = identifier[parser] . identifier[parse_args] () keyword[return] identifier[CLIParams] ( identifier[params] . identifier[connect_timeout] , identifier[params] . identifier[run_once] , identifier[params] . identifier[run_from_stdin] , identifier[params] . identifier[sync_connect] , identifier[params] . identifier[hosts] , identifier[params] . identifier[readonly] , identifier[params] . identifier[tunnel] , identifier[params] . 
identifier[version] )
def get_params(): """ get the cmdline params """ parser = argparse.ArgumentParser() parser.add_argument('--connect-timeout', type=float, default=10.0, help='ZK connect timeout') parser.add_argument('--run-once', type=str, default='', help='Run a command non-interactively and exit') parser.add_argument('--run-from-stdin', action='store_true', default=False, help='Read cmds from stdin, run them and exit') parser.add_argument('--sync-connect', action='store_true', default=False, help='Connect synchronously.') parser.add_argument('--readonly', action='store_true', default=False, help='Enable readonly.') parser.add_argument('--tunnel', type=str, help='Create a ssh tunnel via this host', default=None) parser.add_argument('--version', action='store_true', default=False, help='Display version and exit.') parser.add_argument('hosts', nargs='*', help='ZK hosts to connect') params = parser.parse_args() return CLIParams(params.connect_timeout, params.run_once, params.run_from_stdin, params.sync_connect, params.hosts, params.readonly, params.tunnel, params.version)
def isActiveDashboardOverlay(self, ulOverlayHandle): """returns true if the dashboard is visible and the specified overlay is the active system Overlay""" fn = self.function_table.isActiveDashboardOverlay result = fn(ulOverlayHandle) return result
def function[isActiveDashboardOverlay, parameter[self, ulOverlayHandle]]: constant[returns true if the dashboard is visible and the specified overlay is the active system Overlay] variable[fn] assign[=] name[self].function_table.isActiveDashboardOverlay variable[result] assign[=] call[name[fn], parameter[name[ulOverlayHandle]]] return[name[result]]
keyword[def] identifier[isActiveDashboardOverlay] ( identifier[self] , identifier[ulOverlayHandle] ): literal[string] identifier[fn] = identifier[self] . identifier[function_table] . identifier[isActiveDashboardOverlay] identifier[result] = identifier[fn] ( identifier[ulOverlayHandle] ) keyword[return] identifier[result]
def isActiveDashboardOverlay(self, ulOverlayHandle): """returns true if the dashboard is visible and the specified overlay is the active system Overlay""" fn = self.function_table.isActiveDashboardOverlay result = fn(ulOverlayHandle) return result
def _set_selected_action(self, action): """Setter method for the selected_action property.""" assert action in self._action_sets if self._selected_action is not None: raise ValueError("The action has already been selected.") self._selected_action = action
def function[_set_selected_action, parameter[self, action]]: constant[Setter method for the selected_action property.] assert[compare[name[action] in name[self]._action_sets]] if compare[name[self]._selected_action is_not constant[None]] begin[:] <ast.Raise object at 0x7da1b0fe9060> name[self]._selected_action assign[=] name[action]
keyword[def] identifier[_set_selected_action] ( identifier[self] , identifier[action] ): literal[string] keyword[assert] identifier[action] keyword[in] identifier[self] . identifier[_action_sets] keyword[if] identifier[self] . identifier[_selected_action] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[_selected_action] = identifier[action]
def _set_selected_action(self, action): """Setter method for the selected_action property.""" assert action in self._action_sets if self._selected_action is not None: raise ValueError('The action has already been selected.') # depends on [control=['if'], data=[]] self._selected_action = action
def imread(filename): ''' Like cv2.imread This function will make sure filename exists ''' im = cv2.imread(filename) if im is None: raise RuntimeError("file: '%s' not exists" % filename) return im
def function[imread, parameter[filename]]: constant[ Like cv2.imread This function will make sure filename exists ] variable[im] assign[=] call[name[cv2].imread, parameter[name[filename]]] if compare[name[im] is constant[None]] begin[:] <ast.Raise object at 0x7da20c6a9ff0> return[name[im]]
keyword[def] identifier[imread] ( identifier[filename] ): literal[string] identifier[im] = identifier[cv2] . identifier[imread] ( identifier[filename] ) keyword[if] identifier[im] keyword[is] keyword[None] : keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[filename] ) keyword[return] identifier[im]
def imread(filename): """ Like cv2.imread This function will make sure filename exists """ im = cv2.imread(filename) if im is None: raise RuntimeError("file: '%s' not exists" % filename) # depends on [control=['if'], data=[]] return im
def encode(self, obj, qualifier: str): """Encodes a dictionary-like object to a Lua string :param qualifier: :param obj: object to encode :return: valid Lua string """ LOGGER.debug('encoding dictionary to text') if not obj: if qualifier.replace('=', '').rstrip() == 'mapResource': # Accept empty mapResource return '{}\n{{\n}} -- end of {}\n'.format(qualifier, qualifier.replace('=', '').rstrip()) else: LOGGER.error('{}\n{{\n}} -- end of {}\n'.format(qualifier, qualifier.replace('=', '').rstrip())) raise SLTPEmptyObjectError(qualifier) self.depth = 0 out = [] s = self.__encode(obj) lines = s.split(self.newline) for line in lines: m = self.line_end.match(line) if m: out.append('{},{}'.format(m.group('intro'), m.group('comment'))) else: out.append(line) return '{}{} -- end of {}\n'.format(qualifier, self.newline.join(out), qualifier.replace('=', '').rstrip())
def function[encode, parameter[self, obj, qualifier]]: constant[Encodes a dictionary-like object to a Lua string :param qualifier: :param obj: object to encode :return: valid Lua string ] call[name[LOGGER].debug, parameter[constant[encoding dictionary to text]]] if <ast.UnaryOp object at 0x7da1b1407340> begin[:] if compare[call[call[name[qualifier].replace, parameter[constant[=], constant[]]].rstrip, parameter[]] equal[==] constant[mapResource]] begin[:] return[call[constant[{} {{ }} -- end of {} ].format, parameter[name[qualifier], call[call[name[qualifier].replace, parameter[constant[=], constant[]]].rstrip, parameter[]]]]] name[self].depth assign[=] constant[0] variable[out] assign[=] list[[]] variable[s] assign[=] call[name[self].__encode, parameter[name[obj]]] variable[lines] assign[=] call[name[s].split, parameter[name[self].newline]] for taget[name[line]] in starred[name[lines]] begin[:] variable[m] assign[=] call[name[self].line_end.match, parameter[name[line]]] if name[m] begin[:] call[name[out].append, parameter[call[constant[{},{}].format, parameter[call[name[m].group, parameter[constant[intro]]], call[name[m].group, parameter[constant[comment]]]]]]] return[call[constant[{}{} -- end of {} ].format, parameter[name[qualifier], call[name[self].newline.join, parameter[name[out]]], call[call[name[qualifier].replace, parameter[constant[=], constant[]]].rstrip, parameter[]]]]]
keyword[def] identifier[encode] ( identifier[self] , identifier[obj] , identifier[qualifier] : identifier[str] ): literal[string] identifier[LOGGER] . identifier[debug] ( literal[string] ) keyword[if] keyword[not] identifier[obj] : keyword[if] identifier[qualifier] . identifier[replace] ( literal[string] , literal[string] ). identifier[rstrip] ()== literal[string] : keyword[return] literal[string] . identifier[format] ( identifier[qualifier] , identifier[qualifier] . identifier[replace] ( literal[string] , literal[string] ). identifier[rstrip] ()) keyword[else] : identifier[LOGGER] . identifier[error] ( literal[string] . identifier[format] ( identifier[qualifier] , identifier[qualifier] . identifier[replace] ( literal[string] , literal[string] ). identifier[rstrip] ())) keyword[raise] identifier[SLTPEmptyObjectError] ( identifier[qualifier] ) identifier[self] . identifier[depth] = literal[int] identifier[out] =[] identifier[s] = identifier[self] . identifier[__encode] ( identifier[obj] ) identifier[lines] = identifier[s] . identifier[split] ( identifier[self] . identifier[newline] ) keyword[for] identifier[line] keyword[in] identifier[lines] : identifier[m] = identifier[self] . identifier[line_end] . identifier[match] ( identifier[line] ) keyword[if] identifier[m] : identifier[out] . identifier[append] ( literal[string] . identifier[format] ( identifier[m] . identifier[group] ( literal[string] ), identifier[m] . identifier[group] ( literal[string] ))) keyword[else] : identifier[out] . identifier[append] ( identifier[line] ) keyword[return] literal[string] . identifier[format] ( identifier[qualifier] , identifier[self] . identifier[newline] . identifier[join] ( identifier[out] ), identifier[qualifier] . identifier[replace] ( literal[string] , literal[string] ). identifier[rstrip] ())
def encode(self, obj, qualifier: str): """Encodes a dictionary-like object to a Lua string :param qualifier: :param obj: object to encode :return: valid Lua string """ LOGGER.debug('encoding dictionary to text') if not obj: if qualifier.replace('=', '').rstrip() == 'mapResource': # Accept empty mapResource return '{}\n{{\n}} -- end of {}\n'.format(qualifier, qualifier.replace('=', '').rstrip()) # depends on [control=['if'], data=[]] else: LOGGER.error('{}\n{{\n}} -- end of {}\n'.format(qualifier, qualifier.replace('=', '').rstrip())) raise SLTPEmptyObjectError(qualifier) # depends on [control=['if'], data=[]] self.depth = 0 out = [] s = self.__encode(obj) lines = s.split(self.newline) for line in lines: m = self.line_end.match(line) if m: out.append('{},{}'.format(m.group('intro'), m.group('comment'))) # depends on [control=['if'], data=[]] else: out.append(line) # depends on [control=['for'], data=['line']] return '{}{} -- end of {}\n'.format(qualifier, self.newline.join(out), qualifier.replace('=', '').rstrip())
def _computeforceArray(self,dr_dx, dtheta_dx, dphi_dx, R, z, phi): """ NAME: _computeforceArray PURPOSE: evaluate the forces in the x direction for a given array of coordinates INPUT: dr_dx - the derivative of r with respect to the chosen variable x dtheta_dx - the derivative of theta with respect to the chosen variable x dphi_dx - the derivative of phi with respect to the chosen variable x R - Cylindrical Galactocentric radius z - vertical height phi - azimuth t - time OUTPUT: The forces in the x direction HISTORY: 2016-06-02 - Written - Aladdin """ R = nu.array(R,dtype=float); z = nu.array(z,dtype=float); phi = nu.array(phi,dtype=float); shape = (R*z*phi).shape if shape == (): dPhi_dr,dPhi_dtheta,dPhi_dphi = \ self._computeforce(R,z,phi) return dr_dx*dPhi_dr + dtheta_dx*dPhi_dtheta +dPhi_dphi*dphi_dx R = R*nu.ones(shape); z = z* nu.ones(shape); phi = phi* nu.ones(shape); force = nu.zeros(shape, float) dr_dx = dr_dx*nu.ones(shape); dtheta_dx = dtheta_dx*nu.ones(shape);dphi_dx = dphi_dx*nu.ones(shape); li = _cartesian(shape) for i in range(li.shape[0]): j = tuple(nu.split(li[i], li.shape[1])) dPhi_dr,dPhi_dtheta,dPhi_dphi = \ self._computeforce(R[j][0],z[j][0],phi[j][0]) force[j] = dr_dx[j][0]*dPhi_dr + dtheta_dx[j][0]*dPhi_dtheta +dPhi_dphi*dphi_dx[j][0] return force
def function[_computeforceArray, parameter[self, dr_dx, dtheta_dx, dphi_dx, R, z, phi]]: constant[ NAME: _computeforceArray PURPOSE: evaluate the forces in the x direction for a given array of coordinates INPUT: dr_dx - the derivative of r with respect to the chosen variable x dtheta_dx - the derivative of theta with respect to the chosen variable x dphi_dx - the derivative of phi with respect to the chosen variable x R - Cylindrical Galactocentric radius z - vertical height phi - azimuth t - time OUTPUT: The forces in the x direction HISTORY: 2016-06-02 - Written - Aladdin ] variable[R] assign[=] call[name[nu].array, parameter[name[R]]] variable[z] assign[=] call[name[nu].array, parameter[name[z]]] variable[phi] assign[=] call[name[nu].array, parameter[name[phi]]] variable[shape] assign[=] binary_operation[binary_operation[name[R] * name[z]] * name[phi]].shape if compare[name[shape] equal[==] tuple[[]]] begin[:] <ast.Tuple object at 0x7da20c794c10> assign[=] call[name[self]._computeforce, parameter[name[R], name[z], name[phi]]] return[binary_operation[binary_operation[binary_operation[name[dr_dx] * name[dPhi_dr]] + binary_operation[name[dtheta_dx] * name[dPhi_dtheta]]] + binary_operation[name[dPhi_dphi] * name[dphi_dx]]]] variable[R] assign[=] binary_operation[name[R] * call[name[nu].ones, parameter[name[shape]]]] variable[z] assign[=] binary_operation[name[z] * call[name[nu].ones, parameter[name[shape]]]] variable[phi] assign[=] binary_operation[name[phi] * call[name[nu].ones, parameter[name[shape]]]] variable[force] assign[=] call[name[nu].zeros, parameter[name[shape], name[float]]] variable[dr_dx] assign[=] binary_operation[name[dr_dx] * call[name[nu].ones, parameter[name[shape]]]] variable[dtheta_dx] assign[=] binary_operation[name[dtheta_dx] * call[name[nu].ones, parameter[name[shape]]]] variable[dphi_dx] assign[=] binary_operation[name[dphi_dx] * call[name[nu].ones, parameter[name[shape]]]] variable[li] assign[=] call[name[_cartesian], 
parameter[name[shape]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[li].shape][constant[0]]]]] begin[:] variable[j] assign[=] call[name[tuple], parameter[call[name[nu].split, parameter[call[name[li]][name[i]], call[name[li].shape][constant[1]]]]]] <ast.Tuple object at 0x7da18dc99270> assign[=] call[name[self]._computeforce, parameter[call[call[name[R]][name[j]]][constant[0]], call[call[name[z]][name[j]]][constant[0]], call[call[name[phi]][name[j]]][constant[0]]]] call[name[force]][name[j]] assign[=] binary_operation[binary_operation[binary_operation[call[call[name[dr_dx]][name[j]]][constant[0]] * name[dPhi_dr]] + binary_operation[call[call[name[dtheta_dx]][name[j]]][constant[0]] * name[dPhi_dtheta]]] + binary_operation[name[dPhi_dphi] * call[call[name[dphi_dx]][name[j]]][constant[0]]]] return[name[force]]
keyword[def] identifier[_computeforceArray] ( identifier[self] , identifier[dr_dx] , identifier[dtheta_dx] , identifier[dphi_dx] , identifier[R] , identifier[z] , identifier[phi] ): literal[string] identifier[R] = identifier[nu] . identifier[array] ( identifier[R] , identifier[dtype] = identifier[float] ); identifier[z] = identifier[nu] . identifier[array] ( identifier[z] , identifier[dtype] = identifier[float] ); identifier[phi] = identifier[nu] . identifier[array] ( identifier[phi] , identifier[dtype] = identifier[float] ); identifier[shape] =( identifier[R] * identifier[z] * identifier[phi] ). identifier[shape] keyword[if] identifier[shape] ==(): identifier[dPhi_dr] , identifier[dPhi_dtheta] , identifier[dPhi_dphi] = identifier[self] . identifier[_computeforce] ( identifier[R] , identifier[z] , identifier[phi] ) keyword[return] identifier[dr_dx] * identifier[dPhi_dr] + identifier[dtheta_dx] * identifier[dPhi_dtheta] + identifier[dPhi_dphi] * identifier[dphi_dx] identifier[R] = identifier[R] * identifier[nu] . identifier[ones] ( identifier[shape] ); identifier[z] = identifier[z] * identifier[nu] . identifier[ones] ( identifier[shape] ); identifier[phi] = identifier[phi] * identifier[nu] . identifier[ones] ( identifier[shape] ); identifier[force] = identifier[nu] . identifier[zeros] ( identifier[shape] , identifier[float] ) identifier[dr_dx] = identifier[dr_dx] * identifier[nu] . identifier[ones] ( identifier[shape] ); identifier[dtheta_dx] = identifier[dtheta_dx] * identifier[nu] . identifier[ones] ( identifier[shape] ); identifier[dphi_dx] = identifier[dphi_dx] * identifier[nu] . identifier[ones] ( identifier[shape] ); identifier[li] = identifier[_cartesian] ( identifier[shape] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[li] . identifier[shape] [ literal[int] ]): identifier[j] = identifier[tuple] ( identifier[nu] . identifier[split] ( identifier[li] [ identifier[i] ], identifier[li] . 
identifier[shape] [ literal[int] ])) identifier[dPhi_dr] , identifier[dPhi_dtheta] , identifier[dPhi_dphi] = identifier[self] . identifier[_computeforce] ( identifier[R] [ identifier[j] ][ literal[int] ], identifier[z] [ identifier[j] ][ literal[int] ], identifier[phi] [ identifier[j] ][ literal[int] ]) identifier[force] [ identifier[j] ]= identifier[dr_dx] [ identifier[j] ][ literal[int] ]* identifier[dPhi_dr] + identifier[dtheta_dx] [ identifier[j] ][ literal[int] ]* identifier[dPhi_dtheta] + identifier[dPhi_dphi] * identifier[dphi_dx] [ identifier[j] ][ literal[int] ] keyword[return] identifier[force]
def _computeforceArray(self, dr_dx, dtheta_dx, dphi_dx, R, z, phi): """ NAME: _computeforceArray PURPOSE: evaluate the forces in the x direction for a given array of coordinates INPUT: dr_dx - the derivative of r with respect to the chosen variable x dtheta_dx - the derivative of theta with respect to the chosen variable x dphi_dx - the derivative of phi with respect to the chosen variable x R - Cylindrical Galactocentric radius z - vertical height phi - azimuth t - time OUTPUT: The forces in the x direction HISTORY: 2016-06-02 - Written - Aladdin """ R = nu.array(R, dtype=float) z = nu.array(z, dtype=float) phi = nu.array(phi, dtype=float) shape = (R * z * phi).shape if shape == (): (dPhi_dr, dPhi_dtheta, dPhi_dphi) = self._computeforce(R, z, phi) return dr_dx * dPhi_dr + dtheta_dx * dPhi_dtheta + dPhi_dphi * dphi_dx # depends on [control=['if'], data=[]] R = R * nu.ones(shape) z = z * nu.ones(shape) phi = phi * nu.ones(shape) force = nu.zeros(shape, float) dr_dx = dr_dx * nu.ones(shape) dtheta_dx = dtheta_dx * nu.ones(shape) dphi_dx = dphi_dx * nu.ones(shape) li = _cartesian(shape) for i in range(li.shape[0]): j = tuple(nu.split(li[i], li.shape[1])) (dPhi_dr, dPhi_dtheta, dPhi_dphi) = self._computeforce(R[j][0], z[j][0], phi[j][0]) force[j] = dr_dx[j][0] * dPhi_dr + dtheta_dx[j][0] * dPhi_dtheta + dPhi_dphi * dphi_dx[j][0] # depends on [control=['for'], data=['i']] return force
def call_once(self, request=None, *args, **kwargs): """ Performs one API request. Raises exception on failure. :param request: :param args: :param kwargs: :return: response """ if request is not None: self.request = request config = self.request.configuration if config.http_method != EBConsts.HTTP_METHOD_POST or config.method != EBConsts.METHOD_REST: raise Error('Not implemented yet, only REST POST method is allowed') url = self.request.url if self.request.url is not None else self.build_url() logger.debug("URL to call: %s", url) # Do the request resp = requests.post(url, json=self.request.body, timeout=config.timeout, headers=self.request.headers) self.last_resp = resp return self.check_response(resp)
def function[call_once, parameter[self, request]]: constant[ Performs one API request. Raises exception on failure. :param request: :param args: :param kwargs: :return: response ] if compare[name[request] is_not constant[None]] begin[:] name[self].request assign[=] name[request] variable[config] assign[=] name[self].request.configuration if <ast.BoolOp object at 0x7da20c6ab0d0> begin[:] <ast.Raise object at 0x7da1b14d7ac0> variable[url] assign[=] <ast.IfExp object at 0x7da1b14d56c0> call[name[logger].debug, parameter[constant[URL to call: %s], name[url]]] variable[resp] assign[=] call[name[requests].post, parameter[name[url]]] name[self].last_resp assign[=] name[resp] return[call[name[self].check_response, parameter[name[resp]]]]
keyword[def] identifier[call_once] ( identifier[self] , identifier[request] = keyword[None] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[request] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[request] = identifier[request] identifier[config] = identifier[self] . identifier[request] . identifier[configuration] keyword[if] identifier[config] . identifier[http_method] != identifier[EBConsts] . identifier[HTTP_METHOD_POST] keyword[or] identifier[config] . identifier[method] != identifier[EBConsts] . identifier[METHOD_REST] : keyword[raise] identifier[Error] ( literal[string] ) identifier[url] = identifier[self] . identifier[request] . identifier[url] keyword[if] identifier[self] . identifier[request] . identifier[url] keyword[is] keyword[not] keyword[None] keyword[else] identifier[self] . identifier[build_url] () identifier[logger] . identifier[debug] ( literal[string] , identifier[url] ) identifier[resp] = identifier[requests] . identifier[post] ( identifier[url] , identifier[json] = identifier[self] . identifier[request] . identifier[body] , identifier[timeout] = identifier[config] . identifier[timeout] , identifier[headers] = identifier[self] . identifier[request] . identifier[headers] ) identifier[self] . identifier[last_resp] = identifier[resp] keyword[return] identifier[self] . identifier[check_response] ( identifier[resp] )
def call_once(self, request=None, *args, **kwargs): """ Performs one API request. Raises exception on failure. :param request: :param args: :param kwargs: :return: response """ if request is not None: self.request = request # depends on [control=['if'], data=['request']] config = self.request.configuration if config.http_method != EBConsts.HTTP_METHOD_POST or config.method != EBConsts.METHOD_REST: raise Error('Not implemented yet, only REST POST method is allowed') # depends on [control=['if'], data=[]] url = self.request.url if self.request.url is not None else self.build_url() logger.debug('URL to call: %s', url) # Do the request resp = requests.post(url, json=self.request.body, timeout=config.timeout, headers=self.request.headers) self.last_resp = resp return self.check_response(resp)
def read_memory(self, addr, transfer_size=32, now=True): """! @brief Read a memory location. By default, a word will be read. """ assert transfer_size in (8, 16, 32) if transfer_size == 32: result = conversion.byte_list_to_u32le_list(self._link.read_mem32(addr, 4, self._apsel))[0] elif transfer_size == 16: result = conversion.byte_list_to_u16le_list(self._link.read_mem16(addr, 2, self._apsel))[0] elif transfer_size == 8: result = self._link.read_mem8(addr, 1, self._apsel)[0] def read_callback(): return result return result if now else read_callback
def function[read_memory, parameter[self, addr, transfer_size, now]]: constant[! @brief Read a memory location. By default, a word will be read. ] assert[compare[name[transfer_size] in tuple[[<ast.Constant object at 0x7da1b18ae1d0>, <ast.Constant object at 0x7da1b18aef80>, <ast.Constant object at 0x7da1b18ac700>]]]] if compare[name[transfer_size] equal[==] constant[32]] begin[:] variable[result] assign[=] call[call[name[conversion].byte_list_to_u32le_list, parameter[call[name[self]._link.read_mem32, parameter[name[addr], constant[4], name[self]._apsel]]]]][constant[0]] def function[read_callback, parameter[]]: return[name[result]] return[<ast.IfExp object at 0x7da1b18ace20>]
keyword[def] identifier[read_memory] ( identifier[self] , identifier[addr] , identifier[transfer_size] = literal[int] , identifier[now] = keyword[True] ): literal[string] keyword[assert] identifier[transfer_size] keyword[in] ( literal[int] , literal[int] , literal[int] ) keyword[if] identifier[transfer_size] == literal[int] : identifier[result] = identifier[conversion] . identifier[byte_list_to_u32le_list] ( identifier[self] . identifier[_link] . identifier[read_mem32] ( identifier[addr] , literal[int] , identifier[self] . identifier[_apsel] ))[ literal[int] ] keyword[elif] identifier[transfer_size] == literal[int] : identifier[result] = identifier[conversion] . identifier[byte_list_to_u16le_list] ( identifier[self] . identifier[_link] . identifier[read_mem16] ( identifier[addr] , literal[int] , identifier[self] . identifier[_apsel] ))[ literal[int] ] keyword[elif] identifier[transfer_size] == literal[int] : identifier[result] = identifier[self] . identifier[_link] . identifier[read_mem8] ( identifier[addr] , literal[int] , identifier[self] . identifier[_apsel] )[ literal[int] ] keyword[def] identifier[read_callback] (): keyword[return] identifier[result] keyword[return] identifier[result] keyword[if] identifier[now] keyword[else] identifier[read_callback]
def read_memory(self, addr, transfer_size=32, now=True): """! @brief Read a memory location. By default, a word will be read. """ assert transfer_size in (8, 16, 32) if transfer_size == 32: result = conversion.byte_list_to_u32le_list(self._link.read_mem32(addr, 4, self._apsel))[0] # depends on [control=['if'], data=[]] elif transfer_size == 16: result = conversion.byte_list_to_u16le_list(self._link.read_mem16(addr, 2, self._apsel))[0] # depends on [control=['if'], data=[]] elif transfer_size == 8: result = self._link.read_mem8(addr, 1, self._apsel)[0] # depends on [control=['if'], data=[]] def read_callback(): return result return result if now else read_callback
def get_caption_comments(content): """Retrieve an id and a caption from a code cell. If the code cell content begins with a commented block that looks like ## fig:id # multi-line or single-line # caption then the 'fig:id' and the caption will be returned. The '#' are stripped. """ if not content.startswith('## fig:'): return None, None content = content.splitlines() id = content[0].strip('## ') caption = [] for line in content[1:]: if not line.startswith('# ') or line.startswith('##'): break else: caption.append(line.lstrip('# ').rstrip()) # add " around the caption. TODO: consider doing this upstream # in pandoc-attributes caption = '"' + ' '.join(caption) + '"' return id, caption
def function[get_caption_comments, parameter[content]]: constant[Retrieve an id and a caption from a code cell. If the code cell content begins with a commented block that looks like ## fig:id # multi-line or single-line # caption then the 'fig:id' and the caption will be returned. The '#' are stripped. ] if <ast.UnaryOp object at 0x7da1b12b5ed0> begin[:] return[tuple[[<ast.Constant object at 0x7da1b11aaa10>, <ast.Constant object at 0x7da1b11aaa40>]]] variable[content] assign[=] call[name[content].splitlines, parameter[]] variable[id] assign[=] call[call[name[content]][constant[0]].strip, parameter[constant[## ]]] variable[caption] assign[=] list[[]] for taget[name[line]] in starred[call[name[content]][<ast.Slice object at 0x7da1b11aa620>]] begin[:] if <ast.BoolOp object at 0x7da1b11aa590> begin[:] break variable[caption] assign[=] binary_operation[binary_operation[constant["] + call[constant[ ].join, parameter[name[caption]]]] + constant["]] return[tuple[[<ast.Name object at 0x7da1b11a9d20>, <ast.Name object at 0x7da1b11a9cf0>]]]
keyword[def] identifier[get_caption_comments] ( identifier[content] ): literal[string] keyword[if] keyword[not] identifier[content] . identifier[startswith] ( literal[string] ): keyword[return] keyword[None] , keyword[None] identifier[content] = identifier[content] . identifier[splitlines] () identifier[id] = identifier[content] [ literal[int] ]. identifier[strip] ( literal[string] ) identifier[caption] =[] keyword[for] identifier[line] keyword[in] identifier[content] [ literal[int] :]: keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ) keyword[or] identifier[line] . identifier[startswith] ( literal[string] ): keyword[break] keyword[else] : identifier[caption] . identifier[append] ( identifier[line] . identifier[lstrip] ( literal[string] ). identifier[rstrip] ()) identifier[caption] = literal[string] + literal[string] . identifier[join] ( identifier[caption] )+ literal[string] keyword[return] identifier[id] , identifier[caption]
def get_caption_comments(content): """Retrieve an id and a caption from a code cell. If the code cell content begins with a commented block that looks like ## fig:id # multi-line or single-line # caption then the 'fig:id' and the caption will be returned. The '#' are stripped. """ if not content.startswith('## fig:'): return (None, None) # depends on [control=['if'], data=[]] content = content.splitlines() id = content[0].strip('## ') caption = [] for line in content[1:]: if not line.startswith('# ') or line.startswith('##'): break # depends on [control=['if'], data=[]] else: caption.append(line.lstrip('# ').rstrip()) # depends on [control=['for'], data=['line']] # add " around the caption. TODO: consider doing this upstream # in pandoc-attributes caption = '"' + ' '.join(caption) + '"' return (id, caption)
def deprecated(msg, dep_version): """Decorate a function, method or class to mark as deprecated. Raise DeprecationWarning and add a deprecation notice to the docstring. """ def wrapper(func): docstring = func.__doc__ or '' docstring_msg = '.. deprecated:: {version} {msg}'.format( version=dep_version, msg=msg, ) if docstring: # We don't know how far to indent this message # so instead we just dedent everything. string_list = docstring.splitlines() first_line = string_list[0] remaining = textwrap.dedent(''.join(string_list[1:])) docstring = '\n'.join([ first_line, remaining, '', docstring_msg, ]) else: docstring = docstring_msg func.__doc__ = docstring @wraps(func) def inner(*args, **kwargs): warnings.warn(msg, category=DeprecationWarning, stacklevel=2) return func(*args, **kwargs) return inner return wrapper
def function[deprecated, parameter[msg, dep_version]]: constant[Decorate a function, method or class to mark as deprecated. Raise DeprecationWarning and add a deprecation notice to the docstring. ] def function[wrapper, parameter[func]]: variable[docstring] assign[=] <ast.BoolOp object at 0x7da1b26582b0> variable[docstring_msg] assign[=] call[constant[.. deprecated:: {version} {msg}].format, parameter[]] if name[docstring] begin[:] variable[string_list] assign[=] call[name[docstring].splitlines, parameter[]] variable[first_line] assign[=] call[name[string_list]][constant[0]] variable[remaining] assign[=] call[name[textwrap].dedent, parameter[call[constant[].join, parameter[call[name[string_list]][<ast.Slice object at 0x7da1b26589d0>]]]]] variable[docstring] assign[=] call[constant[ ].join, parameter[list[[<ast.Name object at 0x7da1b2659b10>, <ast.Name object at 0x7da1b265a200>, <ast.Constant object at 0x7da1b26590f0>, <ast.Name object at 0x7da1b2658d00>]]]] name[func].__doc__ assign[=] name[docstring] def function[inner, parameter[]]: call[name[warnings].warn, parameter[name[msg]]] return[call[name[func], parameter[<ast.Starred object at 0x7da1b2658100>]]] return[name[inner]] return[name[wrapper]]
keyword[def] identifier[deprecated] ( identifier[msg] , identifier[dep_version] ): literal[string] keyword[def] identifier[wrapper] ( identifier[func] ): identifier[docstring] = identifier[func] . identifier[__doc__] keyword[or] literal[string] identifier[docstring_msg] = literal[string] . identifier[format] ( identifier[version] = identifier[dep_version] , identifier[msg] = identifier[msg] , ) keyword[if] identifier[docstring] : identifier[string_list] = identifier[docstring] . identifier[splitlines] () identifier[first_line] = identifier[string_list] [ literal[int] ] identifier[remaining] = identifier[textwrap] . identifier[dedent] ( literal[string] . identifier[join] ( identifier[string_list] [ literal[int] :])) identifier[docstring] = literal[string] . identifier[join] ([ identifier[first_line] , identifier[remaining] , literal[string] , identifier[docstring_msg] , ]) keyword[else] : identifier[docstring] = identifier[docstring_msg] identifier[func] . identifier[__doc__] = identifier[docstring] @ identifier[wraps] ( identifier[func] ) keyword[def] identifier[inner] (* identifier[args] ,** identifier[kwargs] ): identifier[warnings] . identifier[warn] ( identifier[msg] , identifier[category] = identifier[DeprecationWarning] , identifier[stacklevel] = literal[int] ) keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[inner] keyword[return] identifier[wrapper]
def deprecated(msg, dep_version): """Decorate a function, method or class to mark as deprecated. Raise DeprecationWarning and add a deprecation notice to the docstring. """ def wrapper(func): docstring = func.__doc__ or '' docstring_msg = '.. deprecated:: {version} {msg}'.format(version=dep_version, msg=msg) if docstring: # We don't know how far to indent this message # so instead we just dedent everything. string_list = docstring.splitlines() first_line = string_list[0] remaining = textwrap.dedent(''.join(string_list[1:])) docstring = '\n'.join([first_line, remaining, '', docstring_msg]) # depends on [control=['if'], data=[]] else: docstring = docstring_msg func.__doc__ = docstring @wraps(func) def inner(*args, **kwargs): warnings.warn(msg, category=DeprecationWarning, stacklevel=2) return func(*args, **kwargs) return inner return wrapper
def apply_mask(self, mask_img): """First set_mask and the get_masked_data. Parameters ---------- mask_img: nifti-like image, NeuroImage or str 3D mask array: True where a voxel should be used. Can either be: - a file path to a Nifti image - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image. If niimg is a string, consider it as a path to Nifti image and call nibabel.load on it. If it is an object, check if get_data() and get_affine() methods are present, raise TypeError otherwise. Returns ------- The masked data deepcopied """ self.set_mask(mask_img) return self.get_data(masked=True, smoothed=True, safe_copy=True)
def function[apply_mask, parameter[self, mask_img]]: constant[First set_mask and the get_masked_data. Parameters ---------- mask_img: nifti-like image, NeuroImage or str 3D mask array: True where a voxel should be used. Can either be: - a file path to a Nifti image - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image. If niimg is a string, consider it as a path to Nifti image and call nibabel.load on it. If it is an object, check if get_data() and get_affine() methods are present, raise TypeError otherwise. Returns ------- The masked data deepcopied ] call[name[self].set_mask, parameter[name[mask_img]]] return[call[name[self].get_data, parameter[]]]
keyword[def] identifier[apply_mask] ( identifier[self] , identifier[mask_img] ): literal[string] identifier[self] . identifier[set_mask] ( identifier[mask_img] ) keyword[return] identifier[self] . identifier[get_data] ( identifier[masked] = keyword[True] , identifier[smoothed] = keyword[True] , identifier[safe_copy] = keyword[True] )
def apply_mask(self, mask_img): """First set_mask and the get_masked_data. Parameters ---------- mask_img: nifti-like image, NeuroImage or str 3D mask array: True where a voxel should be used. Can either be: - a file path to a Nifti image - any object with get_data() and get_affine() methods, e.g., nibabel.Nifti1Image. If niimg is a string, consider it as a path to Nifti image and call nibabel.load on it. If it is an object, check if get_data() and get_affine() methods are present, raise TypeError otherwise. Returns ------- The masked data deepcopied """ self.set_mask(mask_img) return self.get_data(masked=True, smoothed=True, safe_copy=True)
def to_xml(self, opts = defaultdict(lambda: None)): ''' Generate XML from the current settings. ''' if not self.launch_url or not self.secure_launch_url: raise InvalidLTIConfigError('Invalid LTI configuration') root = etree.Element('cartridge_basiclti_link', attrib = { '{%s}%s' %(NSMAP['xsi'], 'schemaLocation'): 'http://www.imsglobal.org/xsd/imslticc_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticc_v1p0.xsd http://www.imsglobal.org/xsd/imsbasiclti_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imsbasiclti_v1p0p1.xsd http://www.imsglobal.org/xsd/imslticm_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticm_v1p0.xsd http://www.imsglobal.org/xsd/imslticp_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticp_v1p0.xsd', 'xmlns': 'http://www.imsglobal.org/xsd/imslticc_v1p0' }, nsmap = NSMAP) for key in ['title', 'description', 'launch_url', 'secure_launch_url']: option = etree.SubElement(root, '{%s}%s' %(NSMAP['blti'], key)) option.text = getattr(self, key) vendor_keys = ['name', 'code', 'description', 'url'] if any('vendor_' + key for key in vendor_keys) or\ self.vendor_contact_email: vendor_node = etree.SubElement(root, '{%s}%s' %(NSMAP['blti'], 'vendor')) for key in vendor_keys: if getattr(self, 'vendor_' + key) != None: v_node = etree.SubElement(vendor_node, '{%s}%s' %(NSMAP['lticp'], key)) v_node.text = getattr(self, 'vendor_' + key) if getattr(self, 'vendor_contact_email'): v_node = etree.SubElement(vendor_node, '{%s}%s' %(NSMAP['lticp'], 'contact')) c_name = etree.SubElement(v_node, '{%s}%s' %(NSMAP['lticp'], 'name')) c_name.text = self.vendor_contact_name c_email = etree.SubElement(v_node, '{%s}%s' %(NSMAP['lticp'], 'email')) c_email.text = self.vendor_contact_email # Custom params if len(self.custom_params) != 0: custom_node = etree.SubElement(root, '{%s}%s' %(NSMAP['blti'], 'custom')) for (key, val) in sorted(self.custom_params.items()): c_node = etree.SubElement(custom_node, '{%s}%s' %(NSMAP['lticm'], 'property')) c_node.set('name', key) c_node.text = 
val # Extension params if len(self.extensions) != 0: for (key, params) in sorted(self.extensions.items()): extension_node = etree.SubElement(root, '{%s}%s' %(NSMAP['blti'], 'extensions'), platform = key) self.recursive_options(extension_node,params) if getattr(self, 'cartridge_bundle'): identifierref = etree.SubElement(root, 'cartridge_bundle', identifierref = self.cartridge_bundle) if getattr(self, 'cartridge_icon'): identifierref = etree.SubElement(root, 'cartridge_icon', identifierref = self.cartridge_icon) return '<?xml version="1.0" encoding="UTF-8"?>' + etree.tostring(root)
def function[to_xml, parameter[self, opts]]: constant[ Generate XML from the current settings. ] if <ast.BoolOp object at 0x7da1b1b9d750> begin[:] <ast.Raise object at 0x7da1b1b9d690> variable[root] assign[=] call[name[etree].Element, parameter[constant[cartridge_basiclti_link]]] for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da1b1b9d270>, <ast.Constant object at 0x7da1b1b9d300>, <ast.Constant object at 0x7da1b1b9d4b0>, <ast.Constant object at 0x7da1b1b9f6d0>]]] begin[:] variable[option] assign[=] call[name[etree].SubElement, parameter[name[root], binary_operation[constant[{%s}%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9df00>, <ast.Name object at 0x7da1b1b9da80>]]]]] name[option].text assign[=] call[name[getattr], parameter[name[self], name[key]]] variable[vendor_keys] assign[=] list[[<ast.Constant object at 0x7da1b1b9ebc0>, <ast.Constant object at 0x7da1b1b9ec80>, <ast.Constant object at 0x7da1b1b9ec20>, <ast.Constant object at 0x7da1b1b9ed10>]] if <ast.BoolOp object at 0x7da1b1b9ece0> begin[:] variable[vendor_node] assign[=] call[name[etree].SubElement, parameter[name[root], binary_operation[constant[{%s}%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9dc30>, <ast.Constant object at 0x7da1b1b9d480>]]]]] for taget[name[key]] in starred[name[vendor_keys]] begin[:] if compare[call[name[getattr], parameter[name[self], binary_operation[constant[vendor_] + name[key]]]] not_equal[!=] constant[None]] begin[:] variable[v_node] assign[=] call[name[etree].SubElement, parameter[name[vendor_node], binary_operation[constant[{%s}%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9c3a0>, <ast.Name object at 0x7da1b1b9c040>]]]]] name[v_node].text assign[=] call[name[getattr], parameter[name[self], binary_operation[constant[vendor_] + name[key]]]] if call[name[getattr], parameter[name[self], constant[vendor_contact_email]]] begin[:] variable[v_node] assign[=] 
call[name[etree].SubElement, parameter[name[vendor_node], binary_operation[constant[{%s}%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9c430>, <ast.Constant object at 0x7da1b1b9c400>]]]]] variable[c_name] assign[=] call[name[etree].SubElement, parameter[name[v_node], binary_operation[constant[{%s}%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9c9d0>, <ast.Constant object at 0x7da1b1b9cbe0>]]]]] name[c_name].text assign[=] name[self].vendor_contact_name variable[c_email] assign[=] call[name[etree].SubElement, parameter[name[v_node], binary_operation[constant[{%s}%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9cca0>, <ast.Constant object at 0x7da1b1b9cdc0>]]]]] name[c_email].text assign[=] name[self].vendor_contact_email if compare[call[name[len], parameter[name[self].custom_params]] not_equal[!=] constant[0]] begin[:] variable[custom_node] assign[=] call[name[etree].SubElement, parameter[name[root], binary_operation[constant[{%s}%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9ee90>, <ast.Constant object at 0x7da1b1b9f370>]]]]] for taget[tuple[[<ast.Name object at 0x7da1b1b9e440>, <ast.Name object at 0x7da1b1b9f130>]]] in starred[call[name[sorted], parameter[call[name[self].custom_params.items, parameter[]]]]] begin[:] variable[c_node] assign[=] call[name[etree].SubElement, parameter[name[custom_node], binary_operation[constant[{%s}%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9cf40>, <ast.Constant object at 0x7da1b1b9d870>]]]]] call[name[c_node].set, parameter[constant[name], name[key]]] name[c_node].text assign[=] name[val] if compare[call[name[len], parameter[name[self].extensions]] not_equal[!=] constant[0]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b1b9d4e0>, <ast.Name object at 0x7da1b1b9d420>]]] in starred[call[name[sorted], parameter[call[name[self].extensions.items, parameter[]]]]] 
begin[:] variable[extension_node] assign[=] call[name[etree].SubElement, parameter[name[root], binary_operation[constant[{%s}%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b1b9dd50>, <ast.Constant object at 0x7da1b1b9d360>]]]]] call[name[self].recursive_options, parameter[name[extension_node], name[params]]] if call[name[getattr], parameter[name[self], constant[cartridge_bundle]]] begin[:] variable[identifierref] assign[=] call[name[etree].SubElement, parameter[name[root], constant[cartridge_bundle]]] if call[name[getattr], parameter[name[self], constant[cartridge_icon]]] begin[:] variable[identifierref] assign[=] call[name[etree].SubElement, parameter[name[root], constant[cartridge_icon]]] return[binary_operation[constant[<?xml version="1.0" encoding="UTF-8"?>] + call[name[etree].tostring, parameter[name[root]]]]]
keyword[def] identifier[to_xml] ( identifier[self] , identifier[opts] = identifier[defaultdict] ( keyword[lambda] : keyword[None] )): literal[string] keyword[if] keyword[not] identifier[self] . identifier[launch_url] keyword[or] keyword[not] identifier[self] . identifier[secure_launch_url] : keyword[raise] identifier[InvalidLTIConfigError] ( literal[string] ) identifier[root] = identifier[etree] . identifier[Element] ( literal[string] , identifier[attrib] ={ literal[string] %( identifier[NSMAP] [ literal[string] ], literal[string] ): literal[string] , literal[string] : literal[string] }, identifier[nsmap] = identifier[NSMAP] ) keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[option] = identifier[etree] . identifier[SubElement] ( identifier[root] , literal[string] %( identifier[NSMAP] [ literal[string] ], identifier[key] )) identifier[option] . identifier[text] = identifier[getattr] ( identifier[self] , identifier[key] ) identifier[vendor_keys] =[ literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] identifier[any] ( literal[string] + identifier[key] keyword[for] identifier[key] keyword[in] identifier[vendor_keys] ) keyword[or] identifier[self] . identifier[vendor_contact_email] : identifier[vendor_node] = identifier[etree] . identifier[SubElement] ( identifier[root] , literal[string] %( identifier[NSMAP] [ literal[string] ], literal[string] )) keyword[for] identifier[key] keyword[in] identifier[vendor_keys] : keyword[if] identifier[getattr] ( identifier[self] , literal[string] + identifier[key] )!= keyword[None] : identifier[v_node] = identifier[etree] . identifier[SubElement] ( identifier[vendor_node] , literal[string] %( identifier[NSMAP] [ literal[string] ], identifier[key] )) identifier[v_node] . 
identifier[text] = identifier[getattr] ( identifier[self] , literal[string] + identifier[key] ) keyword[if] identifier[getattr] ( identifier[self] , literal[string] ): identifier[v_node] = identifier[etree] . identifier[SubElement] ( identifier[vendor_node] , literal[string] %( identifier[NSMAP] [ literal[string] ], literal[string] )) identifier[c_name] = identifier[etree] . identifier[SubElement] ( identifier[v_node] , literal[string] %( identifier[NSMAP] [ literal[string] ], literal[string] )) identifier[c_name] . identifier[text] = identifier[self] . identifier[vendor_contact_name] identifier[c_email] = identifier[etree] . identifier[SubElement] ( identifier[v_node] , literal[string] %( identifier[NSMAP] [ literal[string] ], literal[string] )) identifier[c_email] . identifier[text] = identifier[self] . identifier[vendor_contact_email] keyword[if] identifier[len] ( identifier[self] . identifier[custom_params] )!= literal[int] : identifier[custom_node] = identifier[etree] . identifier[SubElement] ( identifier[root] , literal[string] %( identifier[NSMAP] [ literal[string] ], literal[string] )) keyword[for] ( identifier[key] , identifier[val] ) keyword[in] identifier[sorted] ( identifier[self] . identifier[custom_params] . identifier[items] ()): identifier[c_node] = identifier[etree] . identifier[SubElement] ( identifier[custom_node] , literal[string] %( identifier[NSMAP] [ literal[string] ], literal[string] )) identifier[c_node] . identifier[set] ( literal[string] , identifier[key] ) identifier[c_node] . identifier[text] = identifier[val] keyword[if] identifier[len] ( identifier[self] . identifier[extensions] )!= literal[int] : keyword[for] ( identifier[key] , identifier[params] ) keyword[in] identifier[sorted] ( identifier[self] . identifier[extensions] . identifier[items] ()): identifier[extension_node] = identifier[etree] . 
identifier[SubElement] ( identifier[root] , literal[string] %( identifier[NSMAP] [ literal[string] ], literal[string] ), identifier[platform] = identifier[key] ) identifier[self] . identifier[recursive_options] ( identifier[extension_node] , identifier[params] ) keyword[if] identifier[getattr] ( identifier[self] , literal[string] ): identifier[identifierref] = identifier[etree] . identifier[SubElement] ( identifier[root] , literal[string] , identifier[identifierref] = identifier[self] . identifier[cartridge_bundle] ) keyword[if] identifier[getattr] ( identifier[self] , literal[string] ): identifier[identifierref] = identifier[etree] . identifier[SubElement] ( identifier[root] , literal[string] , identifier[identifierref] = identifier[self] . identifier[cartridge_icon] ) keyword[return] literal[string] + identifier[etree] . identifier[tostring] ( identifier[root] )
def to_xml(self, opts=defaultdict(lambda : None)): """ Generate XML from the current settings. """ if not self.launch_url or not self.secure_launch_url: raise InvalidLTIConfigError('Invalid LTI configuration') # depends on [control=['if'], data=[]] root = etree.Element('cartridge_basiclti_link', attrib={'{%s}%s' % (NSMAP['xsi'], 'schemaLocation'): 'http://www.imsglobal.org/xsd/imslticc_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticc_v1p0.xsd http://www.imsglobal.org/xsd/imsbasiclti_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imsbasiclti_v1p0p1.xsd http://www.imsglobal.org/xsd/imslticm_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticm_v1p0.xsd http://www.imsglobal.org/xsd/imslticp_v1p0 http://www.imsglobal.org/xsd/lti/ltiv1p0/imslticp_v1p0.xsd', 'xmlns': 'http://www.imsglobal.org/xsd/imslticc_v1p0'}, nsmap=NSMAP) for key in ['title', 'description', 'launch_url', 'secure_launch_url']: option = etree.SubElement(root, '{%s}%s' % (NSMAP['blti'], key)) option.text = getattr(self, key) # depends on [control=['for'], data=['key']] vendor_keys = ['name', 'code', 'description', 'url'] if any(('vendor_' + key for key in vendor_keys)) or self.vendor_contact_email: vendor_node = etree.SubElement(root, '{%s}%s' % (NSMAP['blti'], 'vendor')) for key in vendor_keys: if getattr(self, 'vendor_' + key) != None: v_node = etree.SubElement(vendor_node, '{%s}%s' % (NSMAP['lticp'], key)) v_node.text = getattr(self, 'vendor_' + key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] if getattr(self, 'vendor_contact_email'): v_node = etree.SubElement(vendor_node, '{%s}%s' % (NSMAP['lticp'], 'contact')) c_name = etree.SubElement(v_node, '{%s}%s' % (NSMAP['lticp'], 'name')) c_name.text = self.vendor_contact_name c_email = etree.SubElement(v_node, '{%s}%s' % (NSMAP['lticp'], 'email')) c_email.text = self.vendor_contact_email # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Custom params if len(self.custom_params) != 
0: custom_node = etree.SubElement(root, '{%s}%s' % (NSMAP['blti'], 'custom')) for (key, val) in sorted(self.custom_params.items()): c_node = etree.SubElement(custom_node, '{%s}%s' % (NSMAP['lticm'], 'property')) c_node.set('name', key) c_node.text = val # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # Extension params if len(self.extensions) != 0: for (key, params) in sorted(self.extensions.items()): extension_node = etree.SubElement(root, '{%s}%s' % (NSMAP['blti'], 'extensions'), platform=key) self.recursive_options(extension_node, params) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] if getattr(self, 'cartridge_bundle'): identifierref = etree.SubElement(root, 'cartridge_bundle', identifierref=self.cartridge_bundle) # depends on [control=['if'], data=[]] if getattr(self, 'cartridge_icon'): identifierref = etree.SubElement(root, 'cartridge_icon', identifierref=self.cartridge_icon) # depends on [control=['if'], data=[]] return '<?xml version="1.0" encoding="UTF-8"?>' + etree.tostring(root)
def formatExcept(cls, error, trace): """ Formats the inputted class, error, and traceback information to the standard output commonly found in Python interpreters. :param cls | <type> error | <str> trace | <traceback> :return <str> """ clsname = cls.__name__ if cls else 'UnknownError' tb = 'Traceback (most recent call last):\n' tb += ''.join(traceback.format_tb(trace)) tb += '{0}: {1}'.format(clsname, error) return tb
def function[formatExcept, parameter[cls, error, trace]]: constant[ Formats the inputted class, error, and traceback information to the standard output commonly found in Python interpreters. :param cls | <type> error | <str> trace | <traceback> :return <str> ] variable[clsname] assign[=] <ast.IfExp object at 0x7da1b274a110> variable[tb] assign[=] constant[Traceback (most recent call last): ] <ast.AugAssign object at 0x7da1b274a0b0> <ast.AugAssign object at 0x7da1b274ab00> return[name[tb]]
keyword[def] identifier[formatExcept] ( identifier[cls] , identifier[error] , identifier[trace] ): literal[string] identifier[clsname] = identifier[cls] . identifier[__name__] keyword[if] identifier[cls] keyword[else] literal[string] identifier[tb] = literal[string] identifier[tb] += literal[string] . identifier[join] ( identifier[traceback] . identifier[format_tb] ( identifier[trace] )) identifier[tb] += literal[string] . identifier[format] ( identifier[clsname] , identifier[error] ) keyword[return] identifier[tb]
def formatExcept(cls, error, trace): """ Formats the inputted class, error, and traceback information to the standard output commonly found in Python interpreters. :param cls | <type> error | <str> trace | <traceback> :return <str> """ clsname = cls.__name__ if cls else 'UnknownError' tb = 'Traceback (most recent call last):\n' tb += ''.join(traceback.format_tb(trace)) tb += '{0}: {1}'.format(clsname, error) return tb
def QA_fetch_financial_report(code, report_date, ltype='EN', db=DATABASE): """获取专业财务报表 Arguments: code {[type]} -- [description] report_date {[type]} -- [description] Keyword Arguments: ltype {str} -- [description] (default: {'EN'}) db {[type]} -- [description] (default: {DATABASE}) Raises: e -- [description] Returns: pd.DataFrame -- [description] """ if isinstance(code, str): code = [code] if isinstance(report_date, str): report_date = [QA_util_date_str2int(report_date)] elif isinstance(report_date, int): report_date = [report_date] elif isinstance(report_date, list): report_date = [QA_util_date_str2int(item) for item in report_date] collection = db.financial num_columns = [item[:3] for item in list(financial_dict.keys())] CH_columns = [item[3:] for item in list(financial_dict.keys())] EN_columns = list(financial_dict.values()) #num_columns.extend(['283', '_id', 'code', 'report_date']) # CH_columns.extend(['283', '_id', 'code', 'report_date']) #CH_columns = pd.Index(CH_columns) #EN_columns = list(financial_dict.values()) #EN_columns.extend(['283', '_id', 'code', 'report_date']) #EN_columns = pd.Index(EN_columns) try: if code is not None and report_date is not None: data = [item for item in collection.find( {'code': {'$in': code}, 'report_date': {'$in': report_date}}, {"_id": 0}, batch_size=10000)] elif code is None and report_date is not None: data = [item for item in collection.find( {'report_date': {'$in': report_date}}, {"_id": 0}, batch_size=10000)] elif code is not None and report_date is None: data = [item for item in collection.find( {'code': {'$in': code}}, {"_id": 0}, batch_size=10000)] else: data = [item for item in collection.find({}, {"_id": 0})] if len(data) > 0: res_pd = pd.DataFrame(data) if ltype in ['CH', 'CN']: cndict = dict(zip(num_columns, CH_columns)) cndict['283'] = '283' try: cndict['284'] = '284' cndict['285'] = '285' cndict['286'] = '286' except: pass cndict['code'] = 'code' cndict['report_date'] = 'report_date' res_pd.columns = 
res_pd.columns.map(lambda x: cndict[x]) elif ltype is 'EN': endict = dict(zip(num_columns, EN_columns)) endict['283'] = '283' try: endict['284'] = '284' endict['285'] = '285' endict['286'] = '286' except: pass endict['code'] = 'code' endict['report_date'] = 'report_date' res_pd.columns = res_pd.columns.map(lambda x: endict[x]) if res_pd.report_date.dtype == numpy.int64: res_pd.report_date = pd.to_datetime( res_pd.report_date.apply(QA_util_date_int2str)) else: res_pd.report_date = pd.to_datetime(res_pd.report_date) return res_pd.replace(-4.039810335e+34, numpy.nan).set_index(['report_date', 'code'], drop=False) else: return None except Exception as e: raise e
def function[QA_fetch_financial_report, parameter[code, report_date, ltype, db]]: constant[获取专业财务报表 Arguments: code {[type]} -- [description] report_date {[type]} -- [description] Keyword Arguments: ltype {str} -- [description] (default: {'EN'}) db {[type]} -- [description] (default: {DATABASE}) Raises: e -- [description] Returns: pd.DataFrame -- [description] ] if call[name[isinstance], parameter[name[code], name[str]]] begin[:] variable[code] assign[=] list[[<ast.Name object at 0x7da1b1fdfc10>]] if call[name[isinstance], parameter[name[report_date], name[str]]] begin[:] variable[report_date] assign[=] list[[<ast.Call object at 0x7da1b1fdfa60>]] variable[collection] assign[=] name[db].financial variable[num_columns] assign[=] <ast.ListComp object at 0x7da1b1fdf430> variable[CH_columns] assign[=] <ast.ListComp object at 0x7da1b1fdf160> variable[EN_columns] assign[=] call[name[list], parameter[call[name[financial_dict].values, parameter[]]]] <ast.Try object at 0x7da1b1fdeda0>
keyword[def] identifier[QA_fetch_financial_report] ( identifier[code] , identifier[report_date] , identifier[ltype] = literal[string] , identifier[db] = identifier[DATABASE] ): literal[string] keyword[if] identifier[isinstance] ( identifier[code] , identifier[str] ): identifier[code] =[ identifier[code] ] keyword[if] identifier[isinstance] ( identifier[report_date] , identifier[str] ): identifier[report_date] =[ identifier[QA_util_date_str2int] ( identifier[report_date] )] keyword[elif] identifier[isinstance] ( identifier[report_date] , identifier[int] ): identifier[report_date] =[ identifier[report_date] ] keyword[elif] identifier[isinstance] ( identifier[report_date] , identifier[list] ): identifier[report_date] =[ identifier[QA_util_date_str2int] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[report_date] ] identifier[collection] = identifier[db] . identifier[financial] identifier[num_columns] =[ identifier[item] [: literal[int] ] keyword[for] identifier[item] keyword[in] identifier[list] ( identifier[financial_dict] . identifier[keys] ())] identifier[CH_columns] =[ identifier[item] [ literal[int] :] keyword[for] identifier[item] keyword[in] identifier[list] ( identifier[financial_dict] . identifier[keys] ())] identifier[EN_columns] = identifier[list] ( identifier[financial_dict] . identifier[values] ()) keyword[try] : keyword[if] identifier[code] keyword[is] keyword[not] keyword[None] keyword[and] identifier[report_date] keyword[is] keyword[not] keyword[None] : identifier[data] =[ identifier[item] keyword[for] identifier[item] keyword[in] identifier[collection] . 
identifier[find] ( { literal[string] :{ literal[string] : identifier[code] }, literal[string] :{ literal[string] : identifier[report_date] }},{ literal[string] : literal[int] }, identifier[batch_size] = literal[int] )] keyword[elif] identifier[code] keyword[is] keyword[None] keyword[and] identifier[report_date] keyword[is] keyword[not] keyword[None] : identifier[data] =[ identifier[item] keyword[for] identifier[item] keyword[in] identifier[collection] . identifier[find] ( { literal[string] :{ literal[string] : identifier[report_date] }},{ literal[string] : literal[int] }, identifier[batch_size] = literal[int] )] keyword[elif] identifier[code] keyword[is] keyword[not] keyword[None] keyword[and] identifier[report_date] keyword[is] keyword[None] : identifier[data] =[ identifier[item] keyword[for] identifier[item] keyword[in] identifier[collection] . identifier[find] ( { literal[string] :{ literal[string] : identifier[code] }},{ literal[string] : literal[int] }, identifier[batch_size] = literal[int] )] keyword[else] : identifier[data] =[ identifier[item] keyword[for] identifier[item] keyword[in] identifier[collection] . identifier[find] ({},{ literal[string] : literal[int] })] keyword[if] identifier[len] ( identifier[data] )> literal[int] : identifier[res_pd] = identifier[pd] . identifier[DataFrame] ( identifier[data] ) keyword[if] identifier[ltype] keyword[in] [ literal[string] , literal[string] ]: identifier[cndict] = identifier[dict] ( identifier[zip] ( identifier[num_columns] , identifier[CH_columns] )) identifier[cndict] [ literal[string] ]= literal[string] keyword[try] : identifier[cndict] [ literal[string] ]= literal[string] identifier[cndict] [ literal[string] ]= literal[string] identifier[cndict] [ literal[string] ]= literal[string] keyword[except] : keyword[pass] identifier[cndict] [ literal[string] ]= literal[string] identifier[cndict] [ literal[string] ]= literal[string] identifier[res_pd] . identifier[columns] = identifier[res_pd] . identifier[columns] . 
identifier[map] ( keyword[lambda] identifier[x] : identifier[cndict] [ identifier[x] ]) keyword[elif] identifier[ltype] keyword[is] literal[string] : identifier[endict] = identifier[dict] ( identifier[zip] ( identifier[num_columns] , identifier[EN_columns] )) identifier[endict] [ literal[string] ]= literal[string] keyword[try] : identifier[endict] [ literal[string] ]= literal[string] identifier[endict] [ literal[string] ]= literal[string] identifier[endict] [ literal[string] ]= literal[string] keyword[except] : keyword[pass] identifier[endict] [ literal[string] ]= literal[string] identifier[endict] [ literal[string] ]= literal[string] identifier[res_pd] . identifier[columns] = identifier[res_pd] . identifier[columns] . identifier[map] ( keyword[lambda] identifier[x] : identifier[endict] [ identifier[x] ]) keyword[if] identifier[res_pd] . identifier[report_date] . identifier[dtype] == identifier[numpy] . identifier[int64] : identifier[res_pd] . identifier[report_date] = identifier[pd] . identifier[to_datetime] ( identifier[res_pd] . identifier[report_date] . identifier[apply] ( identifier[QA_util_date_int2str] )) keyword[else] : identifier[res_pd] . identifier[report_date] = identifier[pd] . identifier[to_datetime] ( identifier[res_pd] . identifier[report_date] ) keyword[return] identifier[res_pd] . identifier[replace] (- literal[int] , identifier[numpy] . identifier[nan] ). identifier[set_index] ([ literal[string] , literal[string] ], identifier[drop] = keyword[False] ) keyword[else] : keyword[return] keyword[None] keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[e]
def QA_fetch_financial_report(code, report_date, ltype='EN', db=DATABASE): """获取专业财务报表 Arguments: code {[type]} -- [description] report_date {[type]} -- [description] Keyword Arguments: ltype {str} -- [description] (default: {'EN'}) db {[type]} -- [description] (default: {DATABASE}) Raises: e -- [description] Returns: pd.DataFrame -- [description] """ if isinstance(code, str): code = [code] # depends on [control=['if'], data=[]] if isinstance(report_date, str): report_date = [QA_util_date_str2int(report_date)] # depends on [control=['if'], data=[]] elif isinstance(report_date, int): report_date = [report_date] # depends on [control=['if'], data=[]] elif isinstance(report_date, list): report_date = [QA_util_date_str2int(item) for item in report_date] # depends on [control=['if'], data=[]] collection = db.financial num_columns = [item[:3] for item in list(financial_dict.keys())] CH_columns = [item[3:] for item in list(financial_dict.keys())] EN_columns = list(financial_dict.values()) #num_columns.extend(['283', '_id', 'code', 'report_date']) # CH_columns.extend(['283', '_id', 'code', 'report_date']) #CH_columns = pd.Index(CH_columns) #EN_columns = list(financial_dict.values()) #EN_columns.extend(['283', '_id', 'code', 'report_date']) #EN_columns = pd.Index(EN_columns) try: if code is not None and report_date is not None: data = [item for item in collection.find({'code': {'$in': code}, 'report_date': {'$in': report_date}}, {'_id': 0}, batch_size=10000)] # depends on [control=['if'], data=[]] elif code is None and report_date is not None: data = [item for item in collection.find({'report_date': {'$in': report_date}}, {'_id': 0}, batch_size=10000)] # depends on [control=['if'], data=[]] elif code is not None and report_date is None: data = [item for item in collection.find({'code': {'$in': code}}, {'_id': 0}, batch_size=10000)] # depends on [control=['if'], data=[]] else: data = [item for item in collection.find({}, {'_id': 0})] if len(data) > 0: res_pd = 
pd.DataFrame(data) if ltype in ['CH', 'CN']: cndict = dict(zip(num_columns, CH_columns)) cndict['283'] = '283' try: cndict['284'] = '284' cndict['285'] = '285' cndict['286'] = '286' # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] cndict['code'] = 'code' cndict['report_date'] = 'report_date' res_pd.columns = res_pd.columns.map(lambda x: cndict[x]) # depends on [control=['if'], data=[]] elif ltype is 'EN': endict = dict(zip(num_columns, EN_columns)) endict['283'] = '283' try: endict['284'] = '284' endict['285'] = '285' endict['286'] = '286' # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] endict['code'] = 'code' endict['report_date'] = 'report_date' res_pd.columns = res_pd.columns.map(lambda x: endict[x]) # depends on [control=['if'], data=[]] if res_pd.report_date.dtype == numpy.int64: res_pd.report_date = pd.to_datetime(res_pd.report_date.apply(QA_util_date_int2str)) # depends on [control=['if'], data=[]] else: res_pd.report_date = pd.to_datetime(res_pd.report_date) return res_pd.replace(-4.039810335e+34, numpy.nan).set_index(['report_date', 'code'], drop=False) # depends on [control=['if'], data=[]] else: return None # depends on [control=['try'], data=[]] except Exception as e: raise e # depends on [control=['except'], data=['e']]
def set_object_text(self, text, force_refresh=False, ignore_unknown=False): """Set object analyzed by Help""" if (self.locked and not force_refresh): return self.switch_to_console_source() add_to_combo = True if text is None: text = to_text_string(self.combo.currentText()) add_to_combo = False found = self.show_help(text, ignore_unknown=ignore_unknown) if ignore_unknown and not found: return if add_to_combo: self.combo.add_text(text) if found: self.save_history() if self.dockwidget is not None: self.dockwidget.blockSignals(True) self.__eventually_raise_help(text, force=force_refresh) if self.dockwidget is not None: self.dockwidget.blockSignals(False)
def function[set_object_text, parameter[self, text, force_refresh, ignore_unknown]]: constant[Set object analyzed by Help] if <ast.BoolOp object at 0x7da1b21d4730> begin[:] return[None] call[name[self].switch_to_console_source, parameter[]] variable[add_to_combo] assign[=] constant[True] if compare[name[text] is constant[None]] begin[:] variable[text] assign[=] call[name[to_text_string], parameter[call[name[self].combo.currentText, parameter[]]]] variable[add_to_combo] assign[=] constant[False] variable[found] assign[=] call[name[self].show_help, parameter[name[text]]] if <ast.BoolOp object at 0x7da20e954d00> begin[:] return[None] if name[add_to_combo] begin[:] call[name[self].combo.add_text, parameter[name[text]]] if name[found] begin[:] call[name[self].save_history, parameter[]] if compare[name[self].dockwidget is_not constant[None]] begin[:] call[name[self].dockwidget.blockSignals, parameter[constant[True]]] call[name[self].__eventually_raise_help, parameter[name[text]]] if compare[name[self].dockwidget is_not constant[None]] begin[:] call[name[self].dockwidget.blockSignals, parameter[constant[False]]]
keyword[def] identifier[set_object_text] ( identifier[self] , identifier[text] , identifier[force_refresh] = keyword[False] , identifier[ignore_unknown] = keyword[False] ): literal[string] keyword[if] ( identifier[self] . identifier[locked] keyword[and] keyword[not] identifier[force_refresh] ): keyword[return] identifier[self] . identifier[switch_to_console_source] () identifier[add_to_combo] = keyword[True] keyword[if] identifier[text] keyword[is] keyword[None] : identifier[text] = identifier[to_text_string] ( identifier[self] . identifier[combo] . identifier[currentText] ()) identifier[add_to_combo] = keyword[False] identifier[found] = identifier[self] . identifier[show_help] ( identifier[text] , identifier[ignore_unknown] = identifier[ignore_unknown] ) keyword[if] identifier[ignore_unknown] keyword[and] keyword[not] identifier[found] : keyword[return] keyword[if] identifier[add_to_combo] : identifier[self] . identifier[combo] . identifier[add_text] ( identifier[text] ) keyword[if] identifier[found] : identifier[self] . identifier[save_history] () keyword[if] identifier[self] . identifier[dockwidget] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[dockwidget] . identifier[blockSignals] ( keyword[True] ) identifier[self] . identifier[__eventually_raise_help] ( identifier[text] , identifier[force] = identifier[force_refresh] ) keyword[if] identifier[self] . identifier[dockwidget] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[dockwidget] . identifier[blockSignals] ( keyword[False] )
def set_object_text(self, text, force_refresh=False, ignore_unknown=False): """Set object analyzed by Help""" if self.locked and (not force_refresh): return # depends on [control=['if'], data=[]] self.switch_to_console_source() add_to_combo = True if text is None: text = to_text_string(self.combo.currentText()) add_to_combo = False # depends on [control=['if'], data=['text']] found = self.show_help(text, ignore_unknown=ignore_unknown) if ignore_unknown and (not found): return # depends on [control=['if'], data=[]] if add_to_combo: self.combo.add_text(text) # depends on [control=['if'], data=[]] if found: self.save_history() # depends on [control=['if'], data=[]] if self.dockwidget is not None: self.dockwidget.blockSignals(True) # depends on [control=['if'], data=[]] self.__eventually_raise_help(text, force=force_refresh) if self.dockwidget is not None: self.dockwidget.blockSignals(False) # depends on [control=['if'], data=[]]