Columns (all strings; value lengths, min to max):
code: 75 to 104k
code_sememe: 47 to 309k
token_type: 215 to 214k
code_dependency: 75 to 155k
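Each sample below spans four rows: the raw source (code), a linearised AST walk (code_sememe), a token-category stream (token_type), and the source annotated with control-dependency comments (code_dependency). A minimal sketch of inspecting such a dump, assuming it comes from a Hugging Face dataset; the identifier below is a placeholder, not the real dataset path:

# Hypothetical loader: "user/code-views" stands in for the unstated dataset path.
from datasets import load_dataset

ds = load_dataset("user/code-views", split="train")
row = ds[0]
for col in ("code", "code_sememe", "token_type", "code_dependency"):
    print(col, "->", row[col][:80])  # peek at the start of each representation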
def _on_dirty_changed(self, dirty):
    """ Adds a star in front of a dirty tab and emits dirty_changed. """
    try:
        title = self._current._tab_name
        index = self.indexOf(self._current)
        if dirty:
            self.setTabText(index, "* " + title)
        else:
            self.setTabText(index, title)
    except AttributeError:
        pass
    self.dirty_changed.emit(dirty)
def function[_on_dirty_changed, parameter[self, dirty]]: constant[ Adds a star in front of a dirty tab and emits dirty_changed. ] <ast.Try object at 0x7da18f7222c0> call[name[self].dirty_changed.emit, parameter[name[dirty]]]
keyword[def] identifier[_on_dirty_changed] ( identifier[self] , identifier[dirty] ): literal[string] keyword[try] : identifier[title] = identifier[self] . identifier[_current] . identifier[_tab_name] identifier[index] = identifier[self] . identifier[indexOf] ( identifier[self] . identifier[_current] ) keyword[if] identifier[dirty] : identifier[self] . identifier[setTabText] ( identifier[index] , literal[string] + identifier[title] ) keyword[else] : identifier[self] . identifier[setTabText] ( identifier[index] , identifier[title] ) keyword[except] identifier[AttributeError] : keyword[pass] identifier[self] . identifier[dirty_changed] . identifier[emit] ( identifier[dirty] )
def _on_dirty_changed(self, dirty):
    """ Adds a star in front of a dirty tab and emits dirty_changed. """
    try:
        title = self._current._tab_name
        index = self.indexOf(self._current)
        if dirty:
            self.setTabText(index, '* ' + title) # depends on [control=['if'], data=[]]
        else:
            self.setTabText(index, title) # depends on [control=['try'], data=[]]
    except AttributeError:
        pass # depends on [control=['except'], data=[]]
    self.dirty_changed.emit(dirty)
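The token_type rows, like the third row of the sample above, replace each token with a category tag (keyword[...], identifier[...], literal[...]). A rough sketch of producing a similar stream with the standard library, assuming simple tagging rules; the dataset's actual tokenizer is not documented here:

import io
import keyword
import tokenize

def to_token_types(source):
    """Tag names, keywords and literals, approximating the token_type column."""
    out = []
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.NAME:
            tag = 'keyword' if keyword.iskeyword(tok.string) else 'identifier'
            out.append('%s[%s]' % (tag, tok.string))
        elif tok.type == tokenize.STRING:
            out.append('literal[string]')
        elif tok.type == tokenize.NUMBER:
            out.append('literal[int]')  # the column tags all numbers this way
        elif tok.type == tokenize.OP:
            out.append(tok.string)
    return ' '.join(out)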
def getmap(self, path, query=None):
    """
    Performs a GET request where the response content type is required
    to be "application/json" and the content is a JSON-encoded data
    structure. The decoded structure is returned.
    """
    code, data, ctype = self.get(path, query)
    if ctype != 'application/json':
        self.log.error("Expecting JSON from GET of '%s', got '%s'",
                       self.lastpath, ctype)
        raise HttpError(code=400, content_type='text/plain',
                        content='Remote returned invalid content type: ' + ctype)
    try:
        result = json.loads(data)
    except Exception as e:  # pragma: no cover
        self.log.error("Could not load JSON content from GET %r -- %s",
                       self.lastpath, e)
        raise HttpError(code=400, content_type='text/plain',
                        content='Could not load JSON content')
    return result
def function[getmap, parameter[self, path, query]]: constant[ Performs a GET request where the response content type is required to be "application/json" and the content is a JSON-encoded data structure. The decoded structure is returned. ] <ast.Tuple object at 0x7da18bc73520> assign[=] call[name[self].get, parameter[name[path], name[query]]] if compare[name[ctype] not_equal[!=] constant[application/json]] begin[:] call[name[self].log.error, parameter[constant[Expecting JSON from GET of '%s', got '%s'], name[self].lastpath, name[ctype]]] <ast.Raise object at 0x7da18bc72320> <ast.Try object at 0x7da2045647c0> return[name[result]]
keyword[def] identifier[getmap] ( identifier[self] , identifier[path] , identifier[query] = keyword[None] ): literal[string] identifier[code] , identifier[data] , identifier[ctype] = identifier[self] . identifier[get] ( identifier[path] , identifier[query] ) keyword[if] identifier[ctype] != literal[string] : identifier[self] . identifier[log] . identifier[error] ( literal[string] , identifier[self] . identifier[lastpath] , identifier[ctype] ) keyword[raise] identifier[HttpError] ( identifier[code] = literal[int] , identifier[content_type] = literal[string] , identifier[content] = literal[string] + identifier[ctype] ) keyword[try] : identifier[result] = identifier[json] . identifier[loads] ( identifier[data] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[log] . identifier[error] ( literal[string] , identifier[self] . identifier[lastpath] , identifier[e] ) keyword[raise] identifier[HttpError] ( identifier[code] = literal[int] , identifier[content_type] = literal[string] , identifier[content] = literal[string] ) keyword[return] identifier[result]
def getmap(self, path, query=None):
    """
    Performs a GET request where the response content type is required
    to be "application/json" and the content is a JSON-encoded data
    structure. The decoded structure is returned.
    """
    (code, data, ctype) = self.get(path, query)
    if ctype != 'application/json':
        self.log.error("Expecting JSON from GET of '%s', got '%s'", self.lastpath, ctype)
        raise HttpError(code=400, content_type='text/plain', content='Remote returned invalid content type: ' + ctype) # depends on [control=['if'], data=['ctype']]
    try:
        result = json.loads(data) # depends on [control=['try'], data=[]]
    except Exception as e:  # pragma: no cover
        self.log.error('Could not load JSON content from GET %r -- %s', self.lastpath, e)
        raise HttpError(code=400, content_type='text/plain', content='Could not load JSON content') # depends on [control=['except'], data=['e']]
    return result
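The code_sememe rows are a linearised AST walk in which unresolved nodes survive as raw object reprs (the <ast.Try object at ...> entries). For orientation only, the standard library offers a comparable structural view, though its grammar differs from the sememe notation used here:

import ast

src = "def f(x):\n    return x + 1\n"
# ast.dump renders the full parse tree; the indent= keyword needs Python 3.9+.
print(ast.dump(ast.parse(src), indent=2))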
def update(self, name, **kwargs):
    """
    Update existing role.

    http://www.keycloak.org/docs-api/3.4/rest-api/index.html#_roles_resource

    :param str name: Name for the role
    :param str description: (optional)
    :param str id: (optional)
    :param bool client_role: (optional)
    :param bool composite: (optional)
    :param object composites: (optional)
    :param str container_id: (optional)
    :param bool scope_param_required: (optional)
    """
    payload = OrderedDict(name=name)
    for key in ROLE_KWARGS:
        if key in kwargs:
            payload[to_camel_case(key)] = kwargs[key]

    return self._client.put(
        url=self._client.get_full_url(
            self.get_path('single', realm=self._realm_name, id=self._client_id,
                          role_name=self._role_name)
        ),
        data=json.dumps(payload, sort_keys=True)
    )
def function[update, parameter[self, name]]: constant[ Update existing role. http://www.keycloak.org/docs-api/3.4/rest-api/index.html#_roles_resource :param str name: Name for the role :param str description: (optional) :param str id: (optional) :param bool client_role: (optional) :param bool composite: (optional) :param object composites: (optional) :param str container_id: (optional) :param bool scope_param_required: (optional) ] variable[payload] assign[=] call[name[OrderedDict], parameter[]] for taget[name[key]] in starred[name[ROLE_KWARGS]] begin[:] if compare[name[key] in name[kwargs]] begin[:] call[name[payload]][call[name[to_camel_case], parameter[name[key]]]] assign[=] call[name[kwargs]][name[key]] return[call[name[self]._client.put, parameter[]]]
keyword[def] identifier[update] ( identifier[self] , identifier[name] ,** identifier[kwargs] ): literal[string] identifier[payload] = identifier[OrderedDict] ( identifier[name] = identifier[name] ) keyword[for] identifier[key] keyword[in] identifier[ROLE_KWARGS] : keyword[if] identifier[key] keyword[in] identifier[kwargs] : identifier[payload] [ identifier[to_camel_case] ( identifier[key] )]= identifier[kwargs] [ identifier[key] ] keyword[return] identifier[self] . identifier[_client] . identifier[put] ( identifier[url] = identifier[self] . identifier[_client] . identifier[get_full_url] ( identifier[self] . identifier[get_path] ( literal[string] , identifier[realm] = identifier[self] . identifier[_realm_name] , identifier[id] = identifier[self] . identifier[_client_id] , identifier[role_name] = identifier[self] . identifier[_role_name] ) ), identifier[data] = identifier[json] . identifier[dumps] ( identifier[payload] , identifier[sort_keys] = keyword[True] ) )
def update(self, name, **kwargs):
    """
    Update existing role.

    http://www.keycloak.org/docs-api/3.4/rest-api/index.html#_roles_resource

    :param str name: Name for the role
    :param str description: (optional)
    :param str id: (optional)
    :param bool client_role: (optional)
    :param bool composite: (optional)
    :param object composites: (optional)
    :param str container_id: (optional)
    :param bool scope_param_required: (optional)
    """
    payload = OrderedDict(name=name)
    for key in ROLE_KWARGS:
        if key in kwargs:
            payload[to_camel_case(key)] = kwargs[key] # depends on [control=['if'], data=['key', 'kwargs']]
        # depends on [control=['for'], data=['key']]
    return self._client.put(url=self._client.get_full_url(self.get_path('single', realm=self._realm_name, id=self._client_id, role_name=self._role_name)), data=json.dumps(payload, sort_keys=True))
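The code_dependency rows attach # depends on [control=..., data=...] comments recording which control construct each statement sits under. A rough sketch of recovering the control half by walking the AST; the data half and the exact annotation placement are beyond this approximation:

import ast

def control_deps(source):
    """Map a statement's line number to the control construct enclosing it."""
    deps = {}
    for node in ast.walk(ast.parse(source)):
        if isinstance(node, (ast.If, ast.For, ast.While, ast.Try)):
            for stmt in getattr(node, 'body', []) + getattr(node, 'orelse', []):
                # ast.walk visits outer nodes first, so inner constructs
                # overwrite outer ones, leaving the innermost dependency
                deps[stmt.lineno] = type(node).__name__.lower()
    return deps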
def parse_environment_file(filename, world_size=(60, 60)):
    """
    Extract information about spatial resources from an environment file.

    Arguments:
    filename - a string representing the path to the environment file.
    world_size - a tuple representing the x and y coordinates of the world.
                 (default: 60x60)

    Returns a list of lists of sets indicating the set of resources
    available at each x,y location in the Avida grid.
    """
    infile = open(filename)
    lines = infile.readlines()
    infile.close()

    tasks = []
    # Find all spatial resources and record which cells they're in
    res_order = []
    res_dict = {}
    for line in lines:
        if line.startswith("GRADIENT_RESOURCE"):
            name, cells = parse_gradient(line, world_size)
        elif line.startswith("CELL"):
            name, cells = parse_cell(line, world_size)
        elif line.startswith("REACTION"):
            task = parse_reaction(line)
            if task not in tasks:
                tasks.append(task)
        else:
            continue
        dict_increment(res_dict, name, cells)
        if name not in res_order:
            res_order.append(name)

    # Create a map of niches across the environment and return it
    grid = make_niche_grid(res_dict, world_size)
    return EnvironmentFile(grid, res_order, world_size, filename, tasks)
def function[parse_environment_file, parameter[filename, world_size]]: constant[ Extract information about spatial resources from an environment file. Arguments: filename - a string representing the path to the environment file. world_size - a tuple representing the x and y coordinates of the world. (default: 60x60) Returns a list of lists of sets indicating the set of resources available at each x,y location in the Avida grid. ] variable[infile] assign[=] call[name[open], parameter[name[filename]]] variable[lines] assign[=] call[name[infile].readlines, parameter[]] call[name[infile].close, parameter[]] variable[tasks] assign[=] list[[]] variable[res_order] assign[=] list[[]] variable[res_dict] assign[=] dictionary[[], []] for taget[name[line]] in starred[name[lines]] begin[:] if call[name[line].startswith, parameter[constant[GRADIENT_RESOURCE]]] begin[:] <ast.Tuple object at 0x7da1b1438370> assign[=] call[name[parse_gradient], parameter[name[line], name[world_size]]] call[name[dict_increment], parameter[name[res_dict], name[name], name[cells]]] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[res_order]] begin[:] call[name[res_order].append, parameter[name[name]]] variable[grid] assign[=] call[name[make_niche_grid], parameter[name[res_dict], name[world_size]]] return[call[name[EnvironmentFile], parameter[name[grid], name[res_order], name[world_size], name[filename], name[tasks]]]]
keyword[def] identifier[parse_environment_file] ( identifier[filename] , identifier[world_size] =( literal[int] , literal[int] )): literal[string] identifier[infile] = identifier[open] ( identifier[filename] ) identifier[lines] = identifier[infile] . identifier[readlines] () identifier[infile] . identifier[close] () identifier[tasks] =[] identifier[res_order] =[] identifier[res_dict] ={} keyword[for] identifier[line] keyword[in] identifier[lines] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[name] , identifier[cells] = identifier[parse_gradient] ( identifier[line] , identifier[world_size] ) keyword[elif] identifier[line] . identifier[startswith] ( literal[string] ): identifier[name] , identifier[cells] = identifier[parse_cell] ( identifier[line] , identifier[world_size] ) keyword[elif] identifier[line] . identifier[startswith] ( literal[string] ): identifier[task] = identifier[parse_reaction] ( identifier[line] ) keyword[if] identifier[task] keyword[not] keyword[in] identifier[tasks] : identifier[tasks] . identifier[append] ( identifier[task] ) keyword[else] : keyword[continue] identifier[dict_increment] ( identifier[res_dict] , identifier[name] , identifier[cells] ) keyword[if] identifier[name] keyword[not] keyword[in] identifier[res_order] : identifier[res_order] . identifier[append] ( identifier[name] ) identifier[grid] = identifier[make_niche_grid] ( identifier[res_dict] , identifier[world_size] ) keyword[return] identifier[EnvironmentFile] ( identifier[grid] , identifier[res_order] , identifier[world_size] , identifier[filename] , identifier[tasks] )
def parse_environment_file(filename, world_size=(60, 60)):
    """
    Extract information about spatial resources from an environment file.

    Arguments:
    filename - a string representing the path to the environment file.
    world_size - a tuple representing the x and y coordinates of the world.
                 (default: 60x60)

    Returns a list of lists of sets indicating the set of resources
    available at each x,y location in the Avida grid.
    """
    infile = open(filename)
    lines = infile.readlines()
    infile.close()
    tasks = []
    # Find all spatial resources and record which cells they're in
    res_order = []
    res_dict = {}
    for line in lines:
        if line.startswith('GRADIENT_RESOURCE'):
            (name, cells) = parse_gradient(line, world_size) # depends on [control=['if'], data=[]]
        elif line.startswith('CELL'):
            (name, cells) = parse_cell(line, world_size) # depends on [control=['if'], data=[]]
        elif line.startswith('REACTION'):
            task = parse_reaction(line)
            if task not in tasks:
                tasks.append(task) # depends on [control=['if'], data=['task', 'tasks']]
            # depends on [control=['if'], data=[]]
        else:
            continue
        dict_increment(res_dict, name, cells)
        if name not in res_order:
            res_order.append(name) # depends on [control=['if'], data=['name', 'res_order']]
        # depends on [control=['for'], data=['line']]
    # Create a map of niches across the environment and return it
    grid = make_niche_grid(res_dict, world_size)
    return EnvironmentFile(grid, res_order, world_size, filename, tasks)
def search_groups(self, args):
    """
    Executes a search
    flickr:(credsfile),search,(arg1)=(val1),(arg2)=(val2)...
    """
    kwargs = {'text': args[0]}
    return self._paged_api_call(self.flickr.groups_search, kwargs, 'group')
def function[search_groups, parameter[self, args]]: constant[ Executes a search flickr:(credsfile),search,(arg1)=(val1),(arg2)=(val2)... ] variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b1622e60>], [<ast.Subscript object at 0x7da1b16206d0>]] return[call[name[self]._paged_api_call, parameter[name[self].flickr.groups_search, name[kwargs], constant[group]]]]
keyword[def] identifier[search_groups] ( identifier[self] , identifier[args] ): literal[string] identifier[kwargs] ={ literal[string] : identifier[args] [ literal[int] ]} keyword[return] identifier[self] . identifier[_paged_api_call] ( identifier[self] . identifier[flickr] . identifier[groups_search] , identifier[kwargs] , literal[string] )
def search_groups(self, args):
    """
    Executes a search
    flickr:(credsfile),search,(arg1)=(val1),(arg2)=(val2)...
    """
    kwargs = {'text': args[0]}
    return self._paged_api_call(self.flickr.groups_search, kwargs, 'group')
async def async_get_forecast(self) -> List[SmhiForecast]:
    """
    Returns a list of forecasts. The first one in the list is the current one.
    """
    json_data = await self._api.async_get_forecast_api(self._longitude,
                                                       self._latitude)
    return _get_forecast(json_data)
<ast.AsyncFunctionDef object at 0x7da2046226b0>
keyword[async] keyword[def] identifier[async_get_forecast] ( identifier[self] )-> identifier[List] [ identifier[SmhiForecast] ]: literal[string] identifier[json_data] = keyword[await] identifier[self] . identifier[_api] . identifier[async_get_forecast_api] ( identifier[self] . identifier[_longitude] , identifier[self] . identifier[_latitude] ) keyword[return] identifier[_get_forecast] ( identifier[json_data] )
async def async_get_forecast(self) -> List[SmhiForecast]:
    """
    Returns a list of forecasts. The first one in the list is the current one.
    """
    json_data = await self._api.async_get_forecast_api(self._longitude, self._latitude)
    return _get_forecast(json_data)
def sum(self, projection=None):
    """
    Takes sum of elements in sequence.

    >>> seq([1, 2, 3, 4]).sum()
    10

    >>> seq([(1, 2), (1, 3), (1, 4)]).sum(lambda x: x[0])
    3

    :param projection: function to project on the sequence before taking the sum
    :return: sum of elements in sequence
    """
    if projection:
        return sum(self.map(projection))
    else:
        return sum(self)
def function[sum, parameter[self, projection]]: constant[ Takes sum of elements in sequence. >>> seq([1, 2, 3, 4]).sum() 10 >>> seq([(1, 2), (1, 3), (1, 4)]).sum(lambda x: x[0]) 3 :param projection: function to project on the sequence before taking the sum :return: sum of elements in sequence ] if name[projection] begin[:] return[call[name[sum], parameter[call[name[self].map, parameter[name[projection]]]]]]
keyword[def] identifier[sum] ( identifier[self] , identifier[projection] = keyword[None] ): literal[string] keyword[if] identifier[projection] : keyword[return] identifier[sum] ( identifier[self] . identifier[map] ( identifier[projection] )) keyword[else] : keyword[return] identifier[sum] ( identifier[self] )
def sum(self, projection=None):
    """
    Takes sum of elements in sequence.

    >>> seq([1, 2, 3, 4]).sum()
    10

    >>> seq([(1, 2), (1, 3), (1, 4)]).sum(lambda x: x[0])
    3

    :param projection: function to project on the sequence before taking the sum
    :return: sum of elements in sequence
    """
    if projection:
        return sum(self.map(projection)) # depends on [control=['if'], data=[]]
    else:
        return sum(self)
def rsdl_rn(self, AX, Y):
    """Compute primal residual normalisation term."""
    # The primal residual normalisation term is
    # max( ||A x^(k)||_2, ||B y^(k)||_2 ) and B = -(I I I ...)^T.
    # The scaling by sqrt(Nb) of the l2 norm of Y accounts for the
    # block replication introduced by multiplication by B
    return max((np.linalg.norm(AX), np.sqrt(self.Nb) * np.linalg.norm(Y)))
def function[rsdl_rn, parameter[self, AX, Y]]: constant[Compute primal residual normalisation term.] return[call[name[max], parameter[tuple[[<ast.Call object at 0x7da20c6c5db0>, <ast.BinOp object at 0x7da20c6c70d0>]]]]]
keyword[def] identifier[rsdl_rn] ( identifier[self] , identifier[AX] , identifier[Y] ): literal[string] keyword[return] identifier[max] (( identifier[np] . identifier[linalg] . identifier[norm] ( identifier[AX] ), identifier[np] . identifier[sqrt] ( identifier[self] . identifier[Nb] )* identifier[np] . identifier[linalg] . identifier[norm] ( identifier[Y] )))
def rsdl_rn(self, AX, Y):
    """Compute primal residual normalisation term."""
    # The primal residual normalisation term is
    # max( ||A x^(k)||_2, ||B y^(k)||_2 ) and B = -(I I I ...)^T.
    # The scaling by sqrt(Nb) of the l2 norm of Y accounts for the
    # block replication introduced by multiplication by B
    return max((np.linalg.norm(AX), np.sqrt(self.Nb) * np.linalg.norm(Y)))
def unirange(a, b):
    """Returns a regular expression string to match the given non-BMP range."""
    if b < a:
        raise ValueError("Bad character range")
    if a < 0x10000 or b < 0x10000:
        raise ValueError("unirange is only defined for non-BMP ranges")

    if sys.maxunicode > 0xffff:
        # wide build
        return u'[%s-%s]' % (unichr(a), unichr(b))
    else:
        # narrow build stores surrogates, and the 're' module handles them
        # (incorrectly) as characters. Since there is still ordering among
        # these characters, expand the range to one that it understands. Some
        # background in http://bugs.python.org/issue3665 and
        # http://bugs.python.org/issue12749
        #
        # Additionally, the lower constants are using unichr rather than
        # literals because jython [which uses the wide path] can't load this
        # file if they are literals.
        ah, al = _surrogatepair(a)
        bh, bl = _surrogatepair(b)
        if ah == bh:
            return u'(?:%s[%s-%s])' % (unichr(ah), unichr(al), unichr(bl))
        else:
            buf = []
            buf.append(u'%s[%s-%s]' % (unichr(ah), unichr(al),
                                       ah == bh and unichr(bl) or unichr(0xdfff)))
            if bh - ah > 1:
                # cover whole surrogate planes strictly between the endpoints
                buf.append(u'[%s-%s][%s-%s]' % (unichr(ah + 1), unichr(bh - 1),
                                                unichr(0xdc00), unichr(0xdfff)))
            if ah != bh:
                buf.append(u'%s[%s-%s]' % (unichr(bh), unichr(0xdc00), unichr(bl)))
            return u'(?:' + u'|'.join(buf) + u')'
def function[unirange, parameter[a, b]]: constant[Returns a regular expression string to match the given non-BMP range.] if compare[name[b] less[<] name[a]] begin[:] <ast.Raise object at 0x7da20c6aa0b0> if <ast.BoolOp object at 0x7da20c6aa440> begin[:] <ast.Raise object at 0x7da20c6ab130> if compare[name[sys].maxunicode greater[>] constant[65535]] begin[:] return[binary_operation[constant[[%s-%s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20c6abd90>, <ast.Call object at 0x7da2047e9ea0>]]]]
keyword[def] identifier[unirange] ( identifier[a] , identifier[b] ): literal[string] keyword[if] identifier[b] < identifier[a] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[a] < literal[int] keyword[or] identifier[b] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[sys] . identifier[maxunicode] > literal[int] : keyword[return] literal[string] %( identifier[unichr] ( identifier[a] ), identifier[unichr] ( identifier[b] )) keyword[else] : identifier[ah] , identifier[al] = identifier[_surrogatepair] ( identifier[a] ) identifier[bh] , identifier[bl] = identifier[_surrogatepair] ( identifier[b] ) keyword[if] identifier[ah] == identifier[bh] : keyword[return] literal[string] %( identifier[unichr] ( identifier[ah] ), identifier[unichr] ( identifier[al] ), identifier[unichr] ( identifier[bl] )) keyword[else] : identifier[buf] =[] identifier[buf] . identifier[append] ( literal[string] % ( identifier[unichr] ( identifier[ah] ), identifier[unichr] ( identifier[al] ), identifier[ah] == identifier[bh] keyword[and] identifier[unichr] ( identifier[bl] ) keyword[or] identifier[unichr] ( literal[int] ))) keyword[if] identifier[ah] - identifier[bh] > literal[int] : identifier[buf] . identifier[append] ( literal[string] % identifier[unichr] ( identifier[ah] + literal[int] ), identifier[unichr] ( identifier[bh] - literal[int] ), identifier[unichr] ( literal[int] ), identifier[unichr] ( literal[int] )) keyword[if] identifier[ah] != identifier[bh] : identifier[buf] . identifier[append] ( literal[string] % ( identifier[unichr] ( identifier[bh] ), identifier[unichr] ( literal[int] ), identifier[unichr] ( identifier[bl] ))) keyword[return] literal[string] + literal[string] . identifier[join] ( identifier[buf] )+ literal[string]
def unirange(a, b):
    """Returns a regular expression string to match the given non-BMP range."""
    if b < a:
        raise ValueError('Bad character range') # depends on [control=['if'], data=[]]
    if a < 65536 or b < 65536:
        raise ValueError('unirange is only defined for non-BMP ranges') # depends on [control=['if'], data=[]]
    if sys.maxunicode > 65535:
        # wide build
        return u'[%s-%s]' % (unichr(a), unichr(b)) # depends on [control=['if'], data=[]]
    else:
        # narrow build stores surrogates, and the 're' module handles them
        # (incorrectly) as characters. Since there is still ordering among
        # these characters, expand the range to one that it understands. Some
        # background in http://bugs.python.org/issue3665 and
        # http://bugs.python.org/issue12749
        #
        # Additionally, the lower constants are using unichr rather than
        # literals because jython [which uses the wide path] can't load this
        # file if they are literals.
        (ah, al) = _surrogatepair(a)
        (bh, bl) = _surrogatepair(b)
        if ah == bh:
            return u'(?:%s[%s-%s])' % (unichr(ah), unichr(al), unichr(bl)) # depends on [control=['if'], data=['ah']]
        else:
            buf = []
            buf.append(u'%s[%s-%s]' % (unichr(ah), unichr(al), ah == bh and unichr(bl) or unichr(57343)))
            if bh - ah > 1:
                # cover whole surrogate planes strictly between the endpoints
                buf.append(u'[%s-%s][%s-%s]' % (unichr(ah + 1), unichr(bh - 1), unichr(56320), unichr(57343))) # depends on [control=['if'], data=[]]
            if ah != bh:
                buf.append(u'%s[%s-%s]' % (unichr(bh), unichr(56320), unichr(bl))) # depends on [control=['if'], data=['bh']]
            return u'(?:' + u'|'.join(buf) + u')'
def _bitResponseToValue(bytestring):
    """Convert a response string to a numerical value.

    Args:
        bytestring (str): A string of length 1. Can be for example ``\\x01``.

    Returns:
        The converted value (int).

    Raises:
        TypeError, ValueError
    """
    _checkString(bytestring, description='bytestring', minlength=1, maxlength=1)

    RESPONSE_ON = '\x01'
    RESPONSE_OFF = '\x00'

    if bytestring == RESPONSE_ON:
        return 1
    elif bytestring == RESPONSE_OFF:
        return 0
    else:
        raise ValueError('Could not convert bit response to a value. Input: {0!r}'.format(bytestring))
def function[_bitResponseToValue, parameter[bytestring]]: constant[Convert a response string to a numerical value. Args: bytestring (str): A string of length 1. Can be for example ``\x01``. Returns: The converted value (int). Raises: TypeError, ValueError ] call[name[_checkString], parameter[name[bytestring]]] variable[RESPONSE_ON] assign[=] constant[] variable[RESPONSE_OFF] assign[=] constant[] if compare[name[bytestring] equal[==] name[RESPONSE_ON]] begin[:] return[constant[1]]
keyword[def] identifier[_bitResponseToValue] ( identifier[bytestring] ): literal[string] identifier[_checkString] ( identifier[bytestring] , identifier[description] = literal[string] , identifier[minlength] = literal[int] , identifier[maxlength] = literal[int] ) identifier[RESPONSE_ON] = literal[string] identifier[RESPONSE_OFF] = literal[string] keyword[if] identifier[bytestring] == identifier[RESPONSE_ON] : keyword[return] literal[int] keyword[elif] identifier[bytestring] == identifier[RESPONSE_OFF] : keyword[return] literal[int] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[bytestring] ))
def _bitResponseToValue(bytestring):
    """Convert a response string to a numerical value.

    Args:
        bytestring (str): A string of length 1. Can be for example ``\\x01``.

    Returns:
        The converted value (int).

    Raises:
        TypeError, ValueError
    """
    _checkString(bytestring, description='bytestring', minlength=1, maxlength=1)
    RESPONSE_ON = '\x01'
    RESPONSE_OFF = '\x00'
    if bytestring == RESPONSE_ON:
        return 1 # depends on [control=['if'], data=[]]
    elif bytestring == RESPONSE_OFF:
        return 0 # depends on [control=['if'], data=[]]
    else:
        raise ValueError('Could not convert bit response to a value. Input: {0!r}'.format(bytestring))
def generate(env):
    """Add Builders and construction variables for javac to an Environment."""
    java_file = SCons.Tool.CreateJavaFileBuilder(env)
    java_class = SCons.Tool.CreateJavaClassFileBuilder(env)
    java_class_dir = SCons.Tool.CreateJavaClassDirBuilder(env)
    java_class.add_emitter(None, emit_java_classes)
    java_class.add_emitter(env.subst('$JAVASUFFIX'), emit_java_classes)
    java_class_dir.emitter = emit_java_classes

    env.AddMethod(Java)

    env['JAVAC'] = 'javac'
    env['JAVACFLAGS'] = SCons.Util.CLVar('')
    env['JAVABOOTCLASSPATH'] = []
    env['JAVACLASSPATH'] = []
    env['JAVASOURCEPATH'] = []
    env['_javapathopt'] = pathopt
    env['_JAVABOOTCLASSPATH'] = '${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} '
    env['_JAVACLASSPATH'] = '${_javapathopt("-classpath", "JAVACLASSPATH")} '
    env['_JAVASOURCEPATH'] = '${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} '
    env['_JAVASOURCEPATHDEFAULT'] = '${TARGET.attributes.java_sourcedir}'
    env['_JAVACCOM'] = '$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES'
    env['JAVACCOM'] = "${TEMPFILE('$_JAVACCOM','$JAVACCOMSTR')}"
    env['JAVACLASSSUFFIX'] = '.class'
    env['JAVASUFFIX'] = '.java'
def function[generate, parameter[env]]: constant[Add Builders and construction variables for javac to an Environment.] variable[java_file] assign[=] call[name[SCons].Tool.CreateJavaFileBuilder, parameter[name[env]]] variable[java_class] assign[=] call[name[SCons].Tool.CreateJavaClassFileBuilder, parameter[name[env]]] variable[java_class_dir] assign[=] call[name[SCons].Tool.CreateJavaClassDirBuilder, parameter[name[env]]] call[name[java_class].add_emitter, parameter[constant[None], name[emit_java_classes]]] call[name[java_class].add_emitter, parameter[call[name[env].subst, parameter[constant[$JAVASUFFIX]]], name[emit_java_classes]]] name[java_class_dir].emitter assign[=] name[emit_java_classes] call[name[env].AddMethod, parameter[name[Java]]] call[name[env]][constant[JAVAC]] assign[=] constant[javac] call[name[env]][constant[JAVACFLAGS]] assign[=] call[name[SCons].Util.CLVar, parameter[constant[]]] call[name[env]][constant[JAVABOOTCLASSPATH]] assign[=] list[[]] call[name[env]][constant[JAVACLASSPATH]] assign[=] list[[]] call[name[env]][constant[JAVASOURCEPATH]] assign[=] list[[]] call[name[env]][constant[_javapathopt]] assign[=] name[pathopt] call[name[env]][constant[_JAVABOOTCLASSPATH]] assign[=] constant[${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} ] call[name[env]][constant[_JAVACLASSPATH]] assign[=] constant[${_javapathopt("-classpath", "JAVACLASSPATH")} ] call[name[env]][constant[_JAVASOURCEPATH]] assign[=] constant[${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} ] call[name[env]][constant[_JAVASOURCEPATHDEFAULT]] assign[=] constant[${TARGET.attributes.java_sourcedir}] call[name[env]][constant[_JAVACCOM]] assign[=] constant[$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES] call[name[env]][constant[JAVACCOM]] assign[=] constant[${TEMPFILE('$_JAVACCOM','$JAVACCOMSTR')}] call[name[env]][constant[JAVACLASSSUFFIX]] assign[=] constant[.class] call[name[env]][constant[JAVASUFFIX]] assign[=] constant[.java]
keyword[def] identifier[generate] ( identifier[env] ): literal[string] identifier[java_file] = identifier[SCons] . identifier[Tool] . identifier[CreateJavaFileBuilder] ( identifier[env] ) identifier[java_class] = identifier[SCons] . identifier[Tool] . identifier[CreateJavaClassFileBuilder] ( identifier[env] ) identifier[java_class_dir] = identifier[SCons] . identifier[Tool] . identifier[CreateJavaClassDirBuilder] ( identifier[env] ) identifier[java_class] . identifier[add_emitter] ( keyword[None] , identifier[emit_java_classes] ) identifier[java_class] . identifier[add_emitter] ( identifier[env] . identifier[subst] ( literal[string] ), identifier[emit_java_classes] ) identifier[java_class_dir] . identifier[emitter] = identifier[emit_java_classes] identifier[env] . identifier[AddMethod] ( identifier[Java] ) identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= identifier[SCons] . identifier[Util] . identifier[CLVar] ( literal[string] ) identifier[env] [ literal[string] ]=[] identifier[env] [ literal[string] ]=[] identifier[env] [ literal[string] ]=[] identifier[env] [ literal[string] ]= identifier[pathopt] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string] identifier[env] [ literal[string] ]= literal[string]
def generate(env):
    """Add Builders and construction variables for javac to an Environment."""
    java_file = SCons.Tool.CreateJavaFileBuilder(env)
    java_class = SCons.Tool.CreateJavaClassFileBuilder(env)
    java_class_dir = SCons.Tool.CreateJavaClassDirBuilder(env)
    java_class.add_emitter(None, emit_java_classes)
    java_class.add_emitter(env.subst('$JAVASUFFIX'), emit_java_classes)
    java_class_dir.emitter = emit_java_classes
    env.AddMethod(Java)
    env['JAVAC'] = 'javac'
    env['JAVACFLAGS'] = SCons.Util.CLVar('')
    env['JAVABOOTCLASSPATH'] = []
    env['JAVACLASSPATH'] = []
    env['JAVASOURCEPATH'] = []
    env['_javapathopt'] = pathopt
    env['_JAVABOOTCLASSPATH'] = '${_javapathopt("-bootclasspath", "JAVABOOTCLASSPATH")} '
    env['_JAVACLASSPATH'] = '${_javapathopt("-classpath", "JAVACLASSPATH")} '
    env['_JAVASOURCEPATH'] = '${_javapathopt("-sourcepath", "JAVASOURCEPATH", "_JAVASOURCEPATHDEFAULT")} '
    env['_JAVASOURCEPATHDEFAULT'] = '${TARGET.attributes.java_sourcedir}'
    env['_JAVACCOM'] = '$JAVAC $JAVACFLAGS $_JAVABOOTCLASSPATH $_JAVACLASSPATH -d ${TARGET.attributes.java_classdir} $_JAVASOURCEPATH $SOURCES'
    env['JAVACCOM'] = "${TEMPFILE('$_JAVACCOM','$JAVACCOMSTR')}"
    env['JAVACLASSSUFFIX'] = '.class'
    env['JAVASUFFIX'] = '.java'
def cardinality(self):
    """A tuple containing the cardinality for this Slot.

    The Python equivalent of the CLIPS deftemplate-slot-cardinality function.
    """
    data = clips.data.DataObject(self._env)

    lib.EnvDeftemplateSlotCardinality(
        self._env, self._tpl, self._name, data.byref)

    return tuple(data.value) if isinstance(data.value, list) else ()
def function[cardinality, parameter[self]]: constant[A tuple containing the cardinality for this Slot. The Python equivalent of the CLIPS deftemplate-slot-cardinality function. ] variable[data] assign[=] call[name[clips].data.DataObject, parameter[name[self]._env]] call[name[lib].EnvDeftemplateSlotCardinality, parameter[name[self]._env, name[self]._tpl, name[self]._name, name[data].byref]] return[<ast.IfExp object at 0x7da1b056f790>]
keyword[def] identifier[cardinality] ( identifier[self] ): literal[string] identifier[data] = identifier[clips] . identifier[data] . identifier[DataObject] ( identifier[self] . identifier[_env] ) identifier[lib] . identifier[EnvDeftemplateSlotCardinality] ( identifier[self] . identifier[_env] , identifier[self] . identifier[_tpl] , identifier[self] . identifier[_name] , identifier[data] . identifier[byref] ) keyword[return] identifier[tuple] ( identifier[data] . identifier[value] ) keyword[if] identifier[isinstance] ( identifier[data] . identifier[value] , identifier[list] ) keyword[else] ()
def cardinality(self):
    """A tuple containing the cardinality for this Slot.

    The Python equivalent of the CLIPS deftemplate-slot-cardinality function.
    """
    data = clips.data.DataObject(self._env)
    lib.EnvDeftemplateSlotCardinality(self._env, self._tpl, self._name, data.byref)
    return tuple(data.value) if isinstance(data.value, list) else ()
async def connect(self, client_id, conn_string):
    """Connect to a device on behalf of a client.

    See :meth:`AbstractDeviceAdapter.connect`.

    Args:
        client_id (str): The client we are working for.
        conn_string (str): A connection string that will be passed to the
            underlying device adapter to connect.

    Raises:
        DeviceServerError: There is an issue with your client_id.
        DeviceAdapterError: The adapter had an issue connecting.
    """
    conn_id = self.adapter.unique_conn_id()

    self._client_info(client_id)

    await self.adapter.connect(conn_id, conn_string)
    self._hook_connect(conn_string, conn_id, client_id)
<ast.AsyncFunctionDef object at 0x7da18f58f640>
keyword[async] keyword[def] identifier[connect] ( identifier[self] , identifier[client_id] , identifier[conn_string] ): literal[string] identifier[conn_id] = identifier[self] . identifier[adapter] . identifier[unique_conn_id] () identifier[self] . identifier[_client_info] ( identifier[client_id] ) keyword[await] identifier[self] . identifier[adapter] . identifier[connect] ( identifier[conn_id] , identifier[conn_string] ) identifier[self] . identifier[_hook_connect] ( identifier[conn_string] , identifier[conn_id] , identifier[client_id] )
async def connect(self, client_id, conn_string):
    """Connect to a device on behalf of a client.

    See :meth:`AbstractDeviceAdapter.connect`.

    Args:
        client_id (str): The client we are working for.
        conn_string (str): A connection string that will be passed to the
            underlying device adapter to connect.

    Raises:
        DeviceServerError: There is an issue with your client_id.
        DeviceAdapterError: The adapter had an issue connecting.
    """
    conn_id = self.adapter.unique_conn_id()
    self._client_info(client_id)
    await self.adapter.connect(conn_id, conn_string)
    self._hook_connect(conn_string, conn_id, client_id)
def slice(self, rng, num_of_slices=None, slice_pos=None,
          slice_start=None, slice_end=None, cache_dir=None):
    '''
    Slices the data iterator so that newly generated data iterator has
    access to limited portion of the original data.

    Args:
        rng (numpy.random.RandomState): Random generator for Initializer.
        num_of_slices(int): Total number of slices to be made. Must be
            used together with `slice_pos`.
        slice_pos(int): Position of the slice to be assigned to the new
            data iterator. Must be used together with `num_of_slices`.
        slice_start(int): Starting position of the range to be sliced into
            new data iterator. Must be used together with `slice_end`.
        slice_end(int) : End position of the range to be sliced into new
            data iterator. Must be used together with `slice_start`.
        cache_dir(str) : Directory to save cache files

    Example:

    .. code-block:: python

        from nnabla.utils.data_iterator import data_iterator_simple
        import numpy as np

        def load_func1(index):
            d = np.ones((2, 2)) * index
            return d

        di = data_iterator_simple(load_func1, 1000, batch_size=3)

        di_s1 = di.slice(None, num_of_slices=10, slice_pos=0)
        di_s2 = di.slice(None, num_of_slices=10, slice_pos=1)

        di_s3 = di.slice(None, slice_start=100, slice_end=200)
        di_s4 = di.slice(None, slice_start=300, slice_end=400)
    '''
    if num_of_slices is not None and slice_pos is not None and \
            slice_start is None and slice_end is None:
        size = self._size // num_of_slices
        amount = self._size % num_of_slices
        slice_start = slice_pos * size
        if slice_pos < amount:
            slice_start += slice_pos
        else:
            slice_start += amount
        slice_end = slice_start + size
        if slice_end > self._size:
            slice_start -= (slice_end - self._size)
            slice_end = self._size
    elif num_of_slices is None and slice_pos is None and \
            slice_start is not None and slice_end is not None:
        pass
    else:
        logger.critical(
            'You must specify position(num_of_slice and slice_pos) or range(slice_start and slice_end).')
        return None

    if cache_dir is None:
        ds = self._data_source
        while '_data_source' in dir(ds):
            if '_cache_dir' in dir(ds):
                cache_dir = ds._cache_dir
            ds = ds._data_source

    if cache_dir is None:
        return DataIterator(
            DataSourceWithMemoryCache(
                SlicedDataSource(
                    self._data_source,
                    self._data_source.shuffle,
                    slice_start=slice_start,
                    slice_end=slice_end),
                shuffle=self._shuffle,
                rng=rng),
            self._batch_size)
    else:
        return DataIterator(
            DataSourceWithMemoryCache(
                DataSourceWithFileCache(
                    SlicedDataSource(
                        self._data_source,
                        self._data_source.shuffle,
                        slice_start=slice_start,
                        slice_end=slice_end),
                    cache_dir=cache_dir,
                    cache_file_name_prefix='cache_sliced_{:08d}_{:08d}'.format(
                        slice_start, slice_end),
                    shuffle=self._shuffle,
                    rng=rng),
                shuffle=self._shuffle,
                rng=rng),
            self._batch_size)
def function[slice, parameter[self, rng, num_of_slices, slice_pos, slice_start, slice_end, cache_dir]]: constant[ Slices the data iterator so that newly generated data iterator has access to limited portion of the original data. Args: rng (numpy.random.RandomState): Random generator for Initializer. num_of_slices(int): Total number of slices to be made. Must be used together with `slice_pos`. slice_pos(int): Position of the slice to be assigned to the new data iterator. Must be used together with `num_of_slices`. slice_start(int): Starting position of the range to be sliced into new data iterator. Must be used together with `slice_end`. slice_end(int) : End position of the range to be sliced into new data iterator. Must be used together with `slice_start`. cache_dir(str) : Directory to save cache files Example: .. code-block:: python from nnabla.utils.data_iterator import data_iterator_simple import numpy as np def load_func1(index): d = np.ones((2, 2)) * index return d di = data_iterator_simple(load_func1, 1000, batch_size=3) di_s1 = di.slice(None, num_of_slices=10, slice_pos=0) di_s2 = di.slice(None, num_of_slices=10, slice_pos=1) di_s3 = di.slice(None, slice_start=100, slice_end=200) di_s4 = di.slice(None, slice_start=300, slice_end=400) ] if <ast.BoolOp object at 0x7da20e954460> begin[:] variable[size] assign[=] binary_operation[name[self]._size <ast.FloorDiv object at 0x7da2590d6bc0> name[num_of_slices]] variable[amount] assign[=] binary_operation[name[self]._size <ast.Mod object at 0x7da2590d6920> name[num_of_slices]] variable[slice_start] assign[=] binary_operation[name[slice_pos] * name[size]] if compare[name[slice_pos] less[<] name[amount]] begin[:] <ast.AugAssign object at 0x7da20e954940> variable[slice_end] assign[=] binary_operation[name[slice_start] + name[size]] if compare[name[slice_end] greater[>] name[self]._size] begin[:] <ast.AugAssign object at 0x7da20e956050> variable[slice_end] assign[=] name[self]._size if compare[name[cache_dir] is constant[None]] begin[:] variable[ds] assign[=] name[self]._data_source while compare[constant[_data_source] in call[name[dir], parameter[name[ds]]]] begin[:] if compare[constant[_cache_dir] in call[name[dir], parameter[name[ds]]]] begin[:] variable[cache_dir] assign[=] name[ds]._cache_dir variable[ds] assign[=] name[ds]._data_source if compare[name[cache_dir] is constant[None]] begin[:] return[call[name[DataIterator], parameter[call[name[DataSourceWithMemoryCache], parameter[call[name[SlicedDataSource], parameter[name[self]._data_source, name[self]._data_source.shuffle]]]], name[self]._batch_size]]]
keyword[def] identifier[slice] ( identifier[self] , identifier[rng] , identifier[num_of_slices] = keyword[None] , identifier[slice_pos] = keyword[None] , identifier[slice_start] = keyword[None] , identifier[slice_end] = keyword[None] , identifier[cache_dir] = keyword[None] ): literal[string] keyword[if] identifier[num_of_slices] keyword[is] keyword[not] keyword[None] keyword[and] identifier[slice_pos] keyword[is] keyword[not] keyword[None] keyword[and] identifier[slice_start] keyword[is] keyword[None] keyword[and] identifier[slice_end] keyword[is] keyword[None] : identifier[size] = identifier[self] . identifier[_size] // identifier[num_of_slices] identifier[amount] = identifier[self] . identifier[_size] % identifier[num_of_slices] identifier[slice_start] = identifier[slice_pos] * identifier[size] keyword[if] identifier[slice_pos] < identifier[amount] : identifier[slice_start] += identifier[slice_pos] keyword[else] : identifier[slice_start] += identifier[amount] identifier[slice_end] = identifier[slice_start] + identifier[size] keyword[if] identifier[slice_end] > identifier[self] . identifier[_size] : identifier[slice_start] -=( identifier[slice_end] - identifier[self] . identifier[_size] ) identifier[slice_end] = identifier[self] . identifier[_size] keyword[elif] identifier[num_of_slices] keyword[is] keyword[None] keyword[and] identifier[slice_pos] keyword[is] keyword[None] keyword[and] identifier[slice_start] keyword[is] keyword[not] keyword[None] keyword[and] identifier[slice_end] keyword[is] keyword[not] keyword[None] : keyword[pass] keyword[else] : identifier[logger] . identifier[critical] ( literal[string] ) keyword[return] keyword[None] keyword[if] identifier[cache_dir] keyword[is] keyword[None] : identifier[ds] = identifier[self] . identifier[_data_source] keyword[while] literal[string] keyword[in] identifier[dir] ( identifier[ds] ): keyword[if] literal[string] keyword[in] identifier[dir] ( identifier[ds] ): identifier[cache_dir] = identifier[ds] . identifier[_cache_dir] identifier[ds] = identifier[ds] . identifier[_data_source] keyword[if] identifier[cache_dir] keyword[is] keyword[None] : keyword[return] identifier[DataIterator] ( identifier[DataSourceWithMemoryCache] ( identifier[SlicedDataSource] ( identifier[self] . identifier[_data_source] , identifier[self] . identifier[_data_source] . identifier[shuffle] , identifier[slice_start] = identifier[slice_start] , identifier[slice_end] = identifier[slice_end] ), identifier[shuffle] = identifier[self] . identifier[_shuffle] , identifier[rng] = identifier[rng] ), identifier[self] . identifier[_batch_size] ) keyword[else] : keyword[return] identifier[DataIterator] ( identifier[DataSourceWithMemoryCache] ( identifier[DataSourceWithFileCache] ( identifier[SlicedDataSource] ( identifier[self] . identifier[_data_source] , identifier[self] . identifier[_data_source] . identifier[shuffle] , identifier[slice_start] = identifier[slice_start] , identifier[slice_end] = identifier[slice_end] ), identifier[cache_dir] = identifier[cache_dir] , identifier[cache_file_name_prefix] = literal[string] . identifier[format] ( identifier[slice_start] , identifier[slice_end] ), identifier[shuffle] = identifier[self] . identifier[_shuffle] , identifier[rng] = identifier[rng] ), identifier[shuffle] = identifier[self] . identifier[_shuffle] , identifier[rng] = identifier[rng] ), identifier[self] . identifier[_batch_size] )
def slice(self, rng, num_of_slices=None, slice_pos=None, slice_start=None, slice_end=None, cache_dir=None):
    """
    Slices the data iterator so that newly generated data iterator has
    access to limited portion of the original data.

    Args:
        rng (numpy.random.RandomState): Random generator for Initializer.
        num_of_slices(int): Total number of slices to be made. Must be
            used together with `slice_pos`.
        slice_pos(int): Position of the slice to be assigned to the new
            data iterator. Must be used together with `num_of_slices`.
        slice_start(int): Starting position of the range to be sliced into
            new data iterator. Must be used together with `slice_end`.
        slice_end(int) : End position of the range to be sliced into new
            data iterator. Must be used together with `slice_start`.
        cache_dir(str) : Directory to save cache files

    Example:

    .. code-block:: python

        from nnabla.utils.data_iterator import data_iterator_simple
        import numpy as np

        def load_func1(index):
            d = np.ones((2, 2)) * index
            return d

        di = data_iterator_simple(load_func1, 1000, batch_size=3)

        di_s1 = di.slice(None, num_of_slices=10, slice_pos=0)
        di_s2 = di.slice(None, num_of_slices=10, slice_pos=1)

        di_s3 = di.slice(None, slice_start=100, slice_end=200)
        di_s4 = di.slice(None, slice_start=300, slice_end=400)
    """
    if num_of_slices is not None and slice_pos is not None and (slice_start is None) and (slice_end is None):
        size = self._size // num_of_slices
        amount = self._size % num_of_slices
        slice_start = slice_pos * size
        if slice_pos < amount:
            slice_start += slice_pos # depends on [control=['if'], data=['slice_pos']]
        else:
            slice_start += amount
        slice_end = slice_start + size
        if slice_end > self._size:
            slice_start -= slice_end - self._size
            slice_end = self._size # depends on [control=['if'], data=['slice_end']]
        # depends on [control=['if'], data=[]]
    elif num_of_slices is None and slice_pos is None and (slice_start is not None) and (slice_end is not None):
        pass # depends on [control=['if'], data=[]]
    else:
        logger.critical('You must specify position(num_of_slice and slice_pos) or range(slice_start and slice_end).')
        return None
    if cache_dir is None:
        ds = self._data_source
        while '_data_source' in dir(ds):
            if '_cache_dir' in dir(ds):
                cache_dir = ds._cache_dir # depends on [control=['if'], data=[]]
            ds = ds._data_source # depends on [control=['while'], data=[]]
        # depends on [control=['if'], data=['cache_dir']]
    if cache_dir is None:
        return DataIterator(DataSourceWithMemoryCache(SlicedDataSource(self._data_source, self._data_source.shuffle, slice_start=slice_start, slice_end=slice_end), shuffle=self._shuffle, rng=rng), self._batch_size) # depends on [control=['if'], data=[]]
    else:
        return DataIterator(DataSourceWithMemoryCache(DataSourceWithFileCache(SlicedDataSource(self._data_source, self._data_source.shuffle, slice_start=slice_start, slice_end=slice_end), cache_dir=cache_dir, cache_file_name_prefix='cache_sliced_{:08d}_{:08d}'.format(slice_start, slice_end), shuffle=self._shuffle, rng=rng), shuffle=self._shuffle, rng=rng), self._batch_size)
def slabs(request, server_name):
    """
    Show server slabs.
    """
    data = _context_data({
        'title': _('Memcache Slabs for %s') % server_name,
        'cache_slabs': _get_cache_slabs(server_name),
    }, request)
    return render_to_response('memcache_admin/slabs.html', data, RequestContext(request))
def function[slabs, parameter[request, server_name]]: constant[ Show server slabs. ] variable[data] assign[=] call[name[_context_data], parameter[dictionary[[<ast.Constant object at 0x7da1b013e8f0>, <ast.Constant object at 0x7da1b013c790>], [<ast.BinOp object at 0x7da1b013f8b0>, <ast.Call object at 0x7da1b013f2b0>]], name[request]]] return[call[name[render_to_response], parameter[constant[memcache_admin/slabs.html], name[data], call[name[RequestContext], parameter[name[request]]]]]]
keyword[def] identifier[slabs] ( identifier[request] , identifier[server_name] ): literal[string] identifier[data] = identifier[_context_data] ({ literal[string] : identifier[_] ( literal[string] )% identifier[server_name] , literal[string] : identifier[_get_cache_slabs] ( identifier[server_name] ), }, identifier[request] ) keyword[return] identifier[render_to_response] ( literal[string] , identifier[data] , identifier[RequestContext] ( identifier[request] ))
def slabs(request, server_name):
    """
    Show server slabs.
    """
    data = _context_data({'title': _('Memcache Slabs for %s') % server_name, 'cache_slabs': _get_cache_slabs(server_name)}, request)
    return render_to_response('memcache_admin/slabs.html', data, RequestContext(request))
def disconnect_node(node, src=True, dst=True):
    """Disconnect all connections from node

    :param node: the node to disconnect
    :type node: str
    :returns: None
    :rtype: None
    :raises: None
    """
    if dst:
        destconns = cmds.listConnections(node, connections=True, plugs=True, source=False) or []
        for i in range(0, len(destconns), 2):
            source, dest = destconns[i], destconns[i + 1]
            cmds.disconnectAttr(source, dest)
    if src:
        srcconns = cmds.listConnections(node, connections=True, plugs=True, destination=False) or []
        for i in range(0, len(srcconns), 2):
            source, dest = srcconns[i + 1], srcconns[i]
            cmds.disconnectAttr(source, dest)
def function[disconnect_node, parameter[node, src, dst]]: constant[Disconnect all connections from node :param node: the node to disconnect :type node: str :returns: None :rtype: None :raises: None ] if name[dst] begin[:] variable[destconns] assign[=] <ast.BoolOp object at 0x7da18fe91210> for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[destconns]]], constant[2]]]] begin[:] <ast.Tuple object at 0x7da18fe90f10> assign[=] tuple[[<ast.Subscript object at 0x7da18fe92140>, <ast.Subscript object at 0x7da18fe90130>]] call[name[cmds].disconnectAttr, parameter[name[source], name[dest]]] if name[src] begin[:] variable[srcconns] assign[=] <ast.BoolOp object at 0x7da18fe93af0> for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[srcconns]]], constant[2]]]] begin[:] <ast.Tuple object at 0x7da18f09e020> assign[=] tuple[[<ast.Subscript object at 0x7da18f09fac0>, <ast.Subscript object at 0x7da18f09f550>]] call[name[cmds].disconnectAttr, parameter[name[source], name[dest]]]
keyword[def] identifier[disconnect_node] ( identifier[node] , identifier[src] = keyword[True] , identifier[dst] = keyword[True] ): literal[string] keyword[if] identifier[dst] : identifier[destconns] = identifier[cmds] . identifier[listConnections] ( identifier[node] , identifier[connections] = keyword[True] , identifier[plugs] = keyword[True] , identifier[source] = keyword[False] ) keyword[or] [] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[destconns] ), literal[int] ): identifier[source] , identifier[dest] = identifier[destconns] [ identifier[i] ], identifier[destconns] [ identifier[i] + literal[int] ] identifier[cmds] . identifier[disconnectAttr] ( identifier[source] , identifier[dest] ) keyword[if] identifier[src] : identifier[srcconns] = identifier[cmds] . identifier[listConnections] ( identifier[node] , identifier[connections] = keyword[True] , identifier[plugs] = keyword[True] , identifier[destination] = keyword[False] ) keyword[or] [] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[srcconns] ), literal[int] ): identifier[source] , identifier[dest] = identifier[srcconns] [ identifier[i] + literal[int] ], identifier[srcconns] [ identifier[i] ] identifier[cmds] . identifier[disconnectAttr] ( identifier[source] , identifier[dest] )
def disconnect_node(node, src=True, dst=True):
    """Disconnect all connections from node

    :param node: the node to disconnect
    :type node: str
    :returns: None
    :rtype: None
    :raises: None
    """
    if dst:
        destconns = cmds.listConnections(node, connections=True, plugs=True, source=False) or []
        for i in range(0, len(destconns), 2):
            (source, dest) = (destconns[i], destconns[i + 1])
            cmds.disconnectAttr(source, dest) # depends on [control=['for'], data=['i']]
        # depends on [control=['if'], data=[]]
    if src:
        srcconns = cmds.listConnections(node, connections=True, plugs=True, destination=False) or []
        for i in range(0, len(srcconns), 2):
            (source, dest) = (srcconns[i + 1], srcconns[i])
            cmds.disconnectAttr(source, dest) # depends on [control=['for'], data=['i']]
        # depends on [control=['if'], data=[]]
def other(wxcodes: typing.List[str]) -> str:
    """
    Format wx codes into a spoken word string
    """
    ret = []
    for code in wxcodes:
        item = translate.wxcode(code)
        if item.startswith('Vicinity'):
            # remove the 'Vicinity ' prefix by length; str.lstrip takes a set
            # of characters, so lstrip('Vicinity ') could also eat matching
            # letters at the start of the remaining text
            item = item[len('Vicinity '):] + ' in the Vicinity'
        ret.append(item)
    return '. '.join(ret)
def function[other, parameter[wxcodes]]: constant[ Format wx codes into a spoken word string ] variable[ret] assign[=] list[[]] for taget[name[code]] in starred[name[wxcodes]] begin[:] variable[item] assign[=] call[name[translate].wxcode, parameter[name[code]]] if call[name[item].startswith, parameter[constant[Vicinity]]] begin[:] variable[item] assign[=] binary_operation[call[name[item].lstrip, parameter[constant[Vicinity ]]] + constant[ in the Vicinity]] call[name[ret].append, parameter[name[item]]] return[call[constant[. ].join, parameter[name[ret]]]]
keyword[def] identifier[other] ( identifier[wxcodes] : identifier[typing] . identifier[List] [ identifier[str] ])-> identifier[str] : literal[string] identifier[ret] =[] keyword[for] identifier[code] keyword[in] identifier[wxcodes] : identifier[item] = identifier[translate] . identifier[wxcode] ( identifier[code] ) keyword[if] identifier[item] . identifier[startswith] ( literal[string] ): identifier[item] = identifier[item] . identifier[lstrip] ( literal[string] )+ literal[string] identifier[ret] . identifier[append] ( identifier[item] ) keyword[return] literal[string] . identifier[join] ( identifier[ret] )
def other(wxcodes: typing.List[str]) -> str:
    """
    Format wx codes into a spoken word string
    """
    ret = []
    for code in wxcodes:
        item = translate.wxcode(code)
        if item.startswith('Vicinity'):
            # prefix removed by length rather than lstrip's character set
            item = item[len('Vicinity '):] + ' in the Vicinity' # depends on [control=['if'], data=[]]
        ret.append(item) # depends on [control=['for'], data=['code']]
    return '. '.join(ret)
def gridnet(np, pfile, plenfile, tlenfile, gordfile, outlet=None, workingdir=None,
            mpiexedir=None, exedir=None, log_file=None, runtime_file=None, hostfile=None):
    """Run gridnet"""
    fname = TauDEM.func_name('gridnet')
    return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir),
                      {'-p': pfile, '-o': outlet}, workingdir,
                      None,
                      {'-plen': plenfile, '-tlen': tlenfile, '-gord': gordfile},
                      {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
def function[gridnet, parameter[np, pfile, plenfile, tlenfile, gordfile, outlet, workingdir, mpiexedir, exedir, log_file, runtime_file, hostfile]]: constant[Run gridnet] variable[fname] assign[=] call[name[TauDEM].func_name, parameter[constant[gridnet]]] return[call[name[TauDEM].run, parameter[call[name[FileClass].get_executable_fullpath, parameter[name[fname], name[exedir]]], dictionary[[<ast.Constant object at 0x7da1b2594d30>, <ast.Constant object at 0x7da1b2594340>], [<ast.Name object at 0x7da1b2595cf0>, <ast.Name object at 0x7da1b25943a0>]], name[workingdir], constant[None], dictionary[[<ast.Constant object at 0x7da1b2595840>, <ast.Constant object at 0x7da1b25961a0>, <ast.Constant object at 0x7da1b25975b0>], [<ast.Name object at 0x7da1b2597af0>, <ast.Name object at 0x7da1b25940d0>, <ast.Name object at 0x7da1b2594b20>]], dictionary[[<ast.Constant object at 0x7da1b25dbd90>, <ast.Constant object at 0x7da1b25db0a0>, <ast.Constant object at 0x7da1b25dbc70>], [<ast.Name object at 0x7da1b25db100>, <ast.Name object at 0x7da1b25dbc40>, <ast.Name object at 0x7da1b25dbcd0>]], dictionary[[<ast.Constant object at 0x7da1b25db160>, <ast.Constant object at 0x7da1b25dbc10>], [<ast.Name object at 0x7da1b25dbd00>, <ast.Name object at 0x7da1b25dbb80>]]]]]
keyword[def] identifier[gridnet] ( identifier[np] , identifier[pfile] , identifier[plenfile] , identifier[tlenfile] , identifier[gordfile] , identifier[outlet] = keyword[None] , identifier[workingdir] = keyword[None] , identifier[mpiexedir] = keyword[None] , identifier[exedir] = keyword[None] , identifier[log_file] = keyword[None] , identifier[runtime_file] = keyword[None] , identifier[hostfile] = keyword[None] ): literal[string] identifier[fname] = identifier[TauDEM] . identifier[func_name] ( literal[string] ) keyword[return] identifier[TauDEM] . identifier[run] ( identifier[FileClass] . identifier[get_executable_fullpath] ( identifier[fname] , identifier[exedir] ), { literal[string] : identifier[pfile] , literal[string] : identifier[outlet] }, identifier[workingdir] , keyword[None] , { literal[string] : identifier[plenfile] , literal[string] : identifier[tlenfile] , literal[string] : identifier[gordfile] }, { literal[string] : identifier[mpiexedir] , literal[string] : identifier[hostfile] , literal[string] : identifier[np] }, { literal[string] : identifier[log_file] , literal[string] : identifier[runtime_file] })
def gridnet(np, pfile, plenfile, tlenfile, gordfile, outlet=None, workingdir=None, mpiexedir=None, exedir=None, log_file=None, runtime_file=None, hostfile=None): """Run gridnet""" fname = TauDEM.func_name('gridnet') return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir), {'-p': pfile, '-o': outlet}, workingdir, None, {'-plen': plenfile, '-tlen': tlenfile, '-gord': gordfile}, {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np}, {'logfile': log_file, 'runtimefile': runtime_file})
def dt2ts(dt, drop_micro=False):
    ''' convert datetime objects to timestamp seconds (float) '''
    is_true(HAS_DATEUTIL, "`pip install python_dateutil` required")
    if is_empty(dt, except_=False):
        ts = None
    elif isinstance(dt, (int, long, float)):  # it's a ts already
        ts = float(dt)
    elif isinstance(dt, basestring):  # convert to datetime first
        try:
            parsed_dt = float(dt)
        except (TypeError, ValueError):
            parsed_dt = dt_parse(dt)
        ts = dt2ts(parsed_dt)
    else:
        assert isinstance(dt, (datetime, date))
        # keep micros; see: http://stackoverflow.com/questions/7031031
        ts = ((
            timegm(dt.timetuple()) * 1000.0) +
            (dt.microsecond / 1000.0)) / 1000.0
    if ts is None:
        pass
    elif drop_micro:
        ts = float(int(ts))
    else:
        ts = float(ts)
    return ts
def function[dt2ts, parameter[dt, drop_micro]]: constant[ convert datetime objects to timestamp seconds (float) ] call[name[is_true], parameter[name[HAS_DATEUTIL], constant[`pip install python_dateutil` required]]] if call[name[is_empty], parameter[name[dt]]] begin[:] variable[ts] assign[=] constant[None] if compare[name[ts] is constant[None]] begin[:] pass return[name[ts]]
keyword[def] identifier[dt2ts] ( identifier[dt] , identifier[drop_micro] = keyword[False] ): literal[string] identifier[is_true] ( identifier[HAS_DATEUTIL] , literal[string] ) keyword[if] identifier[is_empty] ( identifier[dt] , identifier[except_] = keyword[False] ): identifier[ts] = keyword[None] keyword[elif] identifier[isinstance] ( identifier[dt] ,( identifier[int] , identifier[long] , identifier[float] )): identifier[ts] = identifier[float] ( identifier[dt] ) keyword[elif] identifier[isinstance] ( identifier[dt] , identifier[basestring] ): keyword[try] : identifier[parsed_dt] = identifier[float] ( identifier[dt] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): identifier[parsed_dt] = identifier[dt_parse] ( identifier[dt] ) identifier[ts] = identifier[dt2ts] ( identifier[parsed_dt] ) keyword[else] : keyword[assert] identifier[isinstance] ( identifier[dt] ,( identifier[datetime] , identifier[date] )) identifier[ts] =(( identifier[timegm] ( identifier[dt] . identifier[timetuple] ())* literal[int] )+ ( identifier[dt] . identifier[microsecond] / literal[int] ))/ literal[int] keyword[if] identifier[ts] keyword[is] keyword[None] : keyword[pass] keyword[elif] identifier[drop_micro] : identifier[ts] = identifier[float] ( identifier[int] ( identifier[ts] )) keyword[else] : identifier[ts] = identifier[float] ( identifier[ts] ) keyword[return] identifier[ts]
def dt2ts(dt, drop_micro=False): """ convert datetime objects to timestamp seconds (float) """ is_true(HAS_DATEUTIL, '`pip install python_dateutil` required') if is_empty(dt, except_=False): ts = None # depends on [control=['if'], data=[]] elif isinstance(dt, (int, long, float)): # it's a ts already ts = float(dt) # depends on [control=['if'], data=[]] elif isinstance(dt, basestring): # convert to datetime first try: parsed_dt = float(dt) # depends on [control=['try'], data=[]] except (TypeError, ValueError): parsed_dt = dt_parse(dt) # depends on [control=['except'], data=[]] ts = dt2ts(parsed_dt) # depends on [control=['if'], data=[]] else: assert isinstance(dt, (datetime, date)) # keep micros; see: http://stackoverflow.com/questions/7031031 ts = (timegm(dt.timetuple()) * 1000.0 + dt.microsecond / 1000.0) / 1000.0 if ts is None: pass # depends on [control=['if'], data=[]] elif drop_micro: ts = float(int(ts)) # depends on [control=['if'], data=[]] else: ts = float(ts) return ts
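A minimal sketch of the epoch arithmetic used above, with an illustrative datetime (the record itself is Python 2 era code, hence long and basestring):

from calendar import timegm
from datetime import datetime

dt = datetime(2020, 1, 1, 0, 0, 0, 500000)   # 0.5 s past midnight UTC
ts = (timegm(dt.timetuple()) * 1000.0 + dt.microsecond / 1000.0) / 1000.0
print(ts)              # 1577836800.5
print(float(int(ts)))  # 1577836800.0, the drop_micro=True path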
def updateCategory(self,name,nmin=None,n=None,nmax=None): """ Smartly updates the given category. Only values that are given will be updated, others will be left unchanged. If the category does not exist, a :py:exc:`KeyError` will be thrown. Use :py:meth:`addCategory()` instead if you want to add a category. """ # smart update, only stuff that was given if name not in self.categories: raise KeyError("No Category with name '%s'"%name) if nmin is not None: self.categories[name][0]=nmin if n is not None: self.categories[name][1]=n if nmax is not None: self.categories[name][2]=nmax self.redraw() self.doAction("progresschange")
def function[updateCategory, parameter[self, name, nmin, n, nmax]]: constant[ Smartly updates the given category. Only values that are given will be updated, others will be left unchanged. If the category does not exist, a :py:exc:`KeyError` will be thrown. Use :py:meth:`addCategory()` instead if you want to add a category. ] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self].categories] begin[:] <ast.Raise object at 0x7da1b02417b0> if compare[name[nmin] is_not constant[None]] begin[:] call[call[name[self].categories][name[name]]][constant[0]] assign[=] name[nmin] if compare[name[n] is_not constant[None]] begin[:] call[call[name[self].categories][name[name]]][constant[1]] assign[=] name[n] if compare[name[nmax] is_not constant[None]] begin[:] call[call[name[self].categories][name[name]]][constant[2]] assign[=] name[nmax] call[name[self].redraw, parameter[]] call[name[self].doAction, parameter[constant[progresschange]]]
keyword[def] identifier[updateCategory] ( identifier[self] , identifier[name] , identifier[nmin] = keyword[None] , identifier[n] = keyword[None] , identifier[nmax] = keyword[None] ): literal[string] keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[categories] : keyword[raise] identifier[KeyError] ( literal[string] % identifier[name] ) keyword[if] identifier[nmin] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[categories] [ identifier[name] ][ literal[int] ]= identifier[nmin] keyword[if] identifier[n] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[categories] [ identifier[name] ][ literal[int] ]= identifier[n] keyword[if] identifier[nmax] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[categories] [ identifier[name] ][ literal[int] ]= identifier[nmax] identifier[self] . identifier[redraw] () identifier[self] . identifier[doAction] ( literal[string] )
def updateCategory(self, name, nmin=None, n=None, nmax=None): """ Smartly updates the given category. Only values that are given will be updated, others will be left unchanged. If the category does not exist, a :py:exc:`KeyError` will be thrown. Use :py:meth:`addCategory()` instead if you want to add a category. """ # smart update, only stuff that was given if name not in self.categories: raise KeyError("No Category with name '%s'" % name) # depends on [control=['if'], data=['name']] if nmin is not None: self.categories[name][0] = nmin # depends on [control=['if'], data=['nmin']] if n is not None: self.categories[name][1] = n # depends on [control=['if'], data=['n']] if nmax is not None: self.categories[name][2] = nmax # depends on [control=['if'], data=['nmax']] self.redraw() self.doAction('progresschange')
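The method above reduces to writing only the supplied slots of a [nmin, n, nmax] triple; a stripped-down, dict-only sketch:

categories = {'done': [0, 3, 10]}  # name -> [nmin, n, nmax]

def update(name, nmin=None, n=None, nmax=None):
    if name not in categories:
        raise KeyError("No Category with name '%s'" % name)
    for i, v in enumerate((nmin, n, nmax)):
        if v is not None:          # leave unspecified slots untouched
            categories[name][i] = v

update('done', n=5)
print(categories)  # {'done': [0, 5, 10]}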
def get_formatter(self, handler):
        """
        Return formatters according to handler. All handlers use
        the same format, except syslog.
        We omit time when syslogging.
        """
        if isinstance(handler, logging.handlers.SysLogHandler):
            formatter = '[%(levelname)-9s]'
        else:
            formatter = '[%(asctime)s] [%(levelname)-9s]'

        for p in self.prefix:
            formatter += ' [%s]' % (p)

        formatter = formatter + ' %(message)s'

        return logging.Formatter(formatter)
def function[get_formatter, parameter[self, handler]]: constant[ Return formatters according to handler. All handlers are the same format, except syslog. We omit time when syslogging. ] if call[name[isinstance], parameter[name[handler], name[logging].handlers.SysLogHandler]] begin[:] variable[formatter] assign[=] constant[[%(levelname)-9s]] for taget[name[p]] in starred[name[self].prefix] begin[:] <ast.AugAssign object at 0x7da20c6a8460> variable[formatter] assign[=] binary_operation[name[formatter] + constant[ %(message)s]] return[call[name[logging].Formatter, parameter[name[formatter]]]]
keyword[def] identifier[get_formatter] ( identifier[self] , identifier[handler] ): literal[string] keyword[if] identifier[isinstance] ( identifier[handler] , identifier[logging] . identifier[handlers] . identifier[SysLogHandler] ): identifier[formatter] = literal[string] keyword[else] : identifier[formatter] = literal[string] keyword[for] identifier[p] keyword[in] identifier[self] . identifier[prefix] : identifier[formatter] += literal[string] %( identifier[p] ) identifier[formatter] = identifier[formatter] + literal[string] keyword[return] identifier[logging] . identifier[Formatter] ( identifier[formatter] )
def get_formatter(self, handler): """ Return formatters according to handler. All handlers use the same format, except syslog. We omit time when syslogging. """ if isinstance(handler, logging.handlers.SysLogHandler): formatter = '[%(levelname)-9s]' # depends on [control=['if'], data=[]] else: formatter = '[%(asctime)s] [%(levelname)-9s]' for p in self.prefix: formatter += ' [%s]' % p # depends on [control=['for'], data=['p']] formatter = formatter + ' %(message)s' return logging.Formatter(formatter)
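To see the non-syslog branch in action (the prefix values here are invented):

import logging
import sys

fmt = '[%(asctime)s] [%(levelname)-9s]'
for p in ('myapp', 'worker-1'):    # hypothetical self.prefix contents
    fmt += ' [%s]' % p
fmt += ' %(message)s'

handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(logging.Formatter(fmt))
log = logging.getLogger('demo')
log.addHandler(handler)
log.warning('hello')
# e.g. [2024-01-01 12:00:00,000] [WARNING  ] [myapp] [worker-1] hello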
def add_pending_greenlet(self, greenlet: Greenlet): """ Ensures an error on the passed greenlet crashes self/main greenlet. """ def remove(_): self.greenlets.remove(greenlet) self.greenlets.append(greenlet) greenlet.link_exception(self.on_error) greenlet.link_value(remove)
def function[add_pending_greenlet, parameter[self, greenlet]]: constant[ Ensures an error on the passed greenlet crashes self/main greenlet. ] def function[remove, parameter[_]]: call[name[self].greenlets.remove, parameter[name[greenlet]]] call[name[self].greenlets.append, parameter[name[greenlet]]] call[name[greenlet].link_exception, parameter[name[self].on_error]] call[name[greenlet].link_value, parameter[name[remove]]]
keyword[def] identifier[add_pending_greenlet] ( identifier[self] , identifier[greenlet] : identifier[Greenlet] ): literal[string] keyword[def] identifier[remove] ( identifier[_] ): identifier[self] . identifier[greenlets] . identifier[remove] ( identifier[greenlet] ) identifier[self] . identifier[greenlets] . identifier[append] ( identifier[greenlet] ) identifier[greenlet] . identifier[link_exception] ( identifier[self] . identifier[on_error] ) identifier[greenlet] . identifier[link_value] ( identifier[remove] )
def add_pending_greenlet(self, greenlet: Greenlet): """ Ensures an error on the passed greenlet crashes self/main greenlet. """ def remove(_): self.greenlets.remove(greenlet) self.greenlets.append(greenlet) greenlet.link_exception(self.on_error) greenlet.link_value(remove)
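The same link_exception/link_value bookkeeping, rewritten as a runnable sketch around a plain list (assumes gevent is installed):

import gevent

greenlets = []

def on_error(glet):
    print('greenlet crashed:', glet.exception)

def track(glet):
    greenlets.append(glet)
    glet.link_exception(on_error)                    # surface failures
    glet.link_value(lambda g: greenlets.remove(g))   # drop on clean exit

g = gevent.spawn(lambda: 42)
track(g)
gevent.joinall([g])
gevent.sleep(0)   # give the link callbacks a turn on the hub
print(greenlets)  # [], removed once the greenlet finished cleanly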
def advance_robots(self): '''Produces a new game state in which the robots have advanced towards the player by one step. Handles the robots crashing into one another too.''' # move the robots towards the player self = lens.robots.Each().call_step_towards(self.player)(self) # robots in the same place are crashes self = lens.crashes.call_union(duplicates(self.robots))(self) # remove crashed robots self = lens.robots.modify(lambda r: list(set(r) - self.crashes))(self) return self
def function[advance_robots, parameter[self]]: constant[Produces a new game state in which the robots have advanced towards the player by one step. Handles the robots crashing into one another too.] variable[self] assign[=] call[call[call[name[lens].robots.Each, parameter[]].call_step_towards, parameter[name[self].player]], parameter[name[self]]] variable[self] assign[=] call[call[name[lens].crashes.call_union, parameter[call[name[duplicates], parameter[name[self].robots]]]], parameter[name[self]]] variable[self] assign[=] call[call[name[lens].robots.modify, parameter[<ast.Lambda object at 0x7da1b0355150>]], parameter[name[self]]] return[name[self]]
keyword[def] identifier[advance_robots] ( identifier[self] ): literal[string] identifier[self] = identifier[lens] . identifier[robots] . identifier[Each] (). identifier[call_step_towards] ( identifier[self] . identifier[player] )( identifier[self] ) identifier[self] = identifier[lens] . identifier[crashes] . identifier[call_union] ( identifier[duplicates] ( identifier[self] . identifier[robots] ))( identifier[self] ) identifier[self] = identifier[lens] . identifier[robots] . identifier[modify] ( keyword[lambda] identifier[r] : identifier[list] ( identifier[set] ( identifier[r] )- identifier[self] . identifier[crashes] ))( identifier[self] ) keyword[return] identifier[self]
def advance_robots(self): """Produces a new game state in which the robots have advanced towards the player by one step. Handles the robots crashing into one another too.""" # move the robots towards the player self = lens.robots.Each().call_step_towards(self.player)(self) # robots in the same place are crashes self = lens.crashes.call_union(duplicates(self.robots))(self) # remove crashed robots self = lens.robots.modify(lambda r: list(set(r) - self.crashes))(self) return self
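The duplicates helper is not part of this record; one plausible implementation (an assumption, not the project's code) keeps only coordinates seen more than once:

from collections import Counter

def duplicates(items):
    # positions occupied by more than one robot, i.e. crash sites
    return {pos for pos, n in Counter(items).items() if n > 1}

print(duplicates([(1, 2), (3, 4), (1, 2)]))  # {(1, 2)}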
def save(variable, filename):
    """Save variable at the given path using Pickle

    Args:
        variable: what to save
        filename (str): path of the output file
    """
    fileObj = open(filename, 'wb')
    pickle.dump(variable, fileObj)
    fileObj.close()
def function[save, parameter[variable, filename]]: constant[Save variable on given path using Pickle Args: variable: what to save path (str): path of the output ] variable[fileObj] assign[=] call[name[open], parameter[name[filename], constant[wb]]] call[name[pickle].dump, parameter[name[variable], name[fileObj]]] call[name[fileObj].close, parameter[]]
keyword[def] identifier[save] ( identifier[variable] , identifier[filename] ): literal[string] identifier[fileObj] = identifier[open] ( identifier[filename] , literal[string] ) identifier[pickle] . identifier[dump] ( identifier[variable] , identifier[fileObj] ) identifier[fileObj] . identifier[close] ()
def save(variable, filename): """Save variable at the given path using Pickle Args: variable: what to save filename (str): path of the output file """ fileObj = open(filename, 'wb') pickle.dump(variable, fileObj) fileObj.close()
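Round-tripping the helper above (the path is arbitrary):

import pickle

save({'answer': 42}, '/tmp/state.pkl')

with open('/tmp/state.pkl', 'rb') as f:  # the matching load side
    print(pickle.load(f))                # {'answer': 42}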
def _adjust_regs(self):
        """
        Adjust bp and sp w.r.t. stack difference between GDB session and angr.
        This matches sp and bp registers, but there is a high risk of pointer
        inconsistencies.
        """
        if not self.adjust_stack:
            return

        bp = self.state.arch.register_names[self.state.arch.bp_offset]
        sp = self.state.arch.register_names[self.state.arch.sp_offset]
        stack_shift = self.state.arch.initial_sp - self.real_stack_top
        self.state.registers.store(sp, self.state.regs.sp + stack_shift)

        if not self.omit_fp:
            self.state.registers.store(bp, self.state.regs.bp + stack_shift)
def function[_adjust_regs, parameter[self]]: constant[ Adjust bp and sp w.r.t. stack difference between GDB session and angr. This matches sp and bp registers, but there is a high risk of pointers inconsistencies. ] if <ast.UnaryOp object at 0x7da1b26afc40> begin[:] return[None] variable[bp] assign[=] call[name[self].state.arch.register_names][name[self].state.arch.bp_offset] variable[sp] assign[=] call[name[self].state.arch.register_names][name[self].state.arch.sp_offset] variable[stack_shift] assign[=] binary_operation[name[self].state.arch.initial_sp - name[self].real_stack_top] call[name[self].state.registers.store, parameter[name[sp], binary_operation[name[self].state.regs.sp + name[stack_shift]]]] if <ast.UnaryOp object at 0x7da1b26af970> begin[:] call[name[self].state.registers.store, parameter[name[bp], binary_operation[name[self].state.regs.bp + name[stack_shift]]]]
keyword[def] identifier[_adjust_regs] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[adjust_stack] : keyword[return] identifier[bp] = identifier[self] . identifier[state] . identifier[arch] . identifier[register_names] [ identifier[self] . identifier[state] . identifier[arch] . identifier[bp_offset] ] identifier[sp] = identifier[self] . identifier[state] . identifier[arch] . identifier[register_names] [ identifier[self] . identifier[state] . identifier[arch] . identifier[sp_offset] ] identifier[stack_shift] = identifier[self] . identifier[state] . identifier[arch] . identifier[initial_sp] - identifier[self] . identifier[real_stack_top] identifier[self] . identifier[state] . identifier[registers] . identifier[store] ( identifier[sp] , identifier[self] . identifier[state] . identifier[regs] . identifier[sp] + identifier[stack_shift] ) keyword[if] keyword[not] identifier[self] . identifier[omit_fp] : identifier[self] . identifier[state] . identifier[registers] . identifier[store] ( identifier[bp] , identifier[self] . identifier[state] . identifier[regs] . identifier[bp] + identifier[stack_shift] )
def _adjust_regs(self): """ Adjust bp and sp w.r.t. stack difference between GDB session and angr. This matches sp and bp registers, but there is a high risk of pointer inconsistencies. """ if not self.adjust_stack: return # depends on [control=['if'], data=[]] bp = self.state.arch.register_names[self.state.arch.bp_offset] sp = self.state.arch.register_names[self.state.arch.sp_offset] stack_shift = self.state.arch.initial_sp - self.real_stack_top self.state.registers.store(sp, self.state.regs.sp + stack_shift) if not self.omit_fp: self.state.registers.store(bp, self.state.regs.bp + stack_shift) # depends on [control=['if'], data=[]]
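The adjustment itself is plain pointer arithmetic; with made-up addresses:

initial_sp = 0x7fff0000      # hypothetical angr stack top
real_stack_top = 0x7ffd0000  # hypothetical top observed in GDB
stack_shift = initial_sp - real_stack_top  # 0x20000

gdb_sp = 0x7ffcff80                 # sp captured in the GDB session
print(hex(gdb_sp + stack_shift))    # 0x7ffeff80, sp as angr should see it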
def _getVirtualScreenRect(self): """ Returns the rect of all attached screens as (x, y, w, h) """ monitors = self.getScreenDetails() x1 = min([s["rect"][0] for s in monitors]) y1 = min([s["rect"][1] for s in monitors]) x2 = max([s["rect"][0]+s["rect"][2] for s in monitors]) y2 = max([s["rect"][1]+s["rect"][3] for s in monitors]) return (x1, y1, x2-x1, y2-y1)
def function[_getVirtualScreenRect, parameter[self]]: constant[ Returns the rect of all attached screens as (x, y, w, h) ] variable[monitors] assign[=] call[name[self].getScreenDetails, parameter[]] variable[x1] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da18dc98790>]] variable[y1] assign[=] call[name[min], parameter[<ast.ListComp object at 0x7da18dc9a440>]] variable[x2] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da18dc9ad10>]] variable[y2] assign[=] call[name[max], parameter[<ast.ListComp object at 0x7da2041da920>]] return[tuple[[<ast.Name object at 0x7da2041da260>, <ast.Name object at 0x7da2041dbf40>, <ast.BinOp object at 0x7da2041d9840>, <ast.BinOp object at 0x7da2041db070>]]]
keyword[def] identifier[_getVirtualScreenRect] ( identifier[self] ): literal[string] identifier[monitors] = identifier[self] . identifier[getScreenDetails] () identifier[x1] = identifier[min] ([ identifier[s] [ literal[string] ][ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[monitors] ]) identifier[y1] = identifier[min] ([ identifier[s] [ literal[string] ][ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[monitors] ]) identifier[x2] = identifier[max] ([ identifier[s] [ literal[string] ][ literal[int] ]+ identifier[s] [ literal[string] ][ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[monitors] ]) identifier[y2] = identifier[max] ([ identifier[s] [ literal[string] ][ literal[int] ]+ identifier[s] [ literal[string] ][ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[monitors] ]) keyword[return] ( identifier[x1] , identifier[y1] , identifier[x2] - identifier[x1] , identifier[y2] - identifier[y1] )
def _getVirtualScreenRect(self): """ Returns the rect of all attached screens as (x, y, w, h) """ monitors = self.getScreenDetails() x1 = min([s['rect'][0] for s in monitors]) y1 = min([s['rect'][1] for s in monitors]) x2 = max([s['rect'][0] + s['rect'][2] for s in monitors]) y2 = max([s['rect'][1] + s['rect'][3] for s in monitors]) return (x1, y1, x2 - x1, y2 - y1)
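The bounding-box math above runs on plain dicts, so it can be checked without a GUI (monitor geometries invented):

monitors = [
    {'rect': (0, 0, 1920, 1080)},        # primary
    {'rect': (1920, -120, 1280, 1024)},  # secondary, shifted upward
]
x1 = min(s['rect'][0] for s in monitors)
y1 = min(s['rect'][1] for s in monitors)
x2 = max(s['rect'][0] + s['rect'][2] for s in monitors)
y2 = max(s['rect'][1] + s['rect'][3] for s in monitors)
print((x1, y1, x2 - x1, y2 - y1))  # (0, -120, 3200, 1200)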
def registerResponse(self, node, vendorSpecific=None):
        """CNRegister.register(session, node) → NodeReference https://releases.dataone.org/online/api-documentation-v2.0.1/apis/CN_APIs.html#CNRegister.register.

        Args:
          node:
          vendorSpecific:

        Returns:

        """
        mmp_dict = {'node': ('node.xml', node.toxml('utf-8'))}
        return self.POST('node', fields=mmp_dict, headers=vendorSpecific)
def function[registerResponse, parameter[self, node, vendorSpecific]]: constant[CNRegister.register(session, node) → NodeReference https://releases.dataone.org/online/api- documentation-v2.0.1/apis/CN_APIs.html#CNRegister.register. Args: node: vendorSpecific: Returns: ] variable[mmp_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a2d5a0>], [<ast.Tuple object at 0x7da1b1a2d630>]] return[call[name[self].POST, parameter[constant[node]]]]
keyword[def] identifier[registerResponse] ( identifier[self] , identifier[node] , identifier[vendorSpecific] = keyword[None] ): literal[string] identifier[mmp_dict] ={ literal[string] :( literal[string] , identifier[node] . identifier[toxml] ( literal[string] ))} keyword[return] identifier[self] . identifier[POST] ( literal[string] , identifier[fields] = identifier[mmp_dict] , identifier[headers] = identifier[vendorSpecific] )
def registerResponse(self, node, vendorSpecific=None): """CNRegister.register(session, node) → NodeReference https://releases.dataone.org/online/api-documentation-v2.0.1/apis/CN_APIs.html#CNRegister.register. Args: node: vendorSpecific: Returns: """ mmp_dict = {'node': ('node.xml', node.toxml('utf-8'))} return self.POST('node', fields=mmp_dict, headers=vendorSpecific)
def list_of_lists_to_dict(l):
    """ Convert list of key,value lists to dict

    [['id', 1], ['id', 2], ['id', 3], ['foo', 4]]
    {'id': [1, 2, 3], 'foo': [4]}
    """
    d = {}
    for key, val in l:
        d.setdefault(key, []).append(val)
    return d
def function[list_of_lists_to_dict, parameter[l]]: constant[ Convert list of key,value lists to dict [['id', 1], ['id', 2], ['id', 3], ['foo': 4]] {'id': [1, 2, 3], 'foo': [4]} ] variable[d] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da207f00df0>, <ast.Name object at 0x7da207f02bf0>]]] in starred[name[l]] begin[:] call[call[name[d].setdefault, parameter[name[key], list[[]]]].append, parameter[name[val]]] return[name[d]]
keyword[def] identifier[list_of_lists_to_dict] ( identifier[l] ): literal[string] identifier[d] ={} keyword[for] identifier[key] , identifier[val] keyword[in] identifier[l] : identifier[d] . identifier[setdefault] ( identifier[key] ,[]). identifier[append] ( identifier[val] ) keyword[return] identifier[d]
def list_of_lists_to_dict(l): """ Convert list of key,value lists to dict [['id', 1], ['id', 2], ['id', 3], ['foo', 4]] {'id': [1, 2, 3], 'foo': [4]} """ d = {} for (key, val) in l: d.setdefault(key, []).append(val) # depends on [control=['for'], data=[]] return d
def get_profile(self, profile_count=50): """ Get profile of query execution time. :param int profile_count: Number of profiles to retrieve, counted from the top query in descending order by the cumulative execution time. :return: Profile information for each query. :rtype: list of |namedtuple| :raises simplesqlite.NullDatabaseConnectionError: |raises_check_connection| :raises simplesqlite.OperationalError: |raises_operational_error| :Example: :ref:`example-get-profile` """ from collections import namedtuple profile_table_name = "sql_profile" value_matrix = [ [query, execute_time, self.__dict_query_count.get(query, 0)] for query, execute_time in six.iteritems(self.__dict_query_totalexectime) ] attr_names = ("sql_query", "cumulative_time", "count") con_tmp = connect_memdb() try: con_tmp.create_table_from_data_matrix( profile_table_name, attr_names, data_matrix=value_matrix ) except ValueError: return [] try: result = con_tmp.select( select="{:s},SUM({:s}),SUM({:s})".format(*attr_names), table_name=profile_table_name, extra="GROUP BY {:s} ORDER BY {:s} DESC LIMIT {:d}".format( attr_names[0], attr_names[1], profile_count ), ) except sqlite3.OperationalError: return [] if result is None: return [] SqliteProfile = namedtuple("SqliteProfile", " ".join(attr_names)) return [SqliteProfile(*profile) for profile in result.fetchall()]
def function[get_profile, parameter[self, profile_count]]: constant[ Get profile of query execution time. :param int profile_count: Number of profiles to retrieve, counted from the top query in descending order by the cumulative execution time. :return: Profile information for each query. :rtype: list of |namedtuple| :raises simplesqlite.NullDatabaseConnectionError: |raises_check_connection| :raises simplesqlite.OperationalError: |raises_operational_error| :Example: :ref:`example-get-profile` ] from relative_module[collections] import module[namedtuple] variable[profile_table_name] assign[=] constant[sql_profile] variable[value_matrix] assign[=] <ast.ListComp object at 0x7da1b0417220> variable[attr_names] assign[=] tuple[[<ast.Constant object at 0x7da1b0415810>, <ast.Constant object at 0x7da1b0417010>, <ast.Constant object at 0x7da1b04167a0>]] variable[con_tmp] assign[=] call[name[connect_memdb], parameter[]] <ast.Try object at 0x7da1b0416d10> <ast.Try object at 0x7da1b0417550> if compare[name[result] is constant[None]] begin[:] return[list[[]]] variable[SqliteProfile] assign[=] call[name[namedtuple], parameter[constant[SqliteProfile], call[constant[ ].join, parameter[name[attr_names]]]]] return[<ast.ListComp object at 0x7da1b04f9990>]
keyword[def] identifier[get_profile] ( identifier[self] , identifier[profile_count] = literal[int] ): literal[string] keyword[from] identifier[collections] keyword[import] identifier[namedtuple] identifier[profile_table_name] = literal[string] identifier[value_matrix] =[ [ identifier[query] , identifier[execute_time] , identifier[self] . identifier[__dict_query_count] . identifier[get] ( identifier[query] , literal[int] )] keyword[for] identifier[query] , identifier[execute_time] keyword[in] identifier[six] . identifier[iteritems] ( identifier[self] . identifier[__dict_query_totalexectime] ) ] identifier[attr_names] =( literal[string] , literal[string] , literal[string] ) identifier[con_tmp] = identifier[connect_memdb] () keyword[try] : identifier[con_tmp] . identifier[create_table_from_data_matrix] ( identifier[profile_table_name] , identifier[attr_names] , identifier[data_matrix] = identifier[value_matrix] ) keyword[except] identifier[ValueError] : keyword[return] [] keyword[try] : identifier[result] = identifier[con_tmp] . identifier[select] ( identifier[select] = literal[string] . identifier[format] (* identifier[attr_names] ), identifier[table_name] = identifier[profile_table_name] , identifier[extra] = literal[string] . identifier[format] ( identifier[attr_names] [ literal[int] ], identifier[attr_names] [ literal[int] ], identifier[profile_count] ), ) keyword[except] identifier[sqlite3] . identifier[OperationalError] : keyword[return] [] keyword[if] identifier[result] keyword[is] keyword[None] : keyword[return] [] identifier[SqliteProfile] = identifier[namedtuple] ( literal[string] , literal[string] . identifier[join] ( identifier[attr_names] )) keyword[return] [ identifier[SqliteProfile] (* identifier[profile] ) keyword[for] identifier[profile] keyword[in] identifier[result] . identifier[fetchall] ()]
def get_profile(self, profile_count=50): """ Get profile of query execution time. :param int profile_count: Number of profiles to retrieve, counted from the top query in descending order by the cumulative execution time. :return: Profile information for each query. :rtype: list of |namedtuple| :raises simplesqlite.NullDatabaseConnectionError: |raises_check_connection| :raises simplesqlite.OperationalError: |raises_operational_error| :Example: :ref:`example-get-profile` """ from collections import namedtuple profile_table_name = 'sql_profile' value_matrix = [[query, execute_time, self.__dict_query_count.get(query, 0)] for (query, execute_time) in six.iteritems(self.__dict_query_totalexectime)] attr_names = ('sql_query', 'cumulative_time', 'count') con_tmp = connect_memdb() try: con_tmp.create_table_from_data_matrix(profile_table_name, attr_names, data_matrix=value_matrix) # depends on [control=['try'], data=[]] except ValueError: return [] # depends on [control=['except'], data=[]] try: result = con_tmp.select(select='{:s},SUM({:s}),SUM({:s})'.format(*attr_names), table_name=profile_table_name, extra='GROUP BY {:s} ORDER BY {:s} DESC LIMIT {:d}'.format(attr_names[0], attr_names[1], profile_count)) # depends on [control=['try'], data=[]] except sqlite3.OperationalError: return [] # depends on [control=['except'], data=[]] if result is None: return [] # depends on [control=['if'], data=[]] SqliteProfile = namedtuple('SqliteProfile', ' '.join(attr_names)) return [SqliteProfile(*profile) for profile in result.fetchall()]
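The namedtuple built at the end takes its field names from a joined string; in isolation:

from collections import namedtuple

attr_names = ('sql_query', 'cumulative_time', 'count')
SqliteProfile = namedtuple('SqliteProfile', ' '.join(attr_names))
row = ('SELECT 1', 0.004, 3)                 # shape of one fetched row
print(SqliteProfile(*row).cumulative_time)   # 0.004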
def get_upcoming_events(self): """ Get upcoming PythonKC meetup events. Returns ------- List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time, ascending. Exceptions ---------- * PythonKCMeetupsBadJson * PythonKCMeetupsBadResponse * PythonKCMeetupsMeetupDown * PythonKCMeetupsNotJson * PythonKCMeetupsRateLimitExceeded """ query = urllib.urlencode({'key': self._api_key, 'group_urlname': GROUP_URLNAME}) url = '{0}?{1}'.format(EVENTS_URL, query) data = self._http_get_json(url) events = data['results'] return [parse_event(event) for event in events]
def function[get_upcoming_events, parameter[self]]: constant[ Get upcoming PythonKC meetup events. Returns ------- List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time, ascending. Exceptions ---------- * PythonKCMeetupsBadJson * PythonKCMeetupsBadResponse * PythonKCMeetupsMeetupDown * PythonKCMeetupsNotJson * PythonKCMeetupsRateLimitExceeded ] variable[query] assign[=] call[name[urllib].urlencode, parameter[dictionary[[<ast.Constant object at 0x7da20c7ca770>, <ast.Constant object at 0x7da20c7c98a0>], [<ast.Attribute object at 0x7da20c7cbf10>, <ast.Name object at 0x7da20c7c8640>]]]] variable[url] assign[=] call[constant[{0}?{1}].format, parameter[name[EVENTS_URL], name[query]]] variable[data] assign[=] call[name[self]._http_get_json, parameter[name[url]]] variable[events] assign[=] call[name[data]][constant[results]] return[<ast.ListComp object at 0x7da204566c20>]
keyword[def] identifier[get_upcoming_events] ( identifier[self] ): literal[string] identifier[query] = identifier[urllib] . identifier[urlencode] ({ literal[string] : identifier[self] . identifier[_api_key] , literal[string] : identifier[GROUP_URLNAME] }) identifier[url] = literal[string] . identifier[format] ( identifier[EVENTS_URL] , identifier[query] ) identifier[data] = identifier[self] . identifier[_http_get_json] ( identifier[url] ) identifier[events] = identifier[data] [ literal[string] ] keyword[return] [ identifier[parse_event] ( identifier[event] ) keyword[for] identifier[event] keyword[in] identifier[events] ]
def get_upcoming_events(self): """ Get upcoming PythonKC meetup events. Returns ------- List of ``pythonkc_meetups.types.MeetupEvent``, ordered by event time, ascending. Exceptions ---------- * PythonKCMeetupsBadJson * PythonKCMeetupsBadResponse * PythonKCMeetupsMeetupDown * PythonKCMeetupsNotJson * PythonKCMeetupsRateLimitExceeded """ query = urllib.urlencode({'key': self._api_key, 'group_urlname': GROUP_URLNAME}) url = '{0}?{1}'.format(EVENTS_URL, query) data = self._http_get_json(url) events = data['results'] return [parse_event(event) for event in events]
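The query construction above is Python 2 (urllib.urlencode); on Python 3 the same step would read as follows, with placeholder values for the key and for EVENTS_URL:

from urllib.parse import urlencode

query = urlencode({'key': 'API_KEY', 'group_urlname': 'pythonkc'})
url = '{0}?{1}'.format('https://api.meetup.com/2/events', query)  # hypothetical EVENTS_URL
print(url)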
def set_motor_position(self, motor_name, position): """ Sets the motor target position. """ self.call_remote_api('simxSetJointTargetPosition', self.get_object_handle(motor_name), position, sending=True)
def function[set_motor_position, parameter[self, motor_name, position]]: constant[ Sets the motor target position. ] call[name[self].call_remote_api, parameter[constant[simxSetJointTargetPosition], call[name[self].get_object_handle, parameter[name[motor_name]]], name[position]]]
keyword[def] identifier[set_motor_position] ( identifier[self] , identifier[motor_name] , identifier[position] ): literal[string] identifier[self] . identifier[call_remote_api] ( literal[string] , identifier[self] . identifier[get_object_handle] ( identifier[motor_name] ), identifier[position] , identifier[sending] = keyword[True] )
def set_motor_position(self, motor_name, position): """ Sets the motor target position. """ self.call_remote_api('simxSetJointTargetPosition', self.get_object_handle(motor_name), position, sending=True)
def on(self, message, namespace=None): """Decorator to register a SocketIO event handler. This decorator must be applied to SocketIO event handlers. Example:: @socketio.on('my event', namespace='/chat') def handle_my_custom_event(json): print('received json: ' + str(json)) :param message: The name of the event. This is normally a user defined string, but a few event names are already defined. Use ``'message'`` to define a handler that takes a string payload, ``'json'`` to define a handler that takes a JSON blob payload, ``'connect'`` or ``'disconnect'`` to create handlers for connection and disconnection events. :param namespace: The namespace on which the handler is to be registered. Defaults to the global namespace. """ namespace = namespace or '/' def decorator(handler): def _handler(sid, *args): return self._handle_event(handler, message, namespace, sid, *args) if self.server: self.server.on(message, _handler, namespace=namespace) else: self.handlers.append((message, _handler, namespace)) return handler return decorator
def function[on, parameter[self, message, namespace]]: constant[Decorator to register a SocketIO event handler. This decorator must be applied to SocketIO event handlers. Example:: @socketio.on('my event', namespace='/chat') def handle_my_custom_event(json): print('received json: ' + str(json)) :param message: The name of the event. This is normally a user defined string, but a few event names are already defined. Use ``'message'`` to define a handler that takes a string payload, ``'json'`` to define a handler that takes a JSON blob payload, ``'connect'`` or ``'disconnect'`` to create handlers for connection and disconnection events. :param namespace: The namespace on which the handler is to be registered. Defaults to the global namespace. ] variable[namespace] assign[=] <ast.BoolOp object at 0x7da1b21ba7a0> def function[decorator, parameter[handler]]: def function[_handler, parameter[sid]]: return[call[name[self]._handle_event, parameter[name[handler], name[message], name[namespace], name[sid], <ast.Starred object at 0x7da1b21b9870>]]] if name[self].server begin[:] call[name[self].server.on, parameter[name[message], name[_handler]]] return[name[handler]] return[name[decorator]]
keyword[def] identifier[on] ( identifier[self] , identifier[message] , identifier[namespace] = keyword[None] ): literal[string] identifier[namespace] = identifier[namespace] keyword[or] literal[string] keyword[def] identifier[decorator] ( identifier[handler] ): keyword[def] identifier[_handler] ( identifier[sid] ,* identifier[args] ): keyword[return] identifier[self] . identifier[_handle_event] ( identifier[handler] , identifier[message] , identifier[namespace] , identifier[sid] , * identifier[args] ) keyword[if] identifier[self] . identifier[server] : identifier[self] . identifier[server] . identifier[on] ( identifier[message] , identifier[_handler] , identifier[namespace] = identifier[namespace] ) keyword[else] : identifier[self] . identifier[handlers] . identifier[append] (( identifier[message] , identifier[_handler] , identifier[namespace] )) keyword[return] identifier[handler] keyword[return] identifier[decorator]
def on(self, message, namespace=None): """Decorator to register a SocketIO event handler. This decorator must be applied to SocketIO event handlers. Example:: @socketio.on('my event', namespace='/chat') def handle_my_custom_event(json): print('received json: ' + str(json)) :param message: The name of the event. This is normally a user defined string, but a few event names are already defined. Use ``'message'`` to define a handler that takes a string payload, ``'json'`` to define a handler that takes a JSON blob payload, ``'connect'`` or ``'disconnect'`` to create handlers for connection and disconnection events. :param namespace: The namespace on which the handler is to be registered. Defaults to the global namespace. """ namespace = namespace or '/' def decorator(handler): def _handler(sid, *args): return self._handle_event(handler, message, namespace, sid, *args) if self.server: self.server.on(message, _handler, namespace=namespace) # depends on [control=['if'], data=[]] else: self.handlers.append((message, _handler, namespace)) return handler return decorator
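A self-contained stand-in for the deferred-registration branch (no real server involved, class name invented):

class Hub:
    # minimal mirror of the decorator's 'no server yet' path
    def __init__(self):
        self.server = None
        self.handlers = []

    def on(self, message, namespace=None):
        namespace = namespace or '/'
        def decorator(handler):
            self.handlers.append((message, handler, namespace))
            return handler
        return decorator

hub = Hub()

@hub.on('status', namespace='/ops')
def on_status(data):
    print('status:', data)

print(hub.handlers[0][0], hub.handlers[0][2])  # status /ops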
def when_all_players_ready(self): """Initializes decisions based on ``player.initial_decision()``. If :attr:`num_subperiods` is set, starts a timed task to run the sub-periods. """ self.group_decisions = {} self.subperiod_group_decisions = {} for player in self.get_players(): self.group_decisions[player.participant.code] = player.initial_decision() self.subperiod_group_decisions[player.participant.code] = player.initial_decision() if self.num_subperiods(): emitter = DiscreteEventEmitter( self.period_length() / self.num_subperiods(), self.period_length(), self, self._subperiod_tick) emitter.start() elif self.rate_limit(): def _tick(current_interval, intervals): self.refresh_from_db() if self._group_decisions_updated: self.send('group_decisions', self.group_decisions) self._group_decisions_updated = False self.save(update_fields=['_group_decisions_updated']) update_period = self.rate_limit() emitter = DiscreteEventEmitter( update_period, self.period_length(), self, _tick) emitter.start() self.save()
def function[when_all_players_ready, parameter[self]]: constant[Initializes decisions based on ``player.initial_decision()``. If :attr:`num_subperiods` is set, starts a timed task to run the sub-periods. ] name[self].group_decisions assign[=] dictionary[[], []] name[self].subperiod_group_decisions assign[=] dictionary[[], []] for taget[name[player]] in starred[call[name[self].get_players, parameter[]]] begin[:] call[name[self].group_decisions][name[player].participant.code] assign[=] call[name[player].initial_decision, parameter[]] call[name[self].subperiod_group_decisions][name[player].participant.code] assign[=] call[name[player].initial_decision, parameter[]] if call[name[self].num_subperiods, parameter[]] begin[:] variable[emitter] assign[=] call[name[DiscreteEventEmitter], parameter[binary_operation[call[name[self].period_length, parameter[]] / call[name[self].num_subperiods, parameter[]]], call[name[self].period_length, parameter[]], name[self], name[self]._subperiod_tick]] call[name[emitter].start, parameter[]] call[name[self].save, parameter[]]
keyword[def] identifier[when_all_players_ready] ( identifier[self] ): literal[string] identifier[self] . identifier[group_decisions] ={} identifier[self] . identifier[subperiod_group_decisions] ={} keyword[for] identifier[player] keyword[in] identifier[self] . identifier[get_players] (): identifier[self] . identifier[group_decisions] [ identifier[player] . identifier[participant] . identifier[code] ]= identifier[player] . identifier[initial_decision] () identifier[self] . identifier[subperiod_group_decisions] [ identifier[player] . identifier[participant] . identifier[code] ]= identifier[player] . identifier[initial_decision] () keyword[if] identifier[self] . identifier[num_subperiods] (): identifier[emitter] = identifier[DiscreteEventEmitter] ( identifier[self] . identifier[period_length] ()/ identifier[self] . identifier[num_subperiods] (), identifier[self] . identifier[period_length] (), identifier[self] , identifier[self] . identifier[_subperiod_tick] ) identifier[emitter] . identifier[start] () keyword[elif] identifier[self] . identifier[rate_limit] (): keyword[def] identifier[_tick] ( identifier[current_interval] , identifier[intervals] ): identifier[self] . identifier[refresh_from_db] () keyword[if] identifier[self] . identifier[_group_decisions_updated] : identifier[self] . identifier[send] ( literal[string] , identifier[self] . identifier[group_decisions] ) identifier[self] . identifier[_group_decisions_updated] = keyword[False] identifier[self] . identifier[save] ( identifier[update_fields] =[ literal[string] ]) identifier[update_period] = identifier[self] . identifier[rate_limit] () identifier[emitter] = identifier[DiscreteEventEmitter] ( identifier[update_period] , identifier[self] . identifier[period_length] (), identifier[self] , identifier[_tick] ) identifier[emitter] . identifier[start] () identifier[self] . identifier[save] ()
def when_all_players_ready(self): """Initializes decisions based on ``player.initial_decision()``. If :attr:`num_subperiods` is set, starts a timed task to run the sub-periods. """ self.group_decisions = {} self.subperiod_group_decisions = {} for player in self.get_players(): self.group_decisions[player.participant.code] = player.initial_decision() self.subperiod_group_decisions[player.participant.code] = player.initial_decision() # depends on [control=['for'], data=['player']] if self.num_subperiods(): emitter = DiscreteEventEmitter(self.period_length() / self.num_subperiods(), self.period_length(), self, self._subperiod_tick) emitter.start() # depends on [control=['if'], data=[]] elif self.rate_limit(): def _tick(current_interval, intervals): self.refresh_from_db() if self._group_decisions_updated: self.send('group_decisions', self.group_decisions) self._group_decisions_updated = False self.save(update_fields=['_group_decisions_updated']) # depends on [control=['if'], data=[]] update_period = self.rate_limit() emitter = DiscreteEventEmitter(update_period, self.period_length(), self, _tick) emitter.start() # depends on [control=['if'], data=[]] self.save()
def get_draft_secret_key(): """ Return the secret key used to generate draft mode HMACs. It will be randomly generated on first access. Existing draft URLs can be invalidated by deleting or updating the ``DRAFT_SECRET_KEY`` setting. """ # TODO: Per URL secret keys, so we can invalidate draft URLs for individual # pages. For example, on publish. draft_secret_key, created = Text.objects.get_or_create( name='DRAFT_SECRET_KEY', defaults=dict( value=get_random_string(50), )) return draft_secret_key.value
def function[get_draft_secret_key, parameter[]]: constant[ Return the secret key used to generate draft mode HMACs. It will be randomly generated on first access. Existing draft URLs can be invalidated by deleting or updating the ``DRAFT_SECRET_KEY`` setting. ] <ast.Tuple object at 0x7da18dc07850> assign[=] call[name[Text].objects.get_or_create, parameter[]] return[name[draft_secret_key].value]
keyword[def] identifier[get_draft_secret_key] (): literal[string] identifier[draft_secret_key] , identifier[created] = identifier[Text] . identifier[objects] . identifier[get_or_create] ( identifier[name] = literal[string] , identifier[defaults] = identifier[dict] ( identifier[value] = identifier[get_random_string] ( literal[int] ), )) keyword[return] identifier[draft_secret_key] . identifier[value]
def get_draft_secret_key(): """ Return the secret key used to generate draft mode HMACs. It will be randomly generated on first access. Existing draft URLs can be invalidated by deleting or updating the ``DRAFT_SECRET_KEY`` setting. """ # TODO: Per URL secret keys, so we can invalidate draft URLs for individual # pages. For example, on publish. (draft_secret_key, created) = Text.objects.get_or_create(name='DRAFT_SECRET_KEY', defaults=dict(value=get_random_string(50))) return draft_secret_key.value
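One plausible way such a key could feed an HMAC for draft URLs; this is an illustration under assumed details, not the project's actual signing scheme:

import hashlib
import hmac

secret = 'k3y-from-DRAFT_SECRET_KEY'  # stand-in for get_draft_secret_key()
token = hmac.new(secret.encode(), b'/page/42/', hashlib.sha256).hexdigest()
print(token[:16])  # rotating the stored key invalidates every such token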
def get_ref_annotation_data_for_tier(self, id_tier):
        """Give a list of all reference annotations of the form:
        ``[(start, end, value, refvalue)]``

        :param str id_tier: Name of the tier.
        :raises KeyError: If the tier is non-existent.
        :returns: Reference annotations within that tier.
        """
        bucket = []
        for aid, (ref, value, prev, _) in self.tiers[id_tier][1].items():
            refann = self.get_parent_aligned_annotation(ref)
            bucket.append((self.timeslots[refann[0]], self.timeslots[refann[1]],
                           value, refann[2]))
        return bucket
def function[get_ref_annotation_data_for_tier, parameter[self, id_tier]]: constant["Give a list of all reference annotations of the form: ``[(start, end, value, refvalue)]`` :param str id_tier: Name of the tier. :raises KeyError: If the tier is non existent. :returns: Reference annotations within that tier. ] variable[bucket] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b0216110>, <ast.Tuple object at 0x7da1b02159c0>]]] in starred[call[call[call[name[self].tiers][name[id_tier]]][constant[1]].items, parameter[]]] begin[:] variable[refann] assign[=] call[name[self].get_parent_aligned_annotation, parameter[name[ref]]] call[name[bucket].append, parameter[tuple[[<ast.Subscript object at 0x7da1b0215810>, <ast.Subscript object at 0x7da1b0214880>, <ast.Name object at 0x7da1b0214910>, <ast.Subscript object at 0x7da1b0214070>]]]] return[name[bucket]]
keyword[def] identifier[get_ref_annotation_data_for_tier] ( identifier[self] , identifier[id_tier] ): literal[string] identifier[bucket] =[] keyword[for] identifier[aid] ,( identifier[ref] , identifier[value] , identifier[prev] , identifier[_] ) keyword[in] identifier[self] . identifier[tiers] [ identifier[id_tier] ][ literal[int] ]. identifier[items] (): identifier[refann] = identifier[self] . identifier[get_parent_aligned_annotation] ( identifier[ref] ) identifier[bucket] . identifier[append] (( identifier[self] . identifier[timeslots] [ identifier[refann] [ literal[int] ]], identifier[self] . identifier[timeslots] [ identifier[refann] [ literal[int] ]], identifier[value] , identifier[refann] [ literal[int] ])) keyword[return] identifier[bucket]
def get_ref_annotation_data_for_tier(self, id_tier): """Give a list of all reference annotations of the form: ``[(start, end, value, refvalue)]`` :param str id_tier: Name of the tier. :raises KeyError: If the tier is non-existent. :returns: Reference annotations within that tier. """ bucket = [] for (aid, (ref, value, prev, _)) in self.tiers[id_tier][1].items(): refann = self.get_parent_aligned_annotation(ref) bucket.append((self.timeslots[refann[0]], self.timeslots[refann[1]], value, refann[2])) # depends on [control=['for'], data=[]] return bucket
def now_millis(absolute=False) -> int:
    """Return current millis as an integer, absolute since the Unix epoch
    or relative to EPOCH_MICROS by default."""
    millis = int(time.time() * 1e3)
    if absolute:
        return millis
    return millis - EPOCH_MICROS // 1000
def function[now_millis, parameter[absolute]]: constant[Return current millis since epoch as integer.] variable[millis] assign[=] call[name[int], parameter[binary_operation[call[name[time].time, parameter[]] * constant[1000.0]]]] if name[absolute] begin[:] return[name[millis]] return[binary_operation[name[millis] - binary_operation[name[EPOCH_MICROS] <ast.FloorDiv object at 0x7da2590d6bc0> constant[1000]]]]
keyword[def] identifier[now_millis] ( identifier[absolute] = keyword[False] )-> identifier[int] : literal[string] identifier[millis] = identifier[int] ( identifier[time] . identifier[time] ()* literal[int] ) keyword[if] identifier[absolute] : keyword[return] identifier[millis] keyword[return] identifier[millis] - identifier[EPOCH_MICROS] // literal[int]
def now_millis(absolute=False) -> int: """Return current millis as an integer, absolute since the Unix epoch or relative to EPOCH_MICROS by default.""" millis = int(time.time() * 1000.0) if absolute: return millis # depends on [control=['if'], data=[]] return millis - EPOCH_MICROS // 1000
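The unit handling above: EPOCH_MICROS is in microseconds, so it is floor-divided by 1000 before being subtracted from a millisecond value. With an invented constant:

import time

EPOCH_MICROS = 1_500_000_000_000_000  # hypothetical module constant, in micros

millis = int(time.time() * 1e3)
relative = millis - EPOCH_MICROS // 1000  # micros -> millis, then subtract
print(relative)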
def paintEvent(self, event):
        """Paint the widget

        :param event: the triggering paint event
        :type event: QtGui.QPaintEvent
        :returns: None
        :rtype: None
        :raises: None
        """
        if not self.toPlainText() and not self.hasFocus() and self._placeholder:
            p = QtGui.QPainter(self.viewport())
            p.setClipping(False)
            col = self.palette().text().color()
            col.setAlpha(128)
            oldpen = p.pen()
            p.setPen(col)
            p.drawText(self.viewport().geometry(),
                       QtCore.Qt.AlignLeft | QtCore.Qt.AlignTop,
                       self._placeholder)
            p.setPen(oldpen)
        else:
            return super(JB_PlainTextEdit, self).paintEvent(event)
def function[paintEvent, parameter[self, event]]: constant[Paint the widget :param event: :type event: :returns: None :rtype: None :raises: None ] if <ast.BoolOp object at 0x7da18fe917e0> begin[:] variable[p] assign[=] call[name[QtGui].QPainter, parameter[call[name[self].viewport, parameter[]]]] call[name[p].setClipping, parameter[constant[False]]] variable[col] assign[=] call[call[call[name[self].palette, parameter[]].text, parameter[]].color, parameter[]] call[name[col].setAlpha, parameter[constant[128]]] variable[oldpen] assign[=] call[name[p].pen, parameter[]] call[name[p].setPen, parameter[name[col]]] call[name[p].drawText, parameter[call[call[name[self].viewport, parameter[]].geometry, parameter[]], binary_operation[name[QtCore].Qt.AlignLeft <ast.BitOr object at 0x7da2590d6aa0> name[QtCore].Qt.AlignTop], name[self]._placeholder]] call[name[p].setPen, parameter[name[oldpen]]]
keyword[def] identifier[paintEvent] ( identifier[self] , identifier[event] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[toPlainText] () keyword[and] keyword[not] identifier[self] . identifier[hasFocus] () keyword[and] identifier[self] . identifier[_placeholder] : identifier[p] = identifier[QtGui] . identifier[QPainter] ( identifier[self] . identifier[viewport] ()) identifier[p] . identifier[setClipping] ( keyword[False] ) identifier[col] = identifier[self] . identifier[palette] (). identifier[text] (). identifier[color] () identifier[col] . identifier[setAlpha] ( literal[int] ) identifier[oldpen] = identifier[p] . identifier[pen] () identifier[p] . identifier[setPen] ( identifier[col] ) identifier[p] . identifier[drawText] ( identifier[self] . identifier[viewport] (). identifier[geometry] (), identifier[QtCore] . identifier[Qt] . identifier[AlignLeft] | identifier[QtCore] . identifier[Qt] . identifier[AlignTop] , identifier[self] . identifier[_placeholder] ) identifier[p] . identifier[setPen] ( identifier[oldpen] ) keyword[else] : keyword[return] identifier[super] ( identifier[JB_PlainTextEdit] , identifier[self] ). identifier[paintEvent] ( identifier[event] )
def paintEvent(self, event): """Paint the widget :param event: the triggering paint event :type event: QtGui.QPaintEvent :returns: None :rtype: None :raises: None """ if not self.toPlainText() and (not self.hasFocus()) and self._placeholder: p = QtGui.QPainter(self.viewport()) p.setClipping(False) col = self.palette().text().color() col.setAlpha(128) oldpen = p.pen() p.setPen(col) p.drawText(self.viewport().geometry(), QtCore.Qt.AlignLeft | QtCore.Qt.AlignTop, self._placeholder) p.setPen(oldpen) # depends on [control=['if'], data=[]] else: return super(JB_PlainTextEdit, self).paintEvent(event)
def last_year(date_):
    '''
    Returns the same date 1 year ago.

    Args:
        date_ (datetime or datetime.date)

    Returns:
        (datetime or datetime.date)

    Raises:
        -
    '''
    day = 28 if date_.day == 29 and date_.month == 2 else date_.day
    return datetime.date(date_.year-1, date_.month, day)
def function[last_year, parameter[date_]]: constant[ Returns the same date 1 year ago. Args: date (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: - ] variable[day] assign[=] <ast.IfExp object at 0x7da1b135ea40> return[call[name[datetime].date, parameter[binary_operation[name[date_].year - constant[1]], name[date_].month, name[day]]]]
keyword[def] identifier[last_year] ( identifier[date_] ): literal[string] identifier[day] = literal[int] keyword[if] identifier[date_] . identifier[day] == literal[int] keyword[and] identifier[date_] . identifier[month] == literal[int] keyword[else] identifier[date_] . identifier[day] keyword[return] identifier[datetime] . identifier[date] ( identifier[date_] . identifier[year] - literal[int] , identifier[date_] . identifier[month] , identifier[day] )
def last_year(date_): """ Returns the same date 1 year ago. Args: date_ (datetime or datetime.date) Returns: (datetime or datetime.date) Raises: - """ day = 28 if date_.day == 29 and date_.month == 2 else date_.day return datetime.date(date_.year - 1, date_.month, day)
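The day=28 clamp exists for Feb 29 of leap years; exercising the helper above:

import datetime

print(last_year(datetime.date(2020, 2, 29)))  # 2019-02-28 (clamped)
print(last_year(datetime.date(2021, 3, 1)))   # 2020-03-01 (day unchanged)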
def load_crmod_volt(self, filename):
        """Load a CRMod measurement file (commonly called volt.dat)

        Parameters
        ----------
        filename: string
            path to filename

        Returns
        -------
        list
            list of measurement ids
        """
        with open(filename, 'r') as fid:
            nr_of_configs = int(fid.readline().strip())
            measurements = np.loadtxt(fid)
            if nr_of_configs != measurements.shape[0]:
                raise Exception(
                    'indicated number of measurements does not equal ' +
                    'the actual number of measurements')

        ABMN = self._crmod_to_abmn(measurements[:, 0:2])
        if self.configs is None:
            self.configs = ABMN
        else:
            # check that configs match
            if not np.all(ABMN == self.configs):
                raise Exception(
                    'previously stored configurations do not match new ' +
                    'configurations')

        # add data
        cid_mag = self.add_measurements(measurements[:, 2])
        cid_pha = self.add_measurements(measurements[:, 3])

        return [cid_mag, cid_pha]
def function[load_crmod_volt, parameter[self, filename]]: constant[Load a CRMod measurement file (commonly called volt.dat) Parameters ---------- filename: string path to filename Returns ------- list list of measurement ids ] with call[name[open], parameter[name[filename], constant[r]]] begin[:] variable[nr_of_configs] assign[=] call[name[int], parameter[call[call[name[fid].readline, parameter[]].strip, parameter[]]]] variable[measurements] assign[=] call[name[np].loadtxt, parameter[name[fid]]] if compare[name[nr_of_configs] not_equal[!=] call[name[measurements].shape][constant[0]]] begin[:] <ast.Raise object at 0x7da204566110> variable[ABMN] assign[=] call[name[self]._crmod_to_abmn, parameter[call[name[measurements]][tuple[[<ast.Slice object at 0x7da18f09d300>, <ast.Slice object at 0x7da18f09ceb0>]]]]] if compare[name[self].configs is constant[None]] begin[:] name[self].configs assign[=] name[ABMN] variable[cid_mag] assign[=] call[name[self].add_measurements, parameter[call[name[measurements]][tuple[[<ast.Slice object at 0x7da18f09d060>, <ast.Constant object at 0x7da18f09ce20>]]]]] variable[cid_pha] assign[=] call[name[self].add_measurements, parameter[call[name[measurements]][tuple[[<ast.Slice object at 0x7da18f09eef0>, <ast.Constant object at 0x7da18f09c670>]]]]] return[list[[<ast.Name object at 0x7da18f09cb80>, <ast.Name object at 0x7da18f09dbd0>]]]
keyword[def] identifier[load_crmod_volt] ( identifier[self] , identifier[filename] ): literal[string] keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fid] : identifier[nr_of_configs] = identifier[int] ( identifier[fid] . identifier[readline] (). identifier[strip] ()) identifier[measurements] = identifier[np] . identifier[loadtxt] ( identifier[fid] ) keyword[if] identifier[nr_of_configs] != identifier[measurements] . identifier[shape] [ literal[int] ]: keyword[raise] identifier[Exception] ( literal[string] + literal[string] ) identifier[ABMN] = identifier[self] . identifier[_crmod_to_abmn] ( identifier[measurements] [:, literal[int] : literal[int] ]) keyword[if] identifier[self] . identifier[configs] keyword[is] keyword[None] : identifier[self] . identifier[configs] = identifier[ABMN] keyword[else] : keyword[if] keyword[not] identifier[np] . identifier[all] ( identifier[ABMN] == identifier[self] . identifier[configs] ): keyword[raise] identifier[Exception] ( literal[string] + literal[string] ) identifier[cid_mag] = identifier[self] . identifier[add_measurements] ( identifier[measurements] [:, literal[int] ]) identifier[cid_pha] = identifier[self] . identifier[add_measurements] ( identifier[measurements] [:, literal[int] ]) keyword[return] [ identifier[cid_mag] , identifier[cid_pha] ]
def load_crmod_volt(self, filename): """Load a CRMod measurement file (commonly called volt.dat) Parameters ---------- filename: string path to filename Returns ------- list list of measurement ids """ with open(filename, 'r') as fid: nr_of_configs = int(fid.readline().strip()) measurements = np.loadtxt(fid) if nr_of_configs != measurements.shape[0]: raise Exception('indicated number of measurements does not equal ' + 'the actual number of measurements') # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['fid']] ABMN = self._crmod_to_abmn(measurements[:, 0:2]) if self.configs is None: self.configs = ABMN # depends on [control=['if'], data=[]] # check that configs match elif not np.all(ABMN == self.configs): raise Exception('previously stored configurations do not match new ' + 'configurations') # depends on [control=['if'], data=[]] # add data cid_mag = self.add_measurements(measurements[:, 2]) cid_pha = self.add_measurements(measurements[:, 3]) return [cid_mag, cid_pha]
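The header-then-matrix parse can be checked against an in-memory file; the numbers are illustrative, not real measurements:

import io

import numpy as np

content = '2\n10002 30004 0.123 -0.5\n10003 30004 0.110 -0.4\n'
fid = io.StringIO(content)
nr_of_configs = int(fid.readline().strip())  # leading line holds the row count
measurements = np.loadtxt(fid)
print(nr_of_configs, measurements.shape)     # 2 (2, 4)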
def get_subgraph(self, starting_node, block_addresses):
        """
        Get a sub-graph out of a bunch of basic block addresses.

        :param CFGNode starting_node: The beginning of the subgraph
        :param iterable block_addresses: A collection of block addresses that should be included in the subgraph if
                                        there is a path between `starting_node` and a CFGNode with the specified
                                        address, and all nodes on the path should also be included in the subgraph.
        :return: A new CFG that only contains the specific subgraph.
        :rtype: CFGEmulated
        """

        graph = networkx.DiGraph()

        if starting_node not in self.graph:
            raise AngrCFGError('get_subgraph(): the specified "starting_node" %s does not exist in the current CFG.'
                               % starting_node
                               )

        addr_set = set(block_addresses)

        graph.add_node(starting_node)
        queue = [ starting_node ]

        while queue:
            node = queue.pop()
            for _, dst, data in self.graph.out_edges([node], data=True):
                if dst not in graph and dst.addr in addr_set:
                    graph.add_edge(node, dst, **data)
                    queue.append(dst)

        cfg = self.copy()
        cfg._graph = graph
        cfg._starts = (starting_node.addr, )

        return cfg
def function[get_subgraph, parameter[self, starting_node, block_addresses]]: constant[ Get a sub-graph out of a bunch of basic block addresses. :param CFGNode starting_node: The beginning of the subgraph :param iterable block_addresses: A collection of block addresses that should be included in the subgraph if there is a path between `starting_node` and a CFGNode with the specified address, and all nodes on the path should also be included in the subgraph. :return: A new CFG that only contain the specific subgraph. :rtype: CFGEmulated ] variable[graph] assign[=] call[name[networkx].DiGraph, parameter[]] if compare[name[starting_node] <ast.NotIn object at 0x7da2590d7190> name[self].graph] begin[:] <ast.Raise object at 0x7da1b26ae920> variable[addr_set] assign[=] call[name[set], parameter[name[block_addresses]]] call[name[graph].add_node, parameter[name[starting_node]]] variable[queue] assign[=] list[[<ast.Name object at 0x7da1b26aeef0>]] while name[queue] begin[:] variable[node] assign[=] call[name[queue].pop, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b26aead0>, <ast.Name object at 0x7da1b26adba0>, <ast.Name object at 0x7da1b26afca0>]]] in starred[call[name[self].graph.out_edges, parameter[list[[<ast.Name object at 0x7da1b26aca90>]]]]] begin[:] if <ast.BoolOp object at 0x7da1b26aece0> begin[:] call[name[graph].add_edge, parameter[name[node], name[dst]]] call[name[queue].append, parameter[name[dst]]] variable[cfg] assign[=] call[name[self].copy, parameter[]] name[cfg]._graph assign[=] name[graph] name[cfg]._starts assign[=] tuple[[<ast.Attribute object at 0x7da1b26afc70>]] return[name[cfg]]
keyword[def] identifier[get_subgraph] ( identifier[self] , identifier[starting_node] , identifier[block_addresses] ): literal[string] identifier[graph] = identifier[networkx] . identifier[DiGraph] () keyword[if] identifier[starting_node] keyword[not] keyword[in] identifier[self] . identifier[graph] : keyword[raise] identifier[AngrCFGError] ( literal[string] % identifier[starting_node] ) identifier[addr_set] = identifier[set] ( identifier[block_addresses] ) identifier[graph] . identifier[add_node] ( identifier[starting_node] ) identifier[queue] =[ identifier[starting_node] ] keyword[while] identifier[queue] : identifier[node] = identifier[queue] . identifier[pop] () keyword[for] identifier[_] , identifier[dst] , identifier[data] keyword[in] identifier[self] . identifier[graph] . identifier[out_edges] ([ identifier[node] ], identifier[data] = keyword[True] ): keyword[if] identifier[dst] keyword[not] keyword[in] identifier[graph] keyword[and] identifier[dst] . identifier[addr] keyword[in] identifier[addr_set] : identifier[graph] . identifier[add_edge] ( identifier[node] , identifier[dst] ,** identifier[data] ) identifier[queue] . identifier[append] ( identifier[dst] ) identifier[cfg] = identifier[self] . identifier[copy] () identifier[cfg] . identifier[_graph] = identifier[graph] identifier[cfg] . identifier[_starts] =( identifier[starting_node] . identifier[addr] ,) keyword[return] identifier[cfg]
def get_subgraph(self, starting_node, block_addresses):
    """
    Get a sub-graph out of a bunch of basic block addresses.

    :param CFGNode starting_node: The beginning of the subgraph
    :param iterable block_addresses: A collection of block addresses that should be included in the subgraph if
                                    there is a path between `starting_node` and a CFGNode with the specified
                                    address, and all nodes on the path should also be included in the subgraph.
    :return: A new CFG that only contains the specified subgraph. :rtype: CFGEmulated """ graph = networkx.DiGraph() if starting_node not in self.graph: raise AngrCFGError('get_subgraph(): the specified "starting_node" %s does not exist in the current CFG.' % starting_node) # depends on [control=['if'], data=['starting_node']] addr_set = set(block_addresses) graph.add_node(starting_node) queue = [starting_node] while queue: node = queue.pop() for (_, dst, data) in self.graph.out_edges([node], data=True): if dst not in graph and dst.addr in addr_set: graph.add_edge(node, dst, **data) queue.append(dst) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['while'], data=[]] cfg = self.copy() cfg._graph = graph cfg._starts = (starting_node.addr,) return cfg
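A sketch of calling get_subgraph on an angr CFGEmulated analysis; the binary path is a placeholder and minor API details may vary across angr versions.

import angr  # assumes angr is installed

p = angr.Project('/bin/true', auto_load_libs=False)  # placeholder binary
cfg = p.analyses.CFGEmulated()
start = cfg.get_any_node(p.entry)  # node at the entry point
# keep only blocks reachable from the entry among the listed addresses
sub = cfg.get_subgraph(start, [n.addr for n in cfg.graph.nodes()])
print(len(sub.graph.nodes()), 'nodes in the subgraph')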
def get_tournaments(self, only_active=True):
        """Get all tournaments

        Args:
            only_active (bool): Flag to indicate whether only active
                tournaments should be returned or all of them. Defaults
                to True.

        Returns:
            list of dicts: list of tournaments

            Each tournament's dict contains the following items:

                * id (`str`)
                * name (`str`)
                * tournament (`int`)
                * active (`bool`)

        Example:
            >>> NumerAPI().get_tournaments()
            [ { 'id': '2ecf30f4-4b4f-42e9-8e72-cc5bd61c2733',
                'name': 'alpha',
                'tournament': 1,
                'active': True},
             { 'id': '6ff44cca-263d-40bd-b029-a1ab8f42798f',
                'name': 'bravo',
                'tournament': 2,
                'active': True},
             { 'id': 'ebf0d62b-0f60-4550-bcec-c737b168c65d',
                'name': 'charlie',
                'tournament': 3,
                'active': False},
             { 'id': '5fac6ece-2726-4b66-9790-95866b3a77fc',
                'name': 'delta',
                'tournament': 4,
                'active': True}]
        """
        query = """
            query {
              tournaments {
                id
                name
                tournament
                active
              }
            }
        """
        data = self.raw_query(query)['data']['tournaments'] if only_active: data = [d for d in data if d['active']] return data
def function[get_tournaments, parameter[self, only_active]]: constant[Get all tournaments Args: only_active (bool): Flag to indicate of only active tournaments should be returned or all of them. Defaults to True. Returns: list of dicts: list of tournaments Each tournaments' dict contains the following items: * id (`str`) * name (`str`) * tournament (`int`) * active (`bool`) Example: >>> NumerAPI().get_tournaments() [ { 'id': '2ecf30f4-4b4f-42e9-8e72-cc5bd61c2733', 'name': 'alpha', 'tournament': 1, 'active': True}, { 'id': '6ff44cca-263d-40bd-b029-a1ab8f42798f', 'name': 'bravo', 'tournament': 2, 'active': True}, { 'id': 'ebf0d62b-0f60-4550-bcec-c737b168c65d', 'name': 'charlie', 'tournament': 3 'active': False}, { 'id': '5fac6ece-2726-4b66-9790-95866b3a77fc', 'name': 'delta', 'tournament': 4, 'active': True}] ] variable[query] assign[=] constant[ query { tournaments { id name tournament active } } ] variable[data] assign[=] call[call[call[name[self].raw_query, parameter[name[query]]]][constant[data]]][constant[tournaments]] if name[only_active] begin[:] variable[data] assign[=] <ast.ListComp object at 0x7da1b0c64040> return[name[data]]
keyword[def] identifier[get_tournaments] ( identifier[self] , identifier[only_active] = keyword[True] ): literal[string] identifier[query] = literal[string] identifier[data] = identifier[self] . identifier[raw_query] ( identifier[query] )[ literal[string] ][ literal[string] ] keyword[if] identifier[only_active] : identifier[data] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[data] keyword[if] identifier[d] [ literal[string] ]] keyword[return] identifier[data]
def get_tournaments(self, only_active=True): """Get all tournaments Args: only_active (bool): Flag to indicate whether only active tournaments should be returned or all of them. Defaults to True. Returns: list of dicts: list of tournaments Each tournament's dict contains the following items: * id (`str`) * name (`str`) * tournament (`int`) * active (`bool`) Example: >>> NumerAPI().get_tournaments() [ { 'id': '2ecf30f4-4b4f-42e9-8e72-cc5bd61c2733', 'name': 'alpha', 'tournament': 1, 'active': True}, { 'id': '6ff44cca-263d-40bd-b029-a1ab8f42798f', 'name': 'bravo', 'tournament': 2, 'active': True}, { 'id': 'ebf0d62b-0f60-4550-bcec-c737b168c65d', 'name': 'charlie', 'tournament': 3, 'active': False}, { 'id': '5fac6ece-2726-4b66-9790-95866b3a77fc', 'name': 'delta', 'tournament': 4, 'active': True}] """ query = '\n query {\n tournaments {\n id\n name\n tournament\n active\n }\n }\n ' data = self.raw_query(query)['data']['tournaments'] if only_active: data = [d for d in data if d['active']] # depends on [control=['if'], data=[]] return data
def __choices2tkvalues(self, choices): """choices: iterable of key, value pairs""" values = [] for k, v in choices: values.append(v) return values
def function[__choices2tkvalues, parameter[self, choices]]: constant[choices: iterable of key, value pairs] variable[values] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b17cded0>, <ast.Name object at 0x7da1b17cf100>]]] in starred[name[choices]] begin[:] call[name[values].append, parameter[name[v]]] return[name[values]]
keyword[def] identifier[__choices2tkvalues] ( identifier[self] , identifier[choices] ): literal[string] identifier[values] =[] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[choices] : identifier[values] . identifier[append] ( identifier[v] ) keyword[return] identifier[values]
def __choices2tkvalues(self, choices): """choices: iterable of key, value pairs""" values = [] for (k, v) in choices: values.append(v) # depends on [control=['for'], data=[]] return values
def get_conn(self): """ Establishes a connection depending on the security mode set via config or environment variable. :return: a hdfscli InsecureClient or KerberosClient object. :rtype: hdfs.InsecureClient or hdfs.ext.kerberos.KerberosClient """ connections = self.get_connections(self.webhdfs_conn_id) for connection in connections: try: self.log.debug('Trying namenode %s', connection.host) client = self._get_client(connection) client.status('/') self.log.debug('Using namenode %s for hook', connection.host) return client except HdfsError as hdfs_error: self.log.debug('Read operation on namenode %s failed with error: %s', connection.host, hdfs_error) hosts = [connection.host for connection in connections] error_message = 'Read operations failed on the namenodes below:\n{hosts}'.format( hosts='\n'.join(hosts)) raise AirflowWebHDFSHookException(error_message)
def function[get_conn, parameter[self]]: constant[ Establishes a connection depending on the security mode set via config or environment variable. :return: a hdfscli InsecureClient or KerberosClient object. :rtype: hdfs.InsecureClient or hdfs.ext.kerberos.KerberosClient ] variable[connections] assign[=] call[name[self].get_connections, parameter[name[self].webhdfs_conn_id]] for taget[name[connection]] in starred[name[connections]] begin[:] <ast.Try object at 0x7da1b034aa40> variable[hosts] assign[=] <ast.ListComp object at 0x7da1b0348ac0> variable[error_message] assign[=] call[constant[Read operations failed on the namenodes below: {hosts}].format, parameter[]] <ast.Raise object at 0x7da1b0348dc0>
keyword[def] identifier[get_conn] ( identifier[self] ): literal[string] identifier[connections] = identifier[self] . identifier[get_connections] ( identifier[self] . identifier[webhdfs_conn_id] ) keyword[for] identifier[connection] keyword[in] identifier[connections] : keyword[try] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[connection] . identifier[host] ) identifier[client] = identifier[self] . identifier[_get_client] ( identifier[connection] ) identifier[client] . identifier[status] ( literal[string] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[connection] . identifier[host] ) keyword[return] identifier[client] keyword[except] identifier[HdfsError] keyword[as] identifier[hdfs_error] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[connection] . identifier[host] , identifier[hdfs_error] ) identifier[hosts] =[ identifier[connection] . identifier[host] keyword[for] identifier[connection] keyword[in] identifier[connections] ] identifier[error_message] = literal[string] . identifier[format] ( identifier[hosts] = literal[string] . identifier[join] ( identifier[hosts] )) keyword[raise] identifier[AirflowWebHDFSHookException] ( identifier[error_message] )
def get_conn(self): """ Establishes a connection depending on the security mode set via config or environment variable. :return: a hdfscli InsecureClient or KerberosClient object. :rtype: hdfs.InsecureClient or hdfs.ext.kerberos.KerberosClient """ connections = self.get_connections(self.webhdfs_conn_id) for connection in connections: try: self.log.debug('Trying namenode %s', connection.host) client = self._get_client(connection) client.status('/') self.log.debug('Using namenode %s for hook', connection.host) return client # depends on [control=['try'], data=[]] except HdfsError as hdfs_error: self.log.debug('Read operation on namenode %s failed with error: %s', connection.host, hdfs_error) # depends on [control=['except'], data=['hdfs_error']] # depends on [control=['for'], data=['connection']] hosts = [connection.host for connection in connections] error_message = 'Read operations failed on the namenodes below:\n{hosts}'.format(hosts='\n'.join(hosts)) raise AirflowWebHDFSHookException(error_message)
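A minimal sketch of using the hook above; it assumes an Airflow 1.x import path and a configured connection id 'webhdfs_default', both of which may differ in your deployment.

from airflow.hooks.webhdfs_hook import WebHDFSHook  # import path assumed for Airflow 1.x

hook = WebHDFSHook(webhdfs_conn_id='webhdfs_default')
client = hook.get_conn()  # tries each configured namenode until one answers
print(client.status('/'))  # same liveness probe the hook itself performs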
def _create_arc(self, inst, destination, placeable=None):
        """
        Returns a list of coordinates to arrive at the destination coordinate
        """
        this_container = None
        if isinstance(placeable, containers.Well):
            this_container = placeable.get_parent()
        elif isinstance(placeable, containers.WellSeries):
            this_container = placeable.get_parent()
        elif isinstance(placeable, containers.Container):
            this_container = placeable

        if this_container and self._prev_container == this_container:
            # movements that stay within the same container do not need to
            # avoid other containers on the deck, so the travel height of
            # arced movements can be relative to just that one container
            arc_top = self.max_placeable_height_on_deck(this_container)
            arc_top += TIP_CLEARANCE_LABWARE
        elif self._use_safest_height:
            # bring the pipettes up as high as possible while calibrating
            arc_top = inst._max_deck_height()
        else:
            # bring pipette up above the tallest container currently on deck
            arc_top = self.max_deck_height() + TIP_CLEARANCE_DECK

        self._prev_container = this_container

        # if instrument is currently taller than arc_top, don't move down
        _, _, pip_z = pose_tracker.absolute(self.poses, inst)

        arc_top = max(arc_top, destination[2], pip_z)
        arc_top = min(arc_top, inst._max_deck_height())

        strategy = [
            {'z': arc_top},
            {'x': destination[0], 'y': destination[1]},
            {'z': destination[2]}
        ]

        return strategy
def function[_create_arc, parameter[self, inst, destination, placeable]]: constant[ Returns a list of coordinates to arrive to the destination coordinate ] variable[this_container] assign[=] constant[None] if call[name[isinstance], parameter[name[placeable], name[containers].Well]] begin[:] variable[this_container] assign[=] call[name[placeable].get_parent, parameter[]] if <ast.BoolOp object at 0x7da1b26ad270> begin[:] variable[arc_top] assign[=] call[name[self].max_placeable_height_on_deck, parameter[name[this_container]]] <ast.AugAssign object at 0x7da2044c3520> name[self]._prev_container assign[=] name[this_container] <ast.Tuple object at 0x7da2044c01f0> assign[=] call[name[pose_tracker].absolute, parameter[name[self].poses, name[inst]]] variable[arc_top] assign[=] call[name[max], parameter[name[arc_top], call[name[destination]][constant[2]], name[pip_z]]] variable[arc_top] assign[=] call[name[min], parameter[name[arc_top], call[name[inst]._max_deck_height, parameter[]]]] variable[strategy] assign[=] list[[<ast.Dict object at 0x7da2043476d0>, <ast.Dict object at 0x7da204344ac0>, <ast.Dict object at 0x7da204347940>]] return[name[strategy]]
keyword[def] identifier[_create_arc] ( identifier[self] , identifier[inst] , identifier[destination] , identifier[placeable] = keyword[None] ): literal[string] identifier[this_container] = keyword[None] keyword[if] identifier[isinstance] ( identifier[placeable] , identifier[containers] . identifier[Well] ): identifier[this_container] = identifier[placeable] . identifier[get_parent] () keyword[elif] identifier[isinstance] ( identifier[placeable] , identifier[containers] . identifier[WellSeries] ): identifier[this_container] = identifier[placeable] . identifier[get_parent] () keyword[elif] identifier[isinstance] ( identifier[placeable] , identifier[containers] . identifier[Container] ): identifier[this_container] = identifier[placeable] keyword[if] identifier[this_container] keyword[and] identifier[self] . identifier[_prev_container] == identifier[this_container] : identifier[arc_top] = identifier[self] . identifier[max_placeable_height_on_deck] ( identifier[this_container] ) identifier[arc_top] += identifier[TIP_CLEARANCE_LABWARE] keyword[elif] identifier[self] . identifier[_use_safest_height] : identifier[arc_top] = identifier[inst] . identifier[_max_deck_height] () keyword[else] : identifier[arc_top] = identifier[self] . identifier[max_deck_height] ()+ identifier[TIP_CLEARANCE_DECK] identifier[self] . identifier[_prev_container] = identifier[this_container] identifier[_] , identifier[_] , identifier[pip_z] = identifier[pose_tracker] . identifier[absolute] ( identifier[self] . identifier[poses] , identifier[inst] ) identifier[arc_top] = identifier[max] ( identifier[arc_top] , identifier[destination] [ literal[int] ], identifier[pip_z] ) identifier[arc_top] = identifier[min] ( identifier[arc_top] , identifier[inst] . identifier[_max_deck_height] ()) identifier[strategy] =[ { literal[string] : identifier[arc_top] }, { literal[string] : identifier[destination] [ literal[int] ], literal[string] : identifier[destination] [ literal[int] ]}, { literal[string] : identifier[destination] [ literal[int] ]} ] keyword[return] identifier[strategy]
def _create_arc(self, inst, destination, placeable=None):
    """
    Returns a list of coordinates to arrive at the destination coordinate
    """
    this_container = None
    if isinstance(placeable, containers.Well):
        this_container = placeable.get_parent() # depends on [control=['if'], data=[]]
    elif isinstance(placeable, containers.WellSeries):
        this_container = placeable.get_parent() # depends on [control=['if'], data=[]]
    elif isinstance(placeable, containers.Container):
        this_container = placeable # depends on [control=['if'], data=[]]
    if this_container and self._prev_container == this_container:
        # movements that stay within the same container do not need to
        # avoid other containers on the deck, so the travel height of
        # arced movements can be relative to just that one container
        arc_top = self.max_placeable_height_on_deck(this_container)
        arc_top += TIP_CLEARANCE_LABWARE # depends on [control=['if'], data=[]]
    elif self._use_safest_height:
        # bring the pipettes up as high as possible while calibrating
        arc_top = inst._max_deck_height() # depends on [control=['if'], data=[]]
    else:
        # bring pipette up above the tallest container currently on deck
        arc_top = self.max_deck_height() + TIP_CLEARANCE_DECK
    self._prev_container = this_container
    # if instrument is currently taller than arc_top, don't move down
    (_, _, pip_z) = pose_tracker.absolute(self.poses, inst)
    arc_top = max(arc_top, destination[2], pip_z)
    arc_top = min(arc_top, inst._max_deck_height())
    strategy = [{'z': arc_top}, {'x': destination[0], 'y': destination[1]}, {'z': destination[2]}]
    return strategy
def set_bg(self, bg, key="data", attrs={}):
        """Set the background data

        Parameters
        ----------
        bg: numbers.Real, 2d ndarray, ImageData, or h5py.Dataset
            The background data. If `bg` is an `h5py.Dataset` object,
            it must exist in the same hdf5 file (a hard link is created).
            If set to `None`, the data will be removed.
        key: str
            One of :const:`VALID_BG_KEYS`
        attrs: dict
            Dictionary of background attributes

        See Also
        --------
        del_bg: removing background data
        """
        if key not in VALID_BG_KEYS:
            raise ValueError("Invalid bg key: {}".format(key))
        # remove previous background key
        if key in self.h5["bg_data"]:
            del self.h5["bg_data"][key]
        # set background
        if isinstance(bg, (numbers.Real, np.ndarray)):
            dset = write_image_dataset(group=self.h5["bg_data"],
                                       key=key,
                                       data=bg,
                                       h5dtype=self.h5dtype)
            for kw in attrs:
                dset.attrs[kw] = attrs[kw]
        elif isinstance(bg, h5py.Dataset):
            # Create a hard link
            # (This functionality was intended for saving memory when storing
            #  large QPSeries with universal background data, i.e. when using
            #  `QPSeries.add_qpimage` with the `bg_from_idx` keyword.)
            self.h5["bg_data"][key] = bg
        elif bg is not None:
            msg = "Unknown background data type: {}".format(bg)
            raise ValueError(msg)
def function[set_bg, parameter[self, bg, key, attrs]]: constant[Set the background data Parameters ---------- bg: numbers.Real, 2d ndarray, ImageData, or h5py.Dataset The background data. If `bg` is an `h5py.Dataset` object, it must exist in the same hdf5 file (a hard link is created). If set to `None`, the data will be removed. key: str One of :const:`VALID_BG_KEYS`) attrs: dict List of background attributes See Also -------- del_bg: removing background data ] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[VALID_BG_KEYS]] begin[:] <ast.Raise object at 0x7da1b10471c0> if compare[name[key] in call[name[self].h5][constant[bg_data]]] begin[:] <ast.Delete object at 0x7da1b1045330> if call[name[isinstance], parameter[name[bg], tuple[[<ast.Attribute object at 0x7da1b10463b0>, <ast.Attribute object at 0x7da1b1046e00>]]]] begin[:] variable[dset] assign[=] call[name[write_image_dataset], parameter[]] for taget[name[kw]] in starred[name[attrs]] begin[:] call[name[dset].attrs][name[kw]] assign[=] call[name[attrs]][name[kw]]
keyword[def] identifier[set_bg] ( identifier[self] , identifier[bg] , identifier[key] = literal[string] , identifier[attrs] ={}): literal[string] keyword[if] identifier[key] keyword[not] keyword[in] identifier[VALID_BG_KEYS] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[key] )) keyword[if] identifier[key] keyword[in] identifier[self] . identifier[h5] [ literal[string] ]: keyword[del] identifier[self] . identifier[h5] [ literal[string] ][ identifier[key] ] keyword[if] identifier[isinstance] ( identifier[bg] ,( identifier[numbers] . identifier[Real] , identifier[np] . identifier[ndarray] )): identifier[dset] = identifier[write_image_dataset] ( identifier[group] = identifier[self] . identifier[h5] [ literal[string] ], identifier[key] = identifier[key] , identifier[data] = identifier[bg] , identifier[h5dtype] = identifier[self] . identifier[h5dtype] ) keyword[for] identifier[kw] keyword[in] identifier[attrs] : identifier[dset] . identifier[attrs] [ identifier[kw] ]= identifier[attrs] [ identifier[kw] ] keyword[elif] identifier[isinstance] ( identifier[bg] , identifier[h5py] . identifier[Dataset] ): identifier[self] . identifier[h5] [ literal[string] ][ identifier[key] ]= identifier[bg] keyword[elif] identifier[bg] keyword[is] keyword[not] keyword[None] : identifier[msg] = literal[string] . identifier[format] ( identifier[bg] ) keyword[raise] identifier[ValueError] ( identifier[msg] )
def set_bg(self, bg, key='data', attrs={}): """Set the background data Parameters ---------- bg: numbers.Real, 2d ndarray, ImageData, or h5py.Dataset The background data. If `bg` is an `h5py.Dataset` object, it must exist in the same hdf5 file (a hard link is created). If set to `None`, the data will be removed. key: str One of :const:`VALID_BG_KEYS` attrs: dict Dictionary of background attributes See Also -------- del_bg: removing background data """ if key not in VALID_BG_KEYS: raise ValueError('Invalid bg key: {}'.format(key)) # depends on [control=['if'], data=['key']] # remove previous background key if key in self.h5['bg_data']: del self.h5['bg_data'][key] # depends on [control=['if'], data=['key']] # set background if isinstance(bg, (numbers.Real, np.ndarray)): dset = write_image_dataset(group=self.h5['bg_data'], key=key, data=bg, h5dtype=self.h5dtype) for kw in attrs: dset.attrs[kw] = attrs[kw] # depends on [control=['for'], data=['kw']] # depends on [control=['if'], data=[]] elif isinstance(bg, h5py.Dataset): # Create a hard link # (This functionality was intended for saving memory when storing # large QPSeries with universal background data, i.e. when using # `QPSeries.add_qpimage` with the `bg_from_idx` keyword.) self.h5['bg_data'][key] = bg # depends on [control=['if'], data=[]] elif bg is not None: msg = 'Unknown background data type: {}'.format(bg) raise ValueError(msg) # depends on [control=['if'], data=['bg']]
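A sketch, assuming `image_data` is an instance of the qpimage class that owns set_bg; the flat background array is illustrative only.

import numpy as np

bg = np.full((128, 128), 1.5)  # constant-offset background image
image_data.set_bg(bg, key='data', attrs={'source': 'manual'})
image_data.set_bg(None, key='data')  # passing None only removes the stored key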
def grid_destroy_from_ids(oargrid_jobids): """Destroy all the jobs with corresponding ids Args: oargrid_jobids (list): the ``(site, oar_job_id)`` list of tuple identifying the jobs for each site. """ jobs = grid_reload_from_ids(oargrid_jobids) for job in jobs: job.delete() logger.info("Killing the jobs %s" % oargrid_jobids)
def function[grid_destroy_from_ids, parameter[oargrid_jobids]]: constant[Destroy all the jobs with corresponding ids Args: oargrid_jobids (list): the ``(site, oar_job_id)`` list of tuple identifying the jobs for each site. ] variable[jobs] assign[=] call[name[grid_reload_from_ids], parameter[name[oargrid_jobids]]] for taget[name[job]] in starred[name[jobs]] begin[:] call[name[job].delete, parameter[]] call[name[logger].info, parameter[binary_operation[constant[Killing the jobs %s] <ast.Mod object at 0x7da2590d6920> name[oargrid_jobids]]]]
keyword[def] identifier[grid_destroy_from_ids] ( identifier[oargrid_jobids] ): literal[string] identifier[jobs] = identifier[grid_reload_from_ids] ( identifier[oargrid_jobids] ) keyword[for] identifier[job] keyword[in] identifier[jobs] : identifier[job] . identifier[delete] () identifier[logger] . identifier[info] ( literal[string] % identifier[oargrid_jobids] )
def grid_destroy_from_ids(oargrid_jobids): """Destroy all the jobs with corresponding ids Args: oargrid_jobids (list): the ``(site, oar_job_id)`` list of tuple identifying the jobs for each site. """ jobs = grid_reload_from_ids(oargrid_jobids) for job in jobs: job.delete() logger.info('Killing the jobs %s' % oargrid_jobids) # depends on [control=['for'], data=['job']]
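Usage is a one-liner; the (site, oar_job_id) pairs below are placeholders for real Grid'5000 reservations.

# hypothetical reservations on two Grid'5000 sites
oargrid_jobids = [('rennes', 1234567), ('nancy', 7654321)]
grid_destroy_from_ids(oargrid_jobids)  # reloads each job, deletes it, logs the kill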
def INIT_TLS_SESSION(self): """ XXX We should offer the right key according to the client's suites. For now server_rsa_key is only used for RSAkx, but we should try to replace every server_key with both server_rsa_key and server_ecdsa_key. """ self.cur_session = tlsSession(connection_end="server") self.cur_session.server_certs = [self.mycert] self.cur_session.server_key = self.mykey if isinstance(self.mykey, PrivKeyRSA): self.cur_session.server_rsa_key = self.mykey # elif isinstance(self.mykey, PrivKeyECDSA): # self.cur_session.server_ecdsa_key = self.mykey raise self.WAITING_CLIENTFLIGHT1()
def function[INIT_TLS_SESSION, parameter[self]]: constant[ XXX We should offer the right key according to the client's suites. For now server_rsa_key is only used for RSAkx, but we should try to replace every server_key with both server_rsa_key and server_ecdsa_key. ] name[self].cur_session assign[=] call[name[tlsSession], parameter[]] name[self].cur_session.server_certs assign[=] list[[<ast.Attribute object at 0x7da1b21be350>]] name[self].cur_session.server_key assign[=] name[self].mykey if call[name[isinstance], parameter[name[self].mykey, name[PrivKeyRSA]]] begin[:] name[self].cur_session.server_rsa_key assign[=] name[self].mykey <ast.Raise object at 0x7da1b21bd270>
keyword[def] identifier[INIT_TLS_SESSION] ( identifier[self] ): literal[string] identifier[self] . identifier[cur_session] = identifier[tlsSession] ( identifier[connection_end] = literal[string] ) identifier[self] . identifier[cur_session] . identifier[server_certs] =[ identifier[self] . identifier[mycert] ] identifier[self] . identifier[cur_session] . identifier[server_key] = identifier[self] . identifier[mykey] keyword[if] identifier[isinstance] ( identifier[self] . identifier[mykey] , identifier[PrivKeyRSA] ): identifier[self] . identifier[cur_session] . identifier[server_rsa_key] = identifier[self] . identifier[mykey] keyword[raise] identifier[self] . identifier[WAITING_CLIENTFLIGHT1] ()
def INIT_TLS_SESSION(self): """ XXX We should offer the right key according to the client's suites. For now server_rsa_key is only used for RSAkx, but we should try to replace every server_key with both server_rsa_key and server_ecdsa_key. """ self.cur_session = tlsSession(connection_end='server') self.cur_session.server_certs = [self.mycert] self.cur_session.server_key = self.mykey if isinstance(self.mykey, PrivKeyRSA): self.cur_session.server_rsa_key = self.mykey # depends on [control=['if'], data=[]] # elif isinstance(self.mykey, PrivKeyECDSA): # self.cur_session.server_ecdsa_key = self.mykey raise self.WAITING_CLIENTFLIGHT1()
def complex2wav(filename,rate,x):
    """
    Save a complex signal vector to a wav file for compact binary
    storage of 16-bit signal samples. The wav left and right channels
    are used to save real (I) and imaginary (Q) values. The rate is just
    a convenient way of documenting the original signal sample rate.

    complex2wav(filename,rate,x)

    Mark Wickert April 2014
    """
    x_wav = np.hstack((np.array([x.real]).T,np.array([x.imag]).T))
    ss.to_wav(filename, rate, x_wav)
    print('Saved as binary wav file with (I,Q)<=>(L,R)')
def function[complex2wav, parameter[filename, rate, x]]: constant[ Save a complex signal vector to a wav file for compact binary storage of 16-bit signal samples. The wav left and right channels are used to save real (I) and imaginary (Q) values. The rate is just a convent way of documenting the original signal sample rate. complex2wav(filename,rate,x) Mark Wickert April 2014 ] variable[x_wav] assign[=] call[name[np].hstack, parameter[tuple[[<ast.Attribute object at 0x7da20c6e6cb0>, <ast.Attribute object at 0x7da20c6e75b0>]]]] call[name[ss].to_wav, parameter[name[filename], name[rate], name[x_wav]]] call[name[print], parameter[constant[Saved as binary wav file with (I,Q)<=>(L,R)]]]
keyword[def] identifier[complex2wav] ( identifier[filename] , identifier[rate] , identifier[x] ): literal[string] identifier[x_wav] = identifier[np] . identifier[hstack] (( identifier[np] . identifier[array] ([ identifier[x] . identifier[real] ]). identifier[T] , identifier[np] . identifier[array] ([ identifier[x] . identifier[imag] ]). identifier[T] )) identifier[ss] . identifier[to_wav] ( identifier[filename] , identifier[rate] , identifier[x_wav] ) identifier[print] ( literal[string] )
def complex2wav(filename, rate, x):
    """
    Save a complex signal vector to a wav file for compact binary
    storage of 16-bit signal samples. The wav left and right channels
    are used to save real (I) and imaginary (Q) values. The rate is just
    a convenient way of documenting the original signal sample rate.

    complex2wav(filename,rate,x)

    Mark Wickert April 2014
    """
    x_wav = np.hstack((np.array([x.real]).T, np.array([x.imag]).T))
    ss.to_wav(filename, rate, x_wav)
    print('Saved as binary wav file with (I,Q)<=>(L,R)')
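A short self-contained example of saving an I/Q signal with complex2wav; it assumes numpy is imported as np and scikit-dsp-comm's sigsys as ss, matching the function body.

import numpy as np

fs = 8000  # sample rate in Hz
n = np.arange(fs)  # one second of samples
x = np.exp(2j * np.pi * 100 * n / fs)  # 100 Hz complex exponential
complex2wav('iq_tone.wav', fs, x)  # I -> left channel, Q -> right channel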
def sequencenames(contigsfile): """ Takes a multifasta file and returns a list of sequence names :param contigsfile: multifasta of all sequences :return: list of all sequence names """ sequences = list() for record in SeqIO.parse(open(contigsfile, "rU", encoding="iso-8859-15"), "fasta"): sequences.append(record.id) return sequences
def function[sequencenames, parameter[contigsfile]]: constant[ Takes a multifasta file and returns a list of sequence names :param contigsfile: multifasta of all sequences :return: list of all sequence names ] variable[sequences] assign[=] call[name[list], parameter[]] for taget[name[record]] in starred[call[name[SeqIO].parse, parameter[call[name[open], parameter[name[contigsfile], constant[rU]]], constant[fasta]]]] begin[:] call[name[sequences].append, parameter[name[record].id]] return[name[sequences]]
keyword[def] identifier[sequencenames] ( identifier[contigsfile] ): literal[string] identifier[sequences] = identifier[list] () keyword[for] identifier[record] keyword[in] identifier[SeqIO] . identifier[parse] ( identifier[open] ( identifier[contigsfile] , literal[string] , identifier[encoding] = literal[string] ), literal[string] ): identifier[sequences] . identifier[append] ( identifier[record] . identifier[id] ) keyword[return] identifier[sequences]
def sequencenames(contigsfile): """ Takes a multifasta file and returns a list of sequence names :param contigsfile: multifasta of all sequences :return: list of all sequence names """ sequences = list() for record in SeqIO.parse(open(contigsfile, 'rU', encoding='iso-8859-15'), 'fasta'): sequences.append(record.id) # depends on [control=['for'], data=['record']] return sequences
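Calling the helper on a hypothetical assembly file:

names = sequencenames('assembly_contigs.fasta')  # placeholder multifasta path
print(len(names), 'records; first id:', names[0] if names else None)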
def _PrintStorageInformationAsJSON(self, storage_reader): """Writes a summary of sessions as machine-readable JSON. Args: storage_reader (StorageReader): storage reader. """ serializer = json_serializer.JSONAttributeContainerSerializer storage_counters = self._CalculateStorageCounters(storage_reader) storage_counters_json = json.dumps(storage_counters) self._output_writer.Write('{') self._output_writer.Write('"storage_counters": {0:s}'.format( storage_counters_json)) self._output_writer.Write(',\n') self._output_writer.Write(' "sessions": {') for index, session in enumerate(storage_reader.GetSessions()): json_string = serializer.WriteSerialized(session) if index != 0: self._output_writer.Write(',\n') self._output_writer.Write('"session_{0:s}": {1:s} '.format( session.identifier, json_string)) self._output_writer.Write('}}')
def function[_PrintStorageInformationAsJSON, parameter[self, storage_reader]]: constant[Writes a summary of sessions as machine-readable JSON. Args: storage_reader (StorageReader): storage reader. ] variable[serializer] assign[=] name[json_serializer].JSONAttributeContainerSerializer variable[storage_counters] assign[=] call[name[self]._CalculateStorageCounters, parameter[name[storage_reader]]] variable[storage_counters_json] assign[=] call[name[json].dumps, parameter[name[storage_counters]]] call[name[self]._output_writer.Write, parameter[constant[{]]] call[name[self]._output_writer.Write, parameter[call[constant["storage_counters": {0:s}].format, parameter[name[storage_counters_json]]]]] call[name[self]._output_writer.Write, parameter[constant[, ]]] call[name[self]._output_writer.Write, parameter[constant[ "sessions": {]]] for taget[tuple[[<ast.Name object at 0x7da20e957730>, <ast.Name object at 0x7da20e956ef0>]]] in starred[call[name[enumerate], parameter[call[name[storage_reader].GetSessions, parameter[]]]]] begin[:] variable[json_string] assign[=] call[name[serializer].WriteSerialized, parameter[name[session]]] if compare[name[index] not_equal[!=] constant[0]] begin[:] call[name[self]._output_writer.Write, parameter[constant[, ]]] call[name[self]._output_writer.Write, parameter[call[constant["session_{0:s}": {1:s} ].format, parameter[name[session].identifier, name[json_string]]]]] call[name[self]._output_writer.Write, parameter[constant[}}]]]
keyword[def] identifier[_PrintStorageInformationAsJSON] ( identifier[self] , identifier[storage_reader] ): literal[string] identifier[serializer] = identifier[json_serializer] . identifier[JSONAttributeContainerSerializer] identifier[storage_counters] = identifier[self] . identifier[_CalculateStorageCounters] ( identifier[storage_reader] ) identifier[storage_counters_json] = identifier[json] . identifier[dumps] ( identifier[storage_counters] ) identifier[self] . identifier[_output_writer] . identifier[Write] ( literal[string] ) identifier[self] . identifier[_output_writer] . identifier[Write] ( literal[string] . identifier[format] ( identifier[storage_counters_json] )) identifier[self] . identifier[_output_writer] . identifier[Write] ( literal[string] ) identifier[self] . identifier[_output_writer] . identifier[Write] ( literal[string] ) keyword[for] identifier[index] , identifier[session] keyword[in] identifier[enumerate] ( identifier[storage_reader] . identifier[GetSessions] ()): identifier[json_string] = identifier[serializer] . identifier[WriteSerialized] ( identifier[session] ) keyword[if] identifier[index] != literal[int] : identifier[self] . identifier[_output_writer] . identifier[Write] ( literal[string] ) identifier[self] . identifier[_output_writer] . identifier[Write] ( literal[string] . identifier[format] ( identifier[session] . identifier[identifier] , identifier[json_string] )) identifier[self] . identifier[_output_writer] . identifier[Write] ( literal[string] )
def _PrintStorageInformationAsJSON(self, storage_reader): """Writes a summary of sessions as machine-readable JSON. Args: storage_reader (StorageReader): storage reader. """ serializer = json_serializer.JSONAttributeContainerSerializer storage_counters = self._CalculateStorageCounters(storage_reader) storage_counters_json = json.dumps(storage_counters) self._output_writer.Write('{') self._output_writer.Write('"storage_counters": {0:s}'.format(storage_counters_json)) self._output_writer.Write(',\n') self._output_writer.Write(' "sessions": {') for (index, session) in enumerate(storage_reader.GetSessions()): json_string = serializer.WriteSerialized(session) if index != 0: self._output_writer.Write(',\n') # depends on [control=['if'], data=[]] self._output_writer.Write('"session_{0:s}": {1:s} '.format(session.identifier, json_string)) # depends on [control=['for'], data=[]] self._output_writer.Write('}}')
def ad_unif_inf(statistic): """ Approximates the limiting distribution to about 5 decimal digits. """ z = statistic if z < 2: return (exp(-1.2337141 / z) / sqrt(z) * (2.00012 + (.247105 - (.0649821 - (.0347962 - (.011672 - .00168691 * z) * z) * z) * z) * z)) else: return exp(-exp(1.0776 - (2.30695 - (.43424 - (.082433 - (.008056 - .0003146 * z) * z) * z) * z) * z))
def function[ad_unif_inf, parameter[statistic]]: constant[ Approximates the limiting distribution to about 5 decimal digits. ] variable[z] assign[=] name[statistic] if compare[name[z] less[<] constant[2]] begin[:] return[binary_operation[binary_operation[call[name[exp], parameter[binary_operation[<ast.UnaryOp object at 0x7da1b1878160> / name[z]]]] / call[name[sqrt], parameter[name[z]]]] * binary_operation[constant[2.00012] + binary_operation[binary_operation[constant[0.247105] - binary_operation[binary_operation[constant[0.0649821] - binary_operation[binary_operation[constant[0.0347962] - binary_operation[binary_operation[constant[0.011672] - binary_operation[constant[0.00168691] * name[z]]] * name[z]]] * name[z]]] * name[z]]] * name[z]]]]]
keyword[def] identifier[ad_unif_inf] ( identifier[statistic] ): literal[string] identifier[z] = identifier[statistic] keyword[if] identifier[z] < literal[int] : keyword[return] ( identifier[exp] (- literal[int] / identifier[z] )/ identifier[sqrt] ( identifier[z] )* ( literal[int] +( literal[int] -( literal[int] -( literal[int] - ( literal[int] - literal[int] * identifier[z] )* identifier[z] )* identifier[z] )* identifier[z] )* identifier[z] )) keyword[else] : keyword[return] identifier[exp] (- identifier[exp] ( literal[int] -( literal[int] -( literal[int] -( literal[int] - ( literal[int] - literal[int] * identifier[z] )* identifier[z] )* identifier[z] )* identifier[z] )* identifier[z] ))
def ad_unif_inf(statistic): """ Approximates the limiting distribution to about 5 decimal digits. """ z = statistic if z < 2: return exp(-1.2337141 / z) / sqrt(z) * (2.00012 + (0.247105 - (0.0649821 - (0.0347962 - (0.011672 - 0.00168691 * z) * z) * z) * z) * z) # depends on [control=['if'], data=['z']] else: return exp(-exp(1.0776 - (2.30695 - (0.43424 - (0.082433 - (0.008056 - 0.0003146 * z) * z) * z) * z) * z))
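The function approximates the limiting CDF P(A2 < z) of the Anderson-Darling statistic, so large z gives values near 1. A quick numeric check; the last input is roughly the classical 5% critical point, so the output should be close to 0.95:

for z in (0.5, 1.0, 2.492):
    print('P(A2 < %.3f) ~ %.5f' % (z, ad_unif_inf(z)))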
def print_hex(self, value, justify_right=True): """Print a numeric value in hexadecimal. Value should be from 0 to FFFF. """ if value < 0 or value > 0xFFFF: # Ignore out of range values. return self.print_number_str('{0:X}'.format(value), justify_right)
def function[print_hex, parameter[self, value, justify_right]]: constant[Print a numeric value in hexadecimal. Value should be from 0 to FFFF. ] if <ast.BoolOp object at 0x7da1b10433a0> begin[:] return[None] call[name[self].print_number_str, parameter[call[constant[{0:X}].format, parameter[name[value]]], name[justify_right]]]
keyword[def] identifier[print_hex] ( identifier[self] , identifier[value] , identifier[justify_right] = keyword[True] ): literal[string] keyword[if] identifier[value] < literal[int] keyword[or] identifier[value] > literal[int] : keyword[return] identifier[self] . identifier[print_number_str] ( literal[string] . identifier[format] ( identifier[value] ), identifier[justify_right] )
def print_hex(self, value, justify_right=True): """Print a numeric value in hexadecimal. Value should be from 0 to FFFF. """ if value < 0 or value > 65535: # Ignore out of range values. return # depends on [control=['if'], data=[]] self.print_number_str('{0:X}'.format(value), justify_right)
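A sketch; `display` stands in for an instance of the display-driver class that owns print_hex and print_number_str.

display.print_hex(0xBEEF)                     # shows 'BEEF', right-justified
display.print_hex(0x42, justify_right=False)  # left-justified '42'
display.print_hex(0x10000)                    # out of range -> silently ignored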
def select_tmpltbank_class(curr_exe): """ This function returns a class that is appropriate for setting up template bank jobs within workflow. Parameters ---------- curr_exe : string The name of the executable to be used for generating template banks. Returns -------- exe_class : Sub-class of pycbc.workflow.core.Executable that holds utility functions appropriate for the given executable. Instances of the class ('jobs') **must** have methods * job.create_node() and * job.get_valid_times(ifo, ) """ exe_to_class_map = { 'pycbc_geom_nonspinbank' : PyCBCTmpltbankExecutable, 'pycbc_aligned_stoch_bank': PyCBCTmpltbankExecutable } try: return exe_to_class_map[curr_exe] except KeyError: raise NotImplementedError( "No job class exists for executable %s, exiting" % curr_exe)
def function[select_tmpltbank_class, parameter[curr_exe]]: constant[ This function returns a class that is appropriate for setting up template bank jobs within workflow. Parameters ---------- curr_exe : string The name of the executable to be used for generating template banks. Returns -------- exe_class : Sub-class of pycbc.workflow.core.Executable that holds utility functions appropriate for the given executable. Instances of the class ('jobs') **must** have methods * job.create_node() and * job.get_valid_times(ifo, ) ] variable[exe_to_class_map] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b3550>, <ast.Constant object at 0x7da20e9b3a30>], [<ast.Name object at 0x7da20e9b1d20>, <ast.Name object at 0x7da20e9b18d0>]] <ast.Try object at 0x7da20e9b31c0>
keyword[def] identifier[select_tmpltbank_class] ( identifier[curr_exe] ): literal[string] identifier[exe_to_class_map] ={ literal[string] : identifier[PyCBCTmpltbankExecutable] , literal[string] : identifier[PyCBCTmpltbankExecutable] } keyword[try] : keyword[return] identifier[exe_to_class_map] [ identifier[curr_exe] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[NotImplementedError] ( literal[string] % identifier[curr_exe] )
def select_tmpltbank_class(curr_exe): """ This function returns a class that is appropriate for setting up template bank jobs within workflow. Parameters ---------- curr_exe : string The name of the executable to be used for generating template banks. Returns -------- exe_class : Sub-class of pycbc.workflow.core.Executable that holds utility functions appropriate for the given executable. Instances of the class ('jobs') **must** have methods * job.create_node() and * job.get_valid_times(ifo, ) """ exe_to_class_map = {'pycbc_geom_nonspinbank': PyCBCTmpltbankExecutable, 'pycbc_aligned_stoch_bank': PyCBCTmpltbankExecutable} try: return exe_to_class_map[curr_exe] # depends on [control=['try'], data=[]] except KeyError: raise NotImplementedError('No job class exists for executable %s, exiting' % curr_exe) # depends on [control=['except'], data=[]]
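Exercising both paths of the lookup defined above:

exe_class = select_tmpltbank_class('pycbc_geom_nonspinbank')
print(exe_class.__name__)  # PyCBCTmpltbankExecutable
try:
    select_tmpltbank_class('not_a_bank_exe')
except NotImplementedError as err:
    print(err)  # "No job class exists for executable not_a_bank_exe, exiting"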
def CleanName(name): """Perform generic name cleaning.""" name = re.sub('[^_A-Za-z0-9]', '_', name) if name[0].isdigit(): name = '_%s' % name while keyword.iskeyword(name): name = '%s_' % name # If we end up with __ as a prefix, we'll run afoul of python # field renaming, so we manually correct for it. if name.startswith('__'): name = 'f%s' % name return name
def function[CleanName, parameter[name]]: constant[Perform generic name cleaning.] variable[name] assign[=] call[name[re].sub, parameter[constant[[^_A-Za-z0-9]], constant[_], name[name]]] if call[call[name[name]][constant[0]].isdigit, parameter[]] begin[:] variable[name] assign[=] binary_operation[constant[_%s] <ast.Mod object at 0x7da2590d6920> name[name]] while call[name[keyword].iskeyword, parameter[name[name]]] begin[:] variable[name] assign[=] binary_operation[constant[%s_] <ast.Mod object at 0x7da2590d6920> name[name]] if call[name[name].startswith, parameter[constant[__]]] begin[:] variable[name] assign[=] binary_operation[constant[f%s] <ast.Mod object at 0x7da2590d6920> name[name]] return[name[name]]
keyword[def] identifier[CleanName] ( identifier[name] ): literal[string] identifier[name] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[name] ) keyword[if] identifier[name] [ literal[int] ]. identifier[isdigit] (): identifier[name] = literal[string] % identifier[name] keyword[while] identifier[keyword] . identifier[iskeyword] ( identifier[name] ): identifier[name] = literal[string] % identifier[name] keyword[if] identifier[name] . identifier[startswith] ( literal[string] ): identifier[name] = literal[string] % identifier[name] keyword[return] identifier[name]
def CleanName(name): """Perform generic name cleaning.""" name = re.sub('[^_A-Za-z0-9]', '_', name) if name[0].isdigit(): name = '_%s' % name # depends on [control=['if'], data=[]] while keyword.iskeyword(name): name = '%s_' % name # depends on [control=['while'], data=[]] # If we end up with __ as a prefix, we'll run afoul of python # field renaming, so we manually correct for it. if name.startswith('__'): name = 'f%s' % name # depends on [control=['if'], data=[]] return name
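Worked examples covering each branch of CleanName (verifiable by hand):

print(CleanName('9lives'))        # '_9lives'  -- leading digit gets an underscore
print(CleanName('class'))         # 'class_'   -- python keyword gets a suffix
print(CleanName('__weird name'))  # 'f__weird_name' -- avoids field name mangling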
def get_compositions_by_search(self, composition_query, composition_search): """Gets the search results matching the given search query using the given search. arg: composition_query (osid.repository.CompositionQuery): the composition query arg: composition_search (osid.repository.CompositionSearch): the composition search return: (osid.repository.CompositionSearchResults) - the composition search results raise: NullArgument - ``composition_query`` or ``composition_search`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - ``composition_query`` or ``composition_search`` is not of this service *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceSearchSession.get_resources_by_search_template # Copied from osid.resource.ResourceQuerySession.get_resources_by_query_template and_list = list() or_list = list() for term in composition_query._query_terms: and_list.append({term: composition_query._query_terms[term]}) for term in composition_query._keyword_terms: or_list.append({term: composition_query._keyword_terms[term]}) if composition_search._id_list is not None: identifiers = [ObjectId(i.identifier) for i in composition_search._id_list] and_list.append({'_id': {'$in': identifiers}}) if or_list: and_list.append({'$or': or_list}) view_filter = self._view_filter() if view_filter: and_list.append(view_filter) if and_list: query_terms = {'$and': and_list} collection = JSONClientValidated('repository', collection='Composition', runtime=self._runtime) if composition_search.start is not None and composition_search.end is not None: result = collection.find(query_terms)[composition_search.start:composition_search.end] else: result = collection.find(query_terms) return searches.CompositionSearchResults(result, dict(composition_query._query_terms), runtime=self._runtime)
def function[get_compositions_by_search, parameter[self, composition_query, composition_search]]: constant[Gets the search results matching the given search query using the given search. arg: composition_query (osid.repository.CompositionQuery): the composition query arg: composition_search (osid.repository.CompositionSearch): the composition search return: (osid.repository.CompositionSearchResults) - the composition search results raise: NullArgument - ``composition_query`` or ``composition_search`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - ``composition_query`` or ``composition_search`` is not of this service *compliance: mandatory -- This method must be implemented.* ] variable[and_list] assign[=] call[name[list], parameter[]] variable[or_list] assign[=] call[name[list], parameter[]] for taget[name[term]] in starred[name[composition_query]._query_terms] begin[:] call[name[and_list].append, parameter[dictionary[[<ast.Name object at 0x7da1b0972ef0>], [<ast.Subscript object at 0x7da1b0971ab0>]]]] for taget[name[term]] in starred[name[composition_query]._keyword_terms] begin[:] call[name[or_list].append, parameter[dictionary[[<ast.Name object at 0x7da1b09719c0>], [<ast.Subscript object at 0x7da1b0972e60>]]]] if compare[name[composition_search]._id_list is_not constant[None]] begin[:] variable[identifiers] assign[=] <ast.ListComp object at 0x7da1b0973100> call[name[and_list].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0973520>], [<ast.Dict object at 0x7da1b0972440>]]]] if name[or_list] begin[:] call[name[and_list].append, parameter[dictionary[[<ast.Constant object at 0x7da1b0971d50>], [<ast.Name object at 0x7da1b0973dc0>]]]] variable[view_filter] assign[=] call[name[self]._view_filter, parameter[]] if name[view_filter] begin[:] call[name[and_list].append, parameter[name[view_filter]]] if name[and_list] begin[:] variable[query_terms] assign[=] dictionary[[<ast.Constant object at 0x7da1b09735e0>], [<ast.Name object at 0x7da1b0970100>]] variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[repository]]] if <ast.BoolOp object at 0x7da20c7cb220> begin[:] variable[result] assign[=] call[call[name[collection].find, parameter[name[query_terms]]]][<ast.Slice object at 0x7da20c7cbd00>] return[call[name[searches].CompositionSearchResults, parameter[name[result], call[name[dict], parameter[name[composition_query]._query_terms]]]]]
keyword[def] identifier[get_compositions_by_search] ( identifier[self] , identifier[composition_query] , identifier[composition_search] ): literal[string] identifier[and_list] = identifier[list] () identifier[or_list] = identifier[list] () keyword[for] identifier[term] keyword[in] identifier[composition_query] . identifier[_query_terms] : identifier[and_list] . identifier[append] ({ identifier[term] : identifier[composition_query] . identifier[_query_terms] [ identifier[term] ]}) keyword[for] identifier[term] keyword[in] identifier[composition_query] . identifier[_keyword_terms] : identifier[or_list] . identifier[append] ({ identifier[term] : identifier[composition_query] . identifier[_keyword_terms] [ identifier[term] ]}) keyword[if] identifier[composition_search] . identifier[_id_list] keyword[is] keyword[not] keyword[None] : identifier[identifiers] =[ identifier[ObjectId] ( identifier[i] . identifier[identifier] ) keyword[for] identifier[i] keyword[in] identifier[composition_search] . identifier[_id_list] ] identifier[and_list] . identifier[append] ({ literal[string] :{ literal[string] : identifier[identifiers] }}) keyword[if] identifier[or_list] : identifier[and_list] . identifier[append] ({ literal[string] : identifier[or_list] }) identifier[view_filter] = identifier[self] . identifier[_view_filter] () keyword[if] identifier[view_filter] : identifier[and_list] . identifier[append] ( identifier[view_filter] ) keyword[if] identifier[and_list] : identifier[query_terms] ={ literal[string] : identifier[and_list] } identifier[collection] = identifier[JSONClientValidated] ( literal[string] , identifier[collection] = literal[string] , identifier[runtime] = identifier[self] . identifier[_runtime] ) keyword[if] identifier[composition_search] . identifier[start] keyword[is] keyword[not] keyword[None] keyword[and] identifier[composition_search] . identifier[end] keyword[is] keyword[not] keyword[None] : identifier[result] = identifier[collection] . identifier[find] ( identifier[query_terms] )[ identifier[composition_search] . identifier[start] : identifier[composition_search] . identifier[end] ] keyword[else] : identifier[result] = identifier[collection] . identifier[find] ( identifier[query_terms] ) keyword[return] identifier[searches] . identifier[CompositionSearchResults] ( identifier[result] , identifier[dict] ( identifier[composition_query] . identifier[_query_terms] ), identifier[runtime] = identifier[self] . identifier[_runtime] )
def get_compositions_by_search(self, composition_query, composition_search): """Gets the search results matching the given search query using the given search. arg: composition_query (osid.repository.CompositionQuery): the composition query arg: composition_search (osid.repository.CompositionSearch): the composition search return: (osid.repository.CompositionSearchResults) - the composition search results raise: NullArgument - ``composition_query`` or ``composition_search`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - ``composition_query`` or ``composition_search`` is not of this service *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceSearchSession.get_resources_by_search_template # Copied from osid.resource.ResourceQuerySession.get_resources_by_query_template and_list = list() or_list = list() for term in composition_query._query_terms: and_list.append({term: composition_query._query_terms[term]}) # depends on [control=['for'], data=['term']] for term in composition_query._keyword_terms: or_list.append({term: composition_query._keyword_terms[term]}) # depends on [control=['for'], data=['term']] if composition_search._id_list is not None: identifiers = [ObjectId(i.identifier) for i in composition_search._id_list] and_list.append({'_id': {'$in': identifiers}}) # depends on [control=['if'], data=[]] if or_list: and_list.append({'$or': or_list}) # depends on [control=['if'], data=[]] view_filter = self._view_filter() if view_filter: and_list.append(view_filter) # depends on [control=['if'], data=[]] if and_list: query_terms = {'$and': and_list} # depends on [control=['if'], data=[]] collection = JSONClientValidated('repository', collection='Composition', runtime=self._runtime) if composition_search.start is not None and composition_search.end is not None: result = collection.find(query_terms)[composition_search.start:composition_search.end] # depends on [control=['if'], data=[]] else: result = collection.find(query_terms) return searches.CompositionSearchResults(result, dict(composition_query._query_terms), runtime=self._runtime)
def is_ratio_different(min_ratio, study_go, study_n, pop_go, pop_n):
    """
    Check whether the ratio go/n differs between the study group and
    the population.
    """
    if min_ratio is None:
        return True
    stu_ratio = float(study_go) / study_n
    pop_ratio = float(pop_go) / pop_n
    if stu_ratio == 0.0:
        stu_ratio = 0.0000001
    if pop_ratio == 0.0:
        pop_ratio = 0.0000001
    if stu_ratio > pop_ratio:
        return stu_ratio / pop_ratio > min_ratio
    return pop_ratio / stu_ratio > min_ratio
def function[is_ratio_different, parameter[min_ratio, study_go, study_n, pop_go, pop_n]]: constant[ check if the ratio go /n is different between the study group and the population ] if compare[name[min_ratio] is constant[None]] begin[:] return[constant[True]] variable[stu_ratio] assign[=] binary_operation[call[name[float], parameter[name[study_go]]] / name[study_n]] variable[pop_ratio] assign[=] binary_operation[call[name[float], parameter[name[pop_go]]] / name[pop_n]] if compare[name[stu_ratio] equal[==] constant[0.0]] begin[:] variable[stu_ratio] assign[=] constant[1e-07] if compare[name[pop_ratio] equal[==] constant[0.0]] begin[:] variable[pop_ratio] assign[=] constant[1e-07] if compare[name[stu_ratio] greater[>] name[pop_ratio]] begin[:] return[compare[binary_operation[name[stu_ratio] / name[pop_ratio]] greater[>] name[min_ratio]]] return[compare[binary_operation[name[pop_ratio] / name[stu_ratio]] greater[>] name[min_ratio]]]
keyword[def] identifier[is_ratio_different] ( identifier[min_ratio] , identifier[study_go] , identifier[study_n] , identifier[pop_go] , identifier[pop_n] ): literal[string] keyword[if] identifier[min_ratio] keyword[is] keyword[None] : keyword[return] keyword[True] identifier[stu_ratio] = identifier[float] ( identifier[study_go] )/ identifier[study_n] identifier[pop_ratio] = identifier[float] ( identifier[pop_go] )/ identifier[pop_n] keyword[if] identifier[stu_ratio] == literal[int] : identifier[stu_ratio] = literal[int] keyword[if] identifier[pop_ratio] == literal[int] : identifier[pop_ratio] = literal[int] keyword[if] identifier[stu_ratio] > identifier[pop_ratio] : keyword[return] identifier[stu_ratio] / identifier[pop_ratio] > identifier[min_ratio] keyword[return] identifier[pop_ratio] / identifier[stu_ratio] > identifier[min_ratio]
def is_ratio_different(min_ratio, study_go, study_n, pop_go, pop_n):
    """
    Check whether the ratio go/n differs between the study group and
    the population.
    """
    if min_ratio is None:
        return True # depends on [control=['if'], data=[]]
    stu_ratio = float(study_go) / study_n
    pop_ratio = float(pop_go) / pop_n
    if stu_ratio == 0.0:
        stu_ratio = 1e-07 # depends on [control=['if'], data=['stu_ratio']]
    if pop_ratio == 0.0:
        pop_ratio = 1e-07 # depends on [control=['if'], data=['pop_ratio']]
    if stu_ratio > pop_ratio:
        return stu_ratio / pop_ratio > min_ratio # depends on [control=['if'], data=['stu_ratio', 'pop_ratio']]
    return pop_ratio / stu_ratio > min_ratio
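Worked examples; the counts are made up but the arithmetic is easy to check:

print(is_ratio_different(2.0, 30, 100, 10, 1000))  # 0.3/0.01 = 30 > 2 -> True
print(is_ratio_different(2.0, 10, 100, 9, 100))    # 0.10/0.09 ~ 1.1 < 2 -> False
print(is_ratio_different(None, 1, 10, 1, 10))      # no threshold -> always True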
def ctx() -> moderngl.Context: """ModernGL context""" win = window() if not win.ctx: raise RuntimeError("Attempting to get context before creation") return win.ctx
def function[ctx, parameter[]]: constant[ModernGL context] variable[win] assign[=] call[name[window], parameter[]] if <ast.UnaryOp object at 0x7da18f58e350> begin[:] <ast.Raise object at 0x7da18f58ec50> return[name[win].ctx]
keyword[def] identifier[ctx] ()-> identifier[moderngl] . identifier[Context] : literal[string] identifier[win] = identifier[window] () keyword[if] keyword[not] identifier[win] . identifier[ctx] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[return] identifier[win] . identifier[ctx]
def ctx() -> moderngl.Context: """ModernGL context""" win = window() if not win.ctx: raise RuntimeError('Attempting to get context before creation') # depends on [control=['if'], data=[]] return win.ctx
def abort_running(self) -> bool: """ Executes a hard abort by shutting down the event loop in this thread in which the running command was operating. This is carried out using the asyncio library to prevent the stopped execution from destabilizing the Python environment. """ if not self._loop: return False try: self._loop.stop() return True except Exception: return False finally: self.completed_at = datetime.utcnow()
def function[abort_running, parameter[self]]: constant[ Executes a hard abort by shutting down the event loop in this thread in which the running command was operating. This is carried out using the asyncio library to prevent the stopped execution from destabilizing the Python environment. ] if <ast.UnaryOp object at 0x7da20c991600> begin[:] return[constant[False]] <ast.Try object at 0x7da20c9929e0>
keyword[def] identifier[abort_running] ( identifier[self] )-> identifier[bool] : literal[string] keyword[if] keyword[not] identifier[self] . identifier[_loop] : keyword[return] keyword[False] keyword[try] : identifier[self] . identifier[_loop] . identifier[stop] () keyword[return] keyword[True] keyword[except] identifier[Exception] : keyword[return] keyword[False] keyword[finally] : identifier[self] . identifier[completed_at] = identifier[datetime] . identifier[utcnow] ()
def abort_running(self) -> bool: """ Executes a hard abort by shutting down the event loop in this thread in which the running command was operating. This is carried out using the asyncio library to prevent the stopped execution from destabilizing the Python environment. """ if not self._loop: return False # depends on [control=['if'], data=[]] try: self._loop.stop() return True # depends on [control=['try'], data=[]] except Exception: return False # depends on [control=['except'], data=[]] finally: self.completed_at = datetime.utcnow()
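For context on the `loop.stop()` call above: the method stops a loop owned by another thread and relies on its try/except for safety. asyncio's documented thread-safe route is `call_soon_threadsafe`, shown in this standalone sketch (all names here are illustrative, not from the original class):

import asyncio
import threading
import time

loop = asyncio.new_event_loop()
worker = threading.Thread(target=loop.run_forever)
worker.start()

time.sleep(0.1)                       # pretend some command is running
loop.call_soon_threadsafe(loop.stop)  # schedule the stop from the calling thread
worker.join()
print(loop.is_running())              # False
loop.close()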
def read_credentials(fname): """ read a simple text file from a private location to get username and password """ with open(fname, 'r') as f: username = f.readline().strip('\n') password = f.readline().strip('\n') return username, password
def function[read_credentials, parameter[fname]]: constant[ read a simple text file from a private location to get username and password ] with call[name[open], parameter[name[fname], constant[r]]] begin[:] variable[username] assign[=] call[call[name[f].readline, parameter[]].strip, parameter[constant[ ]]] variable[password] assign[=] call[call[name[f].readline, parameter[]].strip, parameter[constant[ ]]] return[tuple[[<ast.Name object at 0x7da18fe92440>, <ast.Name object at 0x7da18fe93520>]]]
keyword[def] identifier[read_credentials] ( identifier[fname] ): literal[string] keyword[with] identifier[open] ( identifier[fname] , literal[string] ) keyword[as] identifier[f] : identifier[username] = identifier[f] . identifier[readline] (). identifier[strip] ( literal[string] ) identifier[password] = identifier[f] . identifier[readline] (). identifier[strip] ( literal[string] ) keyword[return] identifier[username] , identifier[password]
def read_credentials(fname): """ read a simple text file from a private location to get username and password """ with open(fname, 'r') as f: username = f.readline().strip('\n') password = f.readline().strip('\n') # depends on [control=['with'], data=['f']] return (username, password)
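The reader above implies a two-line file format: username on the first line, password on the second. A quick sketch, assuming `read_credentials` is in scope; the path is hypothetical:

with open('/tmp/creds.txt', 'w') as f:
    f.write('alice\ns3cret\n')  # line 1: username, line 2: password

username, password = read_credentials('/tmp/creds.txt')
print(username, password)       # alice s3cret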
def read_creds(profile_name, csv_file = None, mfa_serial_arg = None, mfa_code = None, force_init = False, role_session_name = 'opinel'):
    """
    Read credentials from anywhere (CSV, Environment, Instance metadata, config/credentials)

    :param profile_name:
    :param csv_file:
    :param mfa_serial_arg:
    :param mfa_code:
    :param force_init:
    :param role_session_name:

    :return:
    """
    first_sts_session = False
    source_profile = None
    role_mfa_serial = None
    expiration = None
    credentials = init_creds()
    role_arn, external_id = read_profile_from_environment_variables()
    if csv_file:
        # Read credentials from a CSV file that was provided
        credentials['AccessKeyId'], credentials['SecretAccessKey'], credentials['SerialNumber'] = \
            read_creds_from_csv(csv_file)
    elif profile_name == 'default':
        # Try reading credentials from environment variables (Issue #11) if the profile name is 'default'
        credentials = read_creds_from_environment_variables()
    if ('AccessKeyId' not in credentials or not credentials['AccessKeyId']) \
            and not csv_file and profile_name == 'default':
        ec2_credentials = read_creds_from_ec2_instance_metadata()
        if ec2_credentials:
            credentials = ec2_credentials
        else:
            ecs_credentials = read_creds_from_ecs_container_metadata()
            if ecs_credentials:
                credentials = ecs_credentials
            # TODO support lambda
    if role_arn or (not credentials['AccessKeyId'] and not csv_file):
        # Lookup if a role is defined in ~/.aws/config
        if not role_arn:
            role_arn, source_profile, role_mfa_serial, external_id = read_profile_from_aws_config_file(profile_name)
        # Scout2 issue 237 - credentials file may be used to configure role-based profiles...
        if not role_arn:
            role_arn, source_profile, role_mfa_serial, external_id = \
                read_profile_from_aws_config_file(profile_name, config_file = aws_credentials_file)
        if role_arn:
            # Lookup cached credentials
            try:
                cached_credentials_filename = get_cached_credentials_filename(profile_name, role_arn)
                with open(cached_credentials_filename, 'rt') as f:
                    assume_role_data = json.load(f)
                    oldcred = credentials
                    credentials = assume_role_data['Credentials']
                    expiration = dateutil.parser.parse(credentials['Expiration'])
                    expiration = expiration.replace(tzinfo=None)
                    current = datetime.datetime.utcnow()
                    if expiration < current:
                        print('Role\'s credentials have expired on %s' % credentials['Expiration'])
                        credentials = oldcred
            except Exception as e:
                pass
            if not expiration or expiration < current or credentials['AccessKeyId'] == None:
                if source_profile:
                    credentials = read_creds(source_profile)
                if role_mfa_serial:
                    credentials['SerialNumber'] = role_mfa_serial
                    # Auto prompt for a code...
if not mfa_code: credentials['TokenCode'] = prompt_4_mfa_code() if external_id: credentials['ExternalId'] = external_id credentials = assume_role(profile_name, credentials, role_arn, role_session_name) # Read from ~/.aws/credentials else: credentials = read_creds_from_aws_credentials_file(profile_name) if credentials['SessionToken']: if 'Expiration' in credentials and credentials['Expiration']: expiration = dateutil.parser.parse(credentials['Expiration']) expiration = expiration.replace(tzinfo=None) current = datetime.datetime.utcnow() if expiration < current: printInfo('Saved STS credentials expired on %s' % credentials['Expiration']) force_init = True else: force_init = True sts_credentials = credentials else: first_sts_session = True if force_init or (mfa_serial_arg and mfa_code): credentials = read_creds_from_aws_credentials_file(profile_name if first_sts_session else '%s-nomfa' % profile_name) if not credentials['AccessKeyId']: printInfo('Warning: Unable to determine STS token expiration; later API calls may fail.') credentials = sts_credentials else: if mfa_serial_arg: credentials['SerialNumber'] = mfa_serial_arg if mfa_code: credentials['TokenCode'] = mfa_code if 'AccessKeyId' in credentials and credentials['AccessKeyId']: credentials = init_sts_session(profile_name, credentials) # If we don't have valid creds by now, print an error message if 'AccessKeyId' not in credentials or credentials['AccessKeyId'] == None or \ 'SecretAccessKey' not in credentials or credentials['SecretAccessKey'] == None: printError('Error: could not find AWS credentials. Use the --help option for more information.') if not 'AccessKeyId' in credentials: credentials = { 'AccessKeyId': None } return credentials
def function[read_creds, parameter[profile_name, csv_file, mfa_serial_arg, mfa_code, force_init, role_session_name]]: constant[ Read credentials from anywhere (CSV, Environment, Instance metadata, config/credentials) :param profile_name: :param csv_file: :param mfa_serial_arg: :param mfa_code: :param force_init: :param role_session_name: :return: ] variable[first_sts_session] assign[=] constant[False] variable[source_profile] assign[=] constant[None] variable[role_mfa_serial] assign[=] constant[None] variable[expiration] assign[=] constant[None] variable[credentials] assign[=] call[name[init_creds], parameter[]] <ast.Tuple object at 0x7da1b2523970> assign[=] call[name[read_profile_from_environment_variables], parameter[]] if name[csv_file] begin[:] <ast.Tuple object at 0x7da1b25237f0> assign[=] call[name[read_creds_from_csv], parameter[name[csv_file]]] if <ast.BoolOp object at 0x7da1b25220b0> begin[:] variable[ec2_credentials] assign[=] call[name[read_creds_from_ec2_instance_metadata], parameter[]] if name[ec2_credentials] begin[:] variable[credentials] assign[=] name[ec2_credentials] if <ast.BoolOp object at 0x7da1b2521a80> begin[:] if <ast.UnaryOp object at 0x7da1b25218a0> begin[:] <ast.Tuple object at 0x7da1b2521810> assign[=] call[name[read_profile_from_aws_config_file], parameter[name[profile_name]]] if <ast.UnaryOp object at 0x7da1b2521660> begin[:] <ast.Tuple object at 0x7da1b25215d0> assign[=] call[name[read_profile_from_aws_config_file], parameter[name[profile_name]]] if name[role_arn] begin[:] <ast.Try object at 0x7da1b2521390> if <ast.BoolOp object at 0x7da1b2520b20> begin[:] if name[source_profile] begin[:] variable[credentials] assign[=] call[name[read_creds], parameter[name[source_profile]]] if name[role_mfa_serial] begin[:] call[name[credentials]][constant[SerialNumber]] assign[=] name[role_mfa_serial] if <ast.UnaryOp object at 0x7da1b2520640> begin[:] call[name[credentials]][constant[TokenCode]] assign[=] call[name[prompt_4_mfa_code], parameter[]] if name[external_id] begin[:] call[name[credentials]][constant[ExternalId]] assign[=] name[external_id] variable[credentials] assign[=] call[name[assume_role], parameter[name[profile_name], name[credentials], name[role_arn], name[role_session_name]]] if <ast.BoolOp object at 0x7da1b253f130> begin[:] call[name[printError], parameter[constant[Error: could not find AWS credentials. Use the --help option for more information.]]] if <ast.UnaryOp object at 0x7da1b253cd60> begin[:] variable[credentials] assign[=] dictionary[[<ast.Constant object at 0x7da1b253d7b0>], [<ast.Constant object at 0x7da1b253c9d0>]] return[name[credentials]]
keyword[def] identifier[read_creds] ( identifier[profile_name] , identifier[csv_file] = keyword[None] , identifier[mfa_serial_arg] = keyword[None] , identifier[mfa_code] = keyword[None] , identifier[force_init] = keyword[False] , identifier[role_session_name] = literal[string] ): literal[string] identifier[first_sts_session] = keyword[False] identifier[source_profile] = keyword[None] identifier[role_mfa_serial] = keyword[None] identifier[expiration] = keyword[None] identifier[credentials] = identifier[init_creds] () identifier[role_arn] , identifier[external_id] = identifier[read_profile_from_environment_variables] () keyword[if] identifier[csv_file] : identifier[credentials] [ literal[string] ], identifier[credentials] [ literal[string] ], identifier[credentials] [ literal[string] ]= identifier[read_creds_from_csv] ( identifier[csv_file] ) keyword[elif] identifier[profile_name] == literal[string] : identifier[credentials] = identifier[read_creds_from_environment_variables] () keyword[if] ( literal[string] keyword[not] keyword[in] identifier[credentials] keyword[or] keyword[not] identifier[credentials] [ literal[string] ]) keyword[and] keyword[not] identifier[csv_file] keyword[and] identifier[profile_name] == literal[string] : identifier[ec2_credentials] = identifier[read_creds_from_ec2_instance_metadata] () keyword[if] identifier[ec2_credentials] : identifier[credentials] = identifier[ec2_credentials] keyword[else] : identifier[ecs_credentials] = identifier[read_creds_from_ecs_container_metadata] () keyword[if] identifier[ecs_credentials] : identifier[credentials] = identifier[ecs_credentials] keyword[if] identifier[role_arn] keyword[or] ( keyword[not] identifier[credentials] [ literal[string] ] keyword[and] keyword[not] identifier[csv_file] ): keyword[if] keyword[not] identifier[role_arn] : identifier[role_arn] , identifier[source_profile] , identifier[role_mfa_serial] , identifier[external_id] = identifier[read_profile_from_aws_config_file] ( identifier[profile_name] ) keyword[if] keyword[not] identifier[role_arn] : identifier[role_arn] , identifier[source_profile] , identifier[role_mfa_serial] , identifier[external_id] = identifier[read_profile_from_aws_config_file] ( identifier[profile_name] , identifier[config_file] = identifier[aws_credentials_file] ) keyword[if] identifier[role_arn] : keyword[try] : identifier[cached_credentials_filename] = identifier[get_cached_credentials_filename] ( identifier[profile_name] , identifier[role_arn] ) keyword[with] identifier[open] ( identifier[cached_credentials_filename] , literal[string] ) keyword[as] identifier[f] : identifier[assume_role_data] = identifier[json] . identifier[load] ( identifier[f] ) identifier[oldcred] = identifier[credentials] identifier[credentials] = identifier[assume_role_data] [ literal[string] ] identifier[expiration] = identifier[dateutil] . identifier[parser] . identifier[parse] ( identifier[credentials] [ literal[string] ]) identifier[expiration] = identifier[expiration] . identifier[replace] ( identifier[tzinfo] = keyword[None] ) identifier[current] = identifier[datetime] . identifier[datetime] .
identifier[utcnow] () keyword[if] identifier[expiration] < identifier[current] : identifier[print] ( literal[string] % identifier[credentials] [ literal[string] ]) identifier[credentials] = identifier[oldcred] keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[pass] keyword[if] keyword[not] identifier[expiration] keyword[or] identifier[expiration] < identifier[current] keyword[or] identifier[credentials] [ literal[string] ]== keyword[None] : keyword[if] identifier[source_profile] : identifier[credentials] = identifier[read_creds] ( identifier[source_profile] ) keyword[if] identifier[role_mfa_serial] : identifier[credentials] [ literal[string] ]= identifier[role_mfa_serial] keyword[if] keyword[not] identifier[mfa_code] : identifier[credentials] [ literal[string] ]= identifier[prompt_4_mfa_code] () keyword[if] identifier[external_id] : identifier[credentials] [ literal[string] ]= identifier[external_id] identifier[credentials] = identifier[assume_role] ( identifier[profile_name] , identifier[credentials] , identifier[role_arn] , identifier[role_session_name] ) keyword[else] : identifier[credentials] = identifier[read_creds_from_aws_credentials_file] ( identifier[profile_name] ) keyword[if] identifier[credentials] [ literal[string] ]: keyword[if] literal[string] keyword[in] identifier[credentials] keyword[and] identifier[credentials] [ literal[string] ]: identifier[expiration] = identifier[dateutil] . identifier[parser] . identifier[parse] ( identifier[credentials] [ literal[string] ]) identifier[expiration] = identifier[expiration] . identifier[replace] ( identifier[tzinfo] = keyword[None] ) identifier[current] = identifier[datetime] . identifier[datetime] . identifier[utcnow] () keyword[if] identifier[expiration] < identifier[current] : identifier[printInfo] ( literal[string] % identifier[credentials] [ literal[string] ]) identifier[force_init] = keyword[True] keyword[else] : identifier[force_init] = keyword[True] identifier[sts_credentials] = identifier[credentials] keyword[else] : identifier[first_sts_session] = keyword[True] keyword[if] identifier[force_init] keyword[or] ( identifier[mfa_serial_arg] keyword[and] identifier[mfa_code] ): identifier[credentials] = identifier[read_creds_from_aws_credentials_file] ( identifier[profile_name] keyword[if] identifier[first_sts_session] keyword[else] literal[string] % identifier[profile_name] ) keyword[if] keyword[not] identifier[credentials] [ literal[string] ]: identifier[printInfo] ( literal[string] ) identifier[credentials] = identifier[sts_credentials] keyword[else] : keyword[if] identifier[mfa_serial_arg] : identifier[credentials] [ literal[string] ]= identifier[mfa_serial_arg] keyword[if] identifier[mfa_code] : identifier[credentials] [ literal[string] ]= identifier[mfa_code] keyword[if] literal[string] keyword[in] identifier[credentials] keyword[and] identifier[credentials] [ literal[string] ]: identifier[credentials] = identifier[init_sts_session] ( identifier[profile_name] , identifier[credentials] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[credentials] keyword[or] identifier[credentials] [ literal[string] ]== keyword[None] keyword[or] literal[string] keyword[not] keyword[in] identifier[credentials] keyword[or] identifier[credentials] [ literal[string] ]== keyword[None] : identifier[printError] ( literal[string] ) keyword[if] keyword[not] literal[string] keyword[in] identifier[credentials] : identifier[credentials] ={ literal[string] : keyword[None] } keyword[return] identifier[credentials]
def read_creds(profile_name, csv_file=None, mfa_serial_arg=None, mfa_code=None, force_init=False, role_session_name='opinel'):
    """
    Read credentials from anywhere (CSV, Environment, Instance metadata, config/credentials)

    :param profile_name:
    :param csv_file:
    :param mfa_serial_arg:
    :param mfa_code:
    :param force_init:
    :param role_session_name:

    :return:
    """
    first_sts_session = False
    source_profile = None
    role_mfa_serial = None
    expiration = None
    credentials = init_creds()
    (role_arn, external_id) = read_profile_from_environment_variables()
    if csv_file:
        # Read credentials from a CSV file that was provided
        (credentials['AccessKeyId'], credentials['SecretAccessKey'], credentials['SerialNumber']) = read_creds_from_csv(csv_file) # depends on [control=['if'], data=[]]
    elif profile_name == 'default':
        # Try reading credentials from environment variables (Issue #11) if the profile name is 'default'
        credentials = read_creds_from_environment_variables() # depends on [control=['if'], data=[]]
    if ('AccessKeyId' not in credentials or not credentials['AccessKeyId']) and (not csv_file) and (profile_name == 'default'):
        ec2_credentials = read_creds_from_ec2_instance_metadata()
        if ec2_credentials:
            credentials = ec2_credentials # depends on [control=['if'], data=[]]
        else:
            ecs_credentials = read_creds_from_ecs_container_metadata()
            if ecs_credentials:
                credentials = ecs_credentials # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    # TODO support lambda
    if role_arn or (not credentials['AccessKeyId'] and (not csv_file)):
        # Lookup if a role is defined in ~/.aws/config
        if not role_arn:
            (role_arn, source_profile, role_mfa_serial, external_id) = read_profile_from_aws_config_file(profile_name) # depends on [control=['if'], data=[]]
        # Scout2 issue 237 - credentials file may be used to configure role-based profiles...
        if not role_arn:
            (role_arn, source_profile, role_mfa_serial, external_id) = read_profile_from_aws_config_file(profile_name, config_file=aws_credentials_file) # depends on [control=['if'], data=[]]
        if role_arn:
            # Lookup cached credentials
            try:
                cached_credentials_filename = get_cached_credentials_filename(profile_name, role_arn)
                with open(cached_credentials_filename, 'rt') as f:
                    assume_role_data = json.load(f)
                    oldcred = credentials
                    credentials = assume_role_data['Credentials']
                    expiration = dateutil.parser.parse(credentials['Expiration'])
                    expiration = expiration.replace(tzinfo=None)
                    current = datetime.datetime.utcnow()
                    if expiration < current:
                        print("Role's credentials have expired on %s" % credentials['Expiration'])
                        credentials = oldcred # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
            except Exception as e:
                pass # depends on [control=['except'], data=[]]
            if not expiration or expiration < current or credentials['AccessKeyId'] == None:
                if source_profile:
                    credentials = read_creds(source_profile) # depends on [control=['if'], data=[]]
                if role_mfa_serial:
                    credentials['SerialNumber'] = role_mfa_serial
                    # Auto prompt for a code...
if not mfa_code: credentials['TokenCode'] = prompt_4_mfa_code() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if external_id: credentials['ExternalId'] = external_id # depends on [control=['if'], data=[]] credentials = assume_role(profile_name, credentials, role_arn, role_session_name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # Read from ~/.aws/credentials credentials = read_creds_from_aws_credentials_file(profile_name) if credentials['SessionToken']: if 'Expiration' in credentials and credentials['Expiration']: expiration = dateutil.parser.parse(credentials['Expiration']) expiration = expiration.replace(tzinfo=None) current = datetime.datetime.utcnow() if expiration < current: printInfo('Saved STS credentials expired on %s' % credentials['Expiration']) force_init = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: force_init = True sts_credentials = credentials # depends on [control=['if'], data=[]] else: first_sts_session = True if force_init or (mfa_serial_arg and mfa_code): credentials = read_creds_from_aws_credentials_file(profile_name if first_sts_session else '%s-nomfa' % profile_name) if not credentials['AccessKeyId']: printInfo('Warning: Unable to determine STS token expiration; later API calls may fail.') credentials = sts_credentials # depends on [control=['if'], data=[]] else: if mfa_serial_arg: credentials['SerialNumber'] = mfa_serial_arg # depends on [control=['if'], data=[]] if mfa_code: credentials['TokenCode'] = mfa_code # depends on [control=['if'], data=[]] if 'AccessKeyId' in credentials and credentials['AccessKeyId']: credentials = init_sts_session(profile_name, credentials) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # If we don't have valid creds by now, print an error message if 'AccessKeyId' not in credentials or credentials['AccessKeyId'] == None or 'SecretAccessKey' not in credentials or (credentials['SecretAccessKey'] == None): printError('Error: could not find AWS credentials. Use the --help option for more information.') # depends on [control=['if'], data=[]] if not 'AccessKeyId' in credentials: credentials = {'AccessKeyId': None} # depends on [control=['if'], data=[]] return credentials
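Typical call patterns for the resolver above might look like the following; the profile names, MFA serial, and code are placeholders, and the opinel helpers it depends on are assumed to be importable.

# Simple case: resolve the default profile from env vars, metadata, or ~/.aws files.
creds = read_creds('default')
if creds.get('AccessKeyId'):
    print('Got key starting with %s' % creds['AccessKeyId'][:4])

# Force a fresh STS session protected by an MFA device.
creds = read_creds('prod',
                   mfa_serial_arg='arn:aws:iam::123456789012:mfa/alice',
                   mfa_code='123456',
                   force_init=True)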
def update_famplex(): """Update all the CSV files that form the FamPlex resource.""" famplex_url_pattern = \ 'https://raw.githubusercontent.com/sorgerlab/famplex/master/%s.csv' csv_names = ['entities', 'equivalences', 'gene_prefixes', 'grounding_map', 'relations'] for csv_name in csv_names: url = famplex_url_pattern % csv_name save_from_http(url, os.path.join(path,'famplex/%s.csv' % csv_name))
def function[update_famplex, parameter[]]: constant[Update all the CSV files that form the FamPlex resource.] variable[famplex_url_pattern] assign[=] constant[https://raw.githubusercontent.com/sorgerlab/famplex/master/%s.csv] variable[csv_names] assign[=] list[[<ast.Constant object at 0x7da20c991f90>, <ast.Constant object at 0x7da20c9906d0>, <ast.Constant object at 0x7da20c993f40>, <ast.Constant object at 0x7da20c9909a0>, <ast.Constant object at 0x7da20c993610>]] for taget[name[csv_name]] in starred[name[csv_names]] begin[:] variable[url] assign[=] binary_operation[name[famplex_url_pattern] <ast.Mod object at 0x7da2590d6920> name[csv_name]] call[name[save_from_http], parameter[name[url], call[name[os].path.join, parameter[name[path], binary_operation[constant[famplex/%s.csv] <ast.Mod object at 0x7da2590d6920> name[csv_name]]]]]]
keyword[def] identifier[update_famplex] (): literal[string] identifier[famplex_url_pattern] = literal[string] identifier[csv_names] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[csv_name] keyword[in] identifier[csv_names] : identifier[url] = identifier[famplex_url_pattern] % identifier[csv_name] identifier[save_from_http] ( identifier[url] , identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] % identifier[csv_name] ))
def update_famplex(): """Update all the CSV files that form the FamPlex resource.""" famplex_url_pattern = 'https://raw.githubusercontent.com/sorgerlab/famplex/master/%s.csv' csv_names = ['entities', 'equivalences', 'gene_prefixes', 'grounding_map', 'relations'] for csv_name in csv_names: url = famplex_url_pattern % csv_name save_from_http(url, os.path.join(path, 'famplex/%s.csv' % csv_name)) # depends on [control=['for'], data=['csv_name']]
def check_block(block_id):
    """
    Verify that a block ID is valid

    >>> check_block(FIRST_BLOCK_MAINNET)
    True
    >>> check_block(FIRST_BLOCK_MAINNET-1)
    False
    >>> check_block(-1)
    False
    >>> check_block("abc")
    False
    >>> check_block(int(1e7) + 1)
    False
    >>> check_block(int(1e7) - 1)
    True
    """
    if type(block_id) not in [int, long]:
        return False

    if BLOCKSTACK_TEST:
        if block_id <= 0:
            return False
    else:
        if block_id < FIRST_BLOCK_MAINNET:
            return False

    if block_id > 1e7:
        # 10 million blocks? not in my lifetime
        return False

    return True
def function[check_block, parameter[block_id]]: constant[ Verify that a block ID is valid >>> check_block(FIRST_BLOCK_MAINNET) True >>> check_block(FIRST_BLOCK_MAINNET-1) False >>> check_block(-1) False >>> check_block("abc") False >>> check_block(int(1e7) + 1) False >>> check_block(int(1e7) - 1) True ] if compare[call[name[type], parameter[name[block_id]]] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Name object at 0x7da20c6aa080>, <ast.Name object at 0x7da20c6a8c10>]]] begin[:] return[constant[False]] if name[BLOCKSTACK_TEST] begin[:] if compare[name[block_id] less_or_equal[<=] constant[0]] begin[:] return[constant[False]] if compare[name[block_id] greater[>] constant[10000000.0]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[check_block] ( identifier[block_id] ): literal[string] keyword[if] identifier[type] ( identifier[block_id] ) keyword[not] keyword[in] [ identifier[int] , identifier[long] ]: keyword[return] keyword[False] keyword[if] identifier[BLOCKSTACK_TEST] : keyword[if] identifier[block_id] <= literal[int] : keyword[return] keyword[False] keyword[else] : keyword[if] identifier[block_id] < identifier[FIRST_BLOCK_MAINNET] : keyword[return] keyword[False] keyword[if] identifier[block_id] > literal[int] : keyword[return] keyword[False] keyword[return] keyword[True]
def check_block(block_id):
    """
    Verify that a block ID is valid

    >>> check_block(FIRST_BLOCK_MAINNET)
    True
    >>> check_block(FIRST_BLOCK_MAINNET-1)
    False
    >>> check_block(-1)
    False
    >>> check_block("abc")
    False
    >>> check_block(int(1e7) + 1)
    False
    >>> check_block(int(1e7) - 1)
    True
    """
    if type(block_id) not in [int, long]:
        return False # depends on [control=['if'], data=[]]
    if BLOCKSTACK_TEST:
        if block_id <= 0:
            return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    elif block_id < FIRST_BLOCK_MAINNET:
        return False # depends on [control=['if'], data=[]]
    if block_id > 10000000.0: # 10 million blocks? not in my lifetime
        return False # depends on [control=['if'], data=[]]
    return True
def preprocess_cell(self, cell, resources, cell_index):
    """Also extracts attachments"""
    from nbformat.notebooknode import NotebookNode

    attach_names = []

    # Just move the attachment into an output

    for k, attach in cell.get('attachments', {}).items():
        for mime_type in self.extract_output_types:
            if mime_type in attach:

                if not 'outputs' in cell:
                    cell['outputs'] = []

                o = NotebookNode({
                    'data': NotebookNode({mime_type: attach[mime_type]}),
                    'metadata': NotebookNode({
                        'filenames': {mime_type: k}  # Will get re-written
                    }),
                    'output_type': 'display_data'
                })

                cell['outputs'].append(o)
                attach_names.append((mime_type, k))

    nb, resources = super().preprocess_cell(cell, resources, cell_index)

    output_names = list(resources.get('outputs', {}).keys())

    if attach_names:
        # We're going to assume that attachments are only on Markdown cells, and Markdown cells
        # can't generate output, so all of the outputs were added.

        # reverse + zip matches the last len(attach_names) elements from output_names
        for output_name, (mimetype, an) in zip(reversed(output_names), reversed(attach_names)):
            # We'll post process to set the final output directory
            cell.source = re.sub(r'\(attachment:{}\)'.format(an),
                                 '(__IMGDIR__/{})'.format(output_name), cell.source)

    return nb, resources
def function[preprocess_cell, parameter[self, cell, resources, cell_index]]: constant[Also extracts attachments] from relative_module[nbformat.notebooknode] import module[NotebookNode] variable[attach_names] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b196bbe0>, <ast.Name object at 0x7da1b196bfa0>]]] in starred[call[call[name[cell].get, parameter[constant[attachments], dictionary[[], []]]].items, parameter[]]] begin[:] for taget[name[mime_type]] in starred[name[self].extract_output_types] begin[:] if compare[name[mime_type] in name[attach]] begin[:] if <ast.UnaryOp object at 0x7da1b1969450> begin[:] call[name[cell]][constant[outputs]] assign[=] list[[]] variable[o] assign[=] call[name[NotebookNode], parameter[dictionary[[<ast.Constant object at 0x7da1b1969840>, <ast.Constant object at 0x7da1b1969630>, <ast.Constant object at 0x7da1b196a020>], [<ast.Call object at 0x7da1b196bd30>, <ast.Call object at 0x7da1b196bdf0>, <ast.Constant object at 0x7da1b1968940>]]]] call[call[name[cell]][constant[outputs]].append, parameter[name[o]]] call[name[attach_names].append, parameter[tuple[[<ast.Name object at 0x7da1b1969300>, <ast.Name object at 0x7da1b1969600>]]]] <ast.Tuple object at 0x7da1b196b4f0> assign[=] call[call[name[super], parameter[]].preprocess_cell, parameter[name[cell], name[resources], name[cell_index]]] variable[output_names] assign[=] call[name[list], parameter[call[call[name[resources].get, parameter[constant[outputs], dictionary[[], []]]].keys, parameter[]]]] if name[attach_names] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b196b220>, <ast.Tuple object at 0x7da1b1968400>]]] in starred[call[name[zip], parameter[call[name[reversed], parameter[name[output_names]]], call[name[reversed], parameter[name[attach_names]]]]]] begin[:] name[cell].source assign[=] call[name[re].sub, parameter[call[constant[\(attachment:{}\)].format, parameter[name[an]]], call[constant[(__IMGDIR__/{})].format, parameter[name[output_name]]], name[cell].source]] return[tuple[[<ast.Name object at 0x7da1b1968100>, <ast.Name object at 0x7da1b19680a0>]]]
keyword[def] identifier[preprocess_cell] ( identifier[self] , identifier[cell] , identifier[resources] , identifier[cell_index] ): literal[string] keyword[from] identifier[nbformat] . identifier[notebooknode] keyword[import] identifier[NotebookNode] identifier[attach_names] =[] keyword[for] identifier[k] , identifier[attach] keyword[in] identifier[cell] . identifier[get] ( literal[string] ,{}). identifier[items] (): keyword[for] identifier[mime_type] keyword[in] identifier[self] . identifier[extract_output_types] : keyword[if] identifier[mime_type] keyword[in] identifier[attach] : keyword[if] keyword[not] literal[string] keyword[in] identifier[cell] : identifier[cell] [ literal[string] ]=[] identifier[o] = identifier[NotebookNode] ({ literal[string] : identifier[NotebookNode] ({ identifier[mime_type] : identifier[attach] [ identifier[mime_type] ]}), literal[string] : identifier[NotebookNode] ({ literal[string] :{ identifier[mime_type] : identifier[k] } }), literal[string] : literal[string] }) identifier[cell] [ literal[string] ]. identifier[append] ( identifier[o] ) identifier[attach_names] . identifier[append] (( identifier[mime_type] , identifier[k] )) identifier[nb] , identifier[resources] = identifier[super] (). identifier[preprocess_cell] ( identifier[cell] , identifier[resources] , identifier[cell_index] ) identifier[output_names] = identifier[list] ( identifier[resources] . identifier[get] ( literal[string] ,{}). identifier[keys] ()) keyword[if] identifier[attach_names] : keyword[for] identifier[output_name] ,( identifier[mimetype] , identifier[an] ) keyword[in] identifier[zip] ( identifier[reversed] ( identifier[output_names] ), identifier[reversed] ( identifier[attach_names] )): identifier[cell] . identifier[source] = identifier[re] . identifier[sub] ( literal[string] . identifier[format] ( identifier[an] ), literal[string] . identifier[format] ( identifier[output_name] ), identifier[cell] . identifier[source] ) keyword[return] identifier[nb] , identifier[resources]
def preprocess_cell(self, cell, resources, cell_index):
    """Also extracts attachments"""
    from nbformat.notebooknode import NotebookNode
    attach_names = []
    # Just move the attachment into an output
    for (k, attach) in cell.get('attachments', {}).items():
        for mime_type in self.extract_output_types:
            if mime_type in attach:
                if not 'outputs' in cell:
                    cell['outputs'] = [] # depends on [control=['if'], data=[]]
                # Will get re-written
                o = NotebookNode({'data': NotebookNode({mime_type: attach[mime_type]}), 'metadata': NotebookNode({'filenames': {mime_type: k}}), 'output_type': 'display_data'})
                cell['outputs'].append(o)
                attach_names.append((mime_type, k)) # depends on [control=['if'], data=['mime_type', 'attach']] # depends on [control=['for'], data=['mime_type']] # depends on [control=['for'], data=[]]
    (nb, resources) = super().preprocess_cell(cell, resources, cell_index)
    output_names = list(resources.get('outputs', {}).keys())
    if attach_names:
        # We're going to assume that attachments are only on Markdown cells, and Markdown cells
        # can't generate output, so all of the outputs were added.
        # reverse + zip matches the last len(attach_names) elements from output_names
        for (output_name, (mimetype, an)) in zip(reversed(output_names), reversed(attach_names)):
            # We'll post process to set the final output directory
            cell.source = re.sub('\\(attachment:{}\\)'.format(an), '(__IMGDIR__/{})'.format(output_name), cell.source) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
    return (nb, resources)
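To illustrate the attachment shape the preprocessor above consumes, here is a hypothetical Markdown cell as nbformat would store it; the base64 payload is truncated.

from nbformat.notebooknode import NotebookNode

cell = NotebookNode({
    'cell_type': 'markdown',
    'source': 'Here is a plot: ![plot](attachment:plot.png)',
    'attachments': {
        'plot.png': {'image/png': 'iVBORw0KGgoAAAANSUhEUg...'}  # truncated base64
    },
})
# After preprocessing, the attachment is re-emitted as a display_data output and
# the source link is rewritten to (__IMGDIR__/<extracted filename>).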
def user_relation_name(self): """ Returns the string name of the related name to the user. This provides a consistent interface across different organization model classes. """ return "{0}_{1}".format( self._meta.app_label.lower(), self.__class__.__name__.lower() )
def function[user_relation_name, parameter[self]]: constant[ Returns the string name of the related name to the user. This provides a consistent interface across different organization model classes. ] return[call[constant[{0}_{1}].format, parameter[call[name[self]._meta.app_label.lower, parameter[]], call[name[self].__class__.__name__.lower, parameter[]]]]]
keyword[def] identifier[user_relation_name] ( identifier[self] ): literal[string] keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[_meta] . identifier[app_label] . identifier[lower] (), identifier[self] . identifier[__class__] . identifier[__name__] . identifier[lower] () )
def user_relation_name(self): """ Returns the string name of the related name to the user. This provides a consistent interface across different organization model classes. """ return '{0}_{1}'.format(self._meta.app_label.lower(), self.__class__.__name__.lower())
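As a concrete example of the string this property builds: for a hypothetical Organization model in an app labelled accounts, the result would be accounts_organization.

app_label = 'accounts'       # stand-in for self._meta.app_label
class_name = 'Organization'  # stand-in for self.__class__.__name__
print('{0}_{1}'.format(app_label.lower(), class_name.lower()))  # accounts_organization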
def truncate(text, max_len=350, end='...'): """Truncate the supplied text for display. Arguments: text (:py:class:`str`): The text to truncate. max_len (:py:class:`int`, optional): The maximum length of the text before truncation (defaults to 350 characters). end (:py:class:`str`, optional): The ending to use to show that the text was truncated (defaults to ``'...'``). Returns: :py:class:`str`: The truncated text. """ if len(text) <= max_len: return text return text[:max_len].rsplit(' ', maxsplit=1)[0] + end
def function[truncate, parameter[text, max_len, end]]: constant[Truncate the supplied text for display. Arguments: text (:py:class:`str`): The text to truncate. max_len (:py:class:`int`, optional): The maximum length of the text before truncation (defaults to 350 characters). end (:py:class:`str`, optional): The ending to use to show that the text was truncated (defaults to ``'...'``). Returns: :py:class:`str`: The truncated text. ] if compare[call[name[len], parameter[name[text]]] less_or_equal[<=] name[max_len]] begin[:] return[name[text]] return[binary_operation[call[call[call[name[text]][<ast.Slice object at 0x7da1afe0dbd0>].rsplit, parameter[constant[ ]]]][constant[0]] + name[end]]]
keyword[def] identifier[truncate] ( identifier[text] , identifier[max_len] = literal[int] , identifier[end] = literal[string] ): literal[string] keyword[if] identifier[len] ( identifier[text] )<= identifier[max_len] : keyword[return] identifier[text] keyword[return] identifier[text] [: identifier[max_len] ]. identifier[rsplit] ( literal[string] , identifier[maxsplit] = literal[int] )[ literal[int] ]+ identifier[end]
def truncate(text, max_len=350, end='...'): """Truncate the supplied text for display. Arguments: text (:py:class:`str`): The text to truncate. max_len (:py:class:`int`, optional): The maximum length of the text before truncation (defaults to 350 characters). end (:py:class:`str`, optional): The ending to use to show that the text was truncated (defaults to ``'...'``). Returns: :py:class:`str`: The truncated text. """ if len(text) <= max_len: return text # depends on [control=['if'], data=[]] return text[:max_len].rsplit(' ', maxsplit=1)[0] + end
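The rsplit on the last space means truncation never cuts a word in half. A short sketch, assuming `truncate` is in scope:

text = 'The quick brown fox jumps over the lazy dog'
print(truncate(text, max_len=20))   # 'The quick brown fox...' (cut at the last space)
print(truncate(text, max_len=200))  # returned unchanged: already within the limit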
def alpha_mod(self): """int: The additional alpha value used in render copy operations.""" a = ffi.new('Uint8 *') check_int_err(lib.SDL_GetTextureAlphaMod(self._ptr, a)) return a[0]
def function[alpha_mod, parameter[self]]: constant[int: The additional alpha value used in render copy operations.] variable[a] assign[=] call[name[ffi].new, parameter[constant[Uint8 *]]] call[name[check_int_err], parameter[call[name[lib].SDL_GetTextureAlphaMod, parameter[name[self]._ptr, name[a]]]]] return[call[name[a]][constant[0]]]
keyword[def] identifier[alpha_mod] ( identifier[self] ): literal[string] identifier[a] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[check_int_err] ( identifier[lib] . identifier[SDL_GetTextureAlphaMod] ( identifier[self] . identifier[_ptr] , identifier[a] )) keyword[return] identifier[a] [ literal[int] ]
def alpha_mod(self): """int: The additional alpha value used in render copy operations.""" a = ffi.new('Uint8 *') check_int_err(lib.SDL_GetTextureAlphaMod(self._ptr, a)) return a[0]
def report(self, account_id, status_ids = None, comment = None, forward = False):
    """
    Report statuses to the instance's administrators.

    Accepts a list of toot IDs associated with the report, and a comment.

    Set forward to True to forward a report of a remote user to that user's
    instance as well as sending it to the local instance administrators.

    Returns a `report dict`_.
    """
    account_id = self.__unpack_id(account_id)

    if not status_ids is None:
        if not isinstance(status_ids, list):
            status_ids = [status_ids]
        status_ids = list(map(lambda x: self.__unpack_id(x), status_ids))

    params_initial = locals()
    if forward == False:
        del params_initial['forward']

    params = self.__generate_params(params_initial)
    return self.__api_request('POST', '/api/v1/reports/', params)
def function[report, parameter[self, account_id, status_ids, comment, forward]]:
    constant[
    Report statuses to the instance's administrators.

    Accepts a list of toot IDs associated with the report, and a comment.

    Set forward to True to forward a report of a remote user to that user's
    instance as well as sending it to the local instance administrators.

    Returns a `report dict`_.
    ]
    variable[account_id] assign[=] call[name[self].__unpack_id, parameter[name[account_id]]]
    if <ast.UnaryOp object at 0x7da1b13a9f90> begin[:]
    if <ast.UnaryOp object at 0x7da1b13aa890> begin[:]
    variable[status_ids] assign[=] list[[<ast.Name object at 0x7da1b13a9a80>]]
    variable[status_ids] assign[=] call[name[list], parameter[call[name[map], parameter[<ast.Lambda object at 0x7da1b13a97e0>, name[status_ids]]]]]
    variable[params_initial] assign[=] call[name[locals], parameter[]]
    if compare[name[forward] equal[==] constant[False]] begin[:]
    <ast.Delete object at 0x7da1b13a9c60>
    variable[params] assign[=] call[name[self].__generate_params, parameter[name[params_initial]]]
    return[call[name[self].__api_request, parameter[constant[POST], constant[/api/v1/reports/], name[params]]]]
keyword[def] identifier[report] ( identifier[self] , identifier[account_id] , identifier[status_ids] = keyword[None] , identifier[comment] = keyword[None] , identifier[forward] = keyword[False] ): literal[string] identifier[account_id] = identifier[self] . identifier[__unpack_id] ( identifier[account_id] ) keyword[if] keyword[not] identifier[status_ids] keyword[is] keyword[None] : keyword[if] keyword[not] identifier[isinstance] ( identifier[status_ids] , identifier[list] ): identifier[status_ids] =[ identifier[status_ids] ] identifier[status_ids] = identifier[list] ( identifier[map] ( keyword[lambda] identifier[x] : identifier[self] . identifier[__unpack_id] ( identifier[x] ), identifier[status_ids] )) identifier[params_initial] = identifier[locals] () keyword[if] identifier[forward] == keyword[False] : keyword[del] identifier[params_initial] [ literal[string] ] identifier[params] = identifier[self] . identifier[__generate_params] ( identifier[params_initial] ) keyword[return] identifier[self] . identifier[__api_request] ( literal[string] , literal[string] , identifier[params] )
def report(self, account_id, status_ids=None, comment=None, forward=False):
    """
    Report statuses to the instance's administrators.

    Accepts a list of toot IDs associated with the report, and a comment.

    Set forward to True to forward a report of a remote user to that user's
    instance as well as sending it to the local instance administrators.

    Returns a `report dict`_.
    """
    account_id = self.__unpack_id(account_id)
    if not status_ids is None:
        if not isinstance(status_ids, list):
            status_ids = [status_ids] # depends on [control=['if'], data=[]]
        # depends on [control=['if'], data=[]]
    status_ids = list(map(lambda x: self.__unpack_id(x), status_ids))
    params_initial = locals()
    if forward == False:
        del params_initial['forward'] # depends on [control=['if'], data=[]]
    params = self.__generate_params(params_initial)
    return self.__api_request('POST', '/api/v1/reports/', params)
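A hypothetical call against an authenticated Mastodon.py client; `masto` and all IDs below are placeholders.

# `masto` is assumed to be an authenticated Mastodon client instance.
report = masto.report(
    account_id=12345,
    status_ids=[67890, 67891],
    comment='Spam links in replies',
    forward=True,  # also notify the remote user's home instance
)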
def returner(ret):
    '''
    Send a slack message with the data through a webhook

    :param ret: The Salt return
    :return: The result of the post
    '''
    _options = _get_options(ret)

    webhook = _options.get('webhook', None)
    show_tasks = _options.get('show_tasks')
    author_icon = _options.get('author_icon')

    if not webhook or webhook == '':
        log.error('%s.webhook not defined in salt config', __virtualname__)
        return

    report = _generate_report(ret, show_tasks)

    if report.get('success'):
        title = _options.get('success_title')
    else:
        title = _options.get('failure_title')

    slack = _post_message(webhook, author_icon, title, report)

    return slack
def function[returner, parameter[ret]]: constant[ Send a slack message with the data through a webhook :param ret: The Salt return :return: The result of the post ] variable[_options] assign[=] call[name[_get_options], parameter[name[ret]]] variable[webhook] assign[=] call[name[_options].get, parameter[constant[webhook], constant[None]]] variable[show_tasks] assign[=] call[name[_options].get, parameter[constant[show_tasks]]] variable[author_icon] assign[=] call[name[_options].get, parameter[constant[author_icon]]] if <ast.BoolOp object at 0x7da1b2023fa0> begin[:] call[name[log].error, parameter[constant[%s.webhook not defined in salt config], name[__virtualname__]]] return[None] variable[report] assign[=] call[name[_generate_report], parameter[name[ret], name[show_tasks]]] if call[name[report].get, parameter[constant[success]]] begin[:] variable[title] assign[=] call[name[_options].get, parameter[constant[success_title]]] variable[slack] assign[=] call[name[_post_message], parameter[name[webhook], name[author_icon], name[title], name[report]]] return[name[slack]]
keyword[def] identifier[returner] ( identifier[ret] ): literal[string] identifier[_options] = identifier[_get_options] ( identifier[ret] ) identifier[webhook] = identifier[_options] . identifier[get] ( literal[string] , keyword[None] ) identifier[show_tasks] = identifier[_options] . identifier[get] ( literal[string] ) identifier[author_icon] = identifier[_options] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[webhook] keyword[or] identifier[webhook] == literal[string] : identifier[log] . identifier[error] ( literal[string] , identifier[__virtualname__] ) keyword[return] identifier[report] = identifier[_generate_report] ( identifier[ret] , identifier[show_tasks] ) keyword[if] identifier[report] . identifier[get] ( literal[string] ): identifier[title] = identifier[_options] . identifier[get] ( literal[string] ) keyword[else] : identifier[title] = identifier[_options] . identifier[get] ( literal[string] ) identifier[slack] = identifier[_post_message] ( identifier[webhook] , identifier[author_icon] , identifier[title] , identifier[report] ) keyword[return] identifier[slack]
def returner(ret): """ Send a slack message with the data through a webhook :param ret: The Salt return :return: The result of the post """ _options = _get_options(ret) webhook = _options.get('webhook', None) show_tasks = _options.get('show_tasks') author_icon = _options.get('author_icon') if not webhook or webhook == '': log.error('%s.webhook not defined in salt config', __virtualname__) return # depends on [control=['if'], data=[]] report = _generate_report(ret, show_tasks) if report.get('success'): title = _options.get('success_title') # depends on [control=['if'], data=[]] else: title = _options.get('failure_title') slack = _post_message(webhook, author_icon, title, report) return slack
def get_subgraph_by_node_search(graph: BELGraph, query: Strings) -> BELGraph: """Get a sub-graph induced over all nodes matching the query string. :param graph: A BEL Graph :param query: A query string or iterable of query strings for node names Thinly wraps :func:`search_node_names` and :func:`get_subgraph_by_induction`. """ nodes = search_node_names(graph, query) return get_subgraph_by_induction(graph, nodes)
def function[get_subgraph_by_node_search, parameter[graph, query]]: constant[Get a sub-graph induced over all nodes matching the query string. :param graph: A BEL Graph :param query: A query string or iterable of query strings for node names Thinly wraps :func:`search_node_names` and :func:`get_subgraph_by_induction`. ] variable[nodes] assign[=] call[name[search_node_names], parameter[name[graph], name[query]]] return[call[name[get_subgraph_by_induction], parameter[name[graph], name[nodes]]]]
keyword[def] identifier[get_subgraph_by_node_search] ( identifier[graph] : identifier[BELGraph] , identifier[query] : identifier[Strings] )-> identifier[BELGraph] : literal[string] identifier[nodes] = identifier[search_node_names] ( identifier[graph] , identifier[query] ) keyword[return] identifier[get_subgraph_by_induction] ( identifier[graph] , identifier[nodes] )
def get_subgraph_by_node_search(graph: BELGraph, query: Strings) -> BELGraph: """Get a sub-graph induced over all nodes matching the query string. :param graph: A BEL Graph :param query: A query string or iterable of query strings for node names Thinly wraps :func:`search_node_names` and :func:`get_subgraph_by_induction`. """ nodes = search_node_names(graph, query) return get_subgraph_by_induction(graph, nodes)
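Usage is a one-liner once a graph is loaded; this sketch assumes a populated pybel BELGraph named `graph`, and the query strings are examples.

sub = get_subgraph_by_node_search(graph, ['APP', 'amyloid'])
print(sub.number_of_nodes(), sub.number_of_edges())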
def t_recipe_RECIPE_LINE(self, t): r'.*\n' t.type = 'RECIPE_LINE' t.lexer.lineno += t.value.count("\n") return t
def function[t_recipe_RECIPE_LINE, parameter[self, t]]: constant[.*\n] name[t].type assign[=] constant[RECIPE_LINE] <ast.AugAssign object at 0x7da20c795fc0> return[name[t]]
keyword[def] identifier[t_recipe_RECIPE_LINE] ( identifier[self] , identifier[t] ): literal[string] identifier[t] . identifier[type] = literal[string] identifier[t] . identifier[lexer] . identifier[lineno] += identifier[t] . identifier[value] . identifier[count] ( literal[string] ) keyword[return] identifier[t]
def t_recipe_RECIPE_LINE(self, t): """.*\\n""" t.type = 'RECIPE_LINE' t.lexer.lineno += t.value.count('\n') return t
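The bare string at the top of the function is not a stray docstring: PLY takes a token function's docstring as that token's regular expression. A minimal, standalone PLY sketch of the same idea; the token name and input are illustrative, not the original lexer's.

import ply.lex as lex

tokens = ('RECIPE_LINE',)

def t_RECIPE_LINE(t):
    r'.*\n'
    t.lexer.lineno += t.value.count('\n')  # keep line numbers accurate for error reporting
    return t

def t_error(t):
    t.lexer.skip(1)

lexer = lex.lex()
lexer.input('gcc -o prog main.c\n')
print(repr(lexer.token().value))  # 'gcc -o prog main.c\n'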
def status_color(status): """Return the appropriate status color.""" status_color = c.Fore.GREEN if not status: status_color = c.Fore.RED return status_color
def function[status_color, parameter[status]]: constant[Return the appropriate status color.] variable[status_color] assign[=] name[c].Fore.GREEN if <ast.UnaryOp object at 0x7da1b0ce2fe0> begin[:] variable[status_color] assign[=] name[c].Fore.RED return[name[status_color]]
keyword[def] identifier[status_color] ( identifier[status] ): literal[string] identifier[status_color] = identifier[c] . identifier[Fore] . identifier[GREEN] keyword[if] keyword[not] identifier[status] : identifier[status_color] = identifier[c] . identifier[Fore] . identifier[RED] keyword[return] identifier[status_color]
def status_color(status): """Return the appropriate status color.""" status_color = c.Fore.GREEN if not status: status_color = c.Fore.RED # depends on [control=['if'], data=[]] return status_color
def insert_one(self, doc, *args, **kwargs):
    """
    Inserts one document into the collection
    If it contains an '_id' key it is used, else it is generated.
    :param doc: the document
    :return: InsertOneResult
    """
    if self.table is None:
        self.build_table()

    if not isinstance(doc, dict):
        raise ValueError(u'"doc" must be a dict')

    _id = doc[u'_id'] = doc.get('_id') or generate_id()

    bypass_document_validation = kwargs.get('bypass_document_validation')
    if bypass_document_validation is True:
        # insert doc without validation of duplicated `_id`
        eid = self.table.insert(doc)
    else:
        existing = self.find_one({'_id': _id})
        if existing is None:
            eid = self.table.insert(doc)
        else:
            raise DuplicateKeyError(
                u'_id:{0} already exists in collection:{1}'.format(
                    _id, self.tablename
                )
            )

    return InsertOneResult(eid=eid, inserted_id=_id)
def function[insert_one, parameter[self, doc]]:
    constant[
    Inserts one document into the collection
    If it contains an '_id' key it is used, else it is generated.
    :param doc: the document
    :return: InsertOneResult
    ]
    if compare[name[self].table is constant[None]] begin[:]
    call[name[self].build_table, parameter[]]
    if <ast.UnaryOp object at 0x7da18f58c190> begin[:]
    <ast.Raise object at 0x7da18f58df00>
    variable[_id] assign[=] <ast.BoolOp object at 0x7da18f58ed40>
    variable[bypass_document_validation] assign[=] call[name[kwargs].get, parameter[constant[bypass_document_validation]]]
    if compare[name[bypass_document_validation] is constant[True]] begin[:]
    variable[eid] assign[=] call[name[self].table.insert, parameter[name[doc]]]
    return[call[name[InsertOneResult], parameter[]]]
keyword[def] identifier[insert_one] ( identifier[self] , identifier[doc] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[self] . identifier[table] keyword[is] keyword[None] : identifier[self] . identifier[build_table] () keyword[if] keyword[not] identifier[isinstance] ( identifier[doc] , identifier[dict] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[_id] = identifier[doc] [ literal[string] ]= identifier[doc] . identifier[get] ( literal[string] ) keyword[or] identifier[generate_id] () identifier[bypass_document_validation] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[if] identifier[bypass_document_validation] keyword[is] keyword[True] : identifier[eid] = identifier[self] . identifier[table] . identifier[insert] ( identifier[doc] ) keyword[else] : identifier[existing] = identifier[self] . identifier[find_one] ({ literal[string] : identifier[_id] }) keyword[if] identifier[existing] keyword[is] keyword[None] : identifier[eid] = identifier[self] . identifier[table] . identifier[insert] ( identifier[doc] ) keyword[else] : keyword[raise] identifier[DuplicateKeyError] ( literal[string] . identifier[format] ( identifier[_id] , identifier[self] . identifier[tablename] ) ) keyword[return] identifier[InsertOneResult] ( identifier[eid] = identifier[eid] , identifier[inserted_id] = identifier[_id] )
def insert_one(self, doc, *args, **kwargs):
    """
    Inserts one document into the collection
    If it contains an '_id' key it is used, else it is generated.
    :param doc: the document
    :return: InsertOneResult
    """
    if self.table is None:
        self.build_table() # depends on [control=['if'], data=[]]
    if not isinstance(doc, dict):
        raise ValueError(u'"doc" must be a dict') # depends on [control=['if'], data=[]]
    _id = doc[u'_id'] = doc.get('_id') or generate_id()
    bypass_document_validation = kwargs.get('bypass_document_validation')
    if bypass_document_validation is True:
        # insert doc without validation of duplicated `_id`
        eid = self.table.insert(doc) # depends on [control=['if'], data=[]]
    else:
        existing = self.find_one({'_id': _id})
        if existing is None:
            eid = self.table.insert(doc) # depends on [control=['if'], data=[]]
        else:
            raise DuplicateKeyError(u'_id:{0} already exists in collection:{1}'.format(_id, self.tablename))
    return InsertOneResult(eid=eid, inserted_id=_id)
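A sketch of the intended behavior, assuming `coll` is an instance of the TinyDB-backed collection wrapper above and that DuplicateKeyError is importable from the same module:

result = coll.insert_one({'name': 'alice'})
print(result.inserted_id)  # auto-generated _id

# Re-using an existing _id without bypass_document_validation raises DuplicateKeyError.
try:
    coll.insert_one({'_id': result.inserted_id, 'name': 'bob'})
except DuplicateKeyError as exc:
    print(exc)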
def info_signal(self, args): """Print information about a signal""" if len(args) == 0: return None signame = args[0] if signame in ['handle', 'signal']: # This has come from dbgr's info command if len(args) == 1: # Show all signal handlers self.dbgr.core.processor.section(self.header) for signame in self.siglist: self.print_info_signal_entry(signame) return True else: signame = args[1] pass pass signame = self.is_name_or_number(signame) self.dbgr.core.processor.section(self.header) self.print_info_signal_entry(signame) return True
def function[info_signal, parameter[self, args]]: constant[Print information about a signal] if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:] return[constant[None]] variable[signame] assign[=] call[name[args]][constant[0]] if compare[name[signame] in list[[<ast.Constant object at 0x7da1b032cdf0>, <ast.Constant object at 0x7da1b032cc40>]]] begin[:] if compare[call[name[len], parameter[name[args]]] equal[==] constant[1]] begin[:] call[name[self].dbgr.core.processor.section, parameter[name[self].header]] for taget[name[signame]] in starred[name[self].siglist] begin[:] call[name[self].print_info_signal_entry, parameter[name[signame]]] return[constant[True]] pass variable[signame] assign[=] call[name[self].is_name_or_number, parameter[name[signame]]] call[name[self].dbgr.core.processor.section, parameter[name[self].header]] call[name[self].print_info_signal_entry, parameter[name[signame]]] return[constant[True]]
keyword[def] identifier[info_signal] ( identifier[self] , identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] )== literal[int] : keyword[return] keyword[None] identifier[signame] = identifier[args] [ literal[int] ] keyword[if] identifier[signame] keyword[in] [ literal[string] , literal[string] ]: keyword[if] identifier[len] ( identifier[args] )== literal[int] : identifier[self] . identifier[dbgr] . identifier[core] . identifier[processor] . identifier[section] ( identifier[self] . identifier[header] ) keyword[for] identifier[signame] keyword[in] identifier[self] . identifier[siglist] : identifier[self] . identifier[print_info_signal_entry] ( identifier[signame] ) keyword[return] keyword[True] keyword[else] : identifier[signame] = identifier[args] [ literal[int] ] keyword[pass] keyword[pass] identifier[signame] = identifier[self] . identifier[is_name_or_number] ( identifier[signame] ) identifier[self] . identifier[dbgr] . identifier[core] . identifier[processor] . identifier[section] ( identifier[self] . identifier[header] ) identifier[self] . identifier[print_info_signal_entry] ( identifier[signame] ) keyword[return] keyword[True]
def info_signal(self, args): """Print information about a signal""" if len(args) == 0: return None # depends on [control=['if'], data=[]] signame = args[0] if signame in ['handle', 'signal']: # This has come from dbgr's info command if len(args) == 1: # Show all signal handlers self.dbgr.core.processor.section(self.header) for signame in self.siglist: self.print_info_signal_entry(signame) # depends on [control=['for'], data=['signame']] return True # depends on [control=['if'], data=[]] else: signame = args[1] pass pass # depends on [control=['if'], data=['signame']] signame = self.is_name_or_number(signame) self.dbgr.core.processor.section(self.header) self.print_info_signal_entry(signame) return True
def patch_mock_desc(self, patch, *args, **kwarg):
    """
    Context manager or decorator in order to patch a mock definition of
    a service endpoint in a test.

    :param patch: Dictionary in order to update the endpoint's mock definition
    :type patch: dict
    :param service_name: Name of the service where you want to use the mock.
        If None it will be used as soon as possible.
    :type service_name: str
    :param endpoint: Endpoint where you want to use the mock.
        If None it will be used as soon as possible.
    :type endpoint: str
    :param offset: Number of times it must be ignored before use. Default 0.
        Only non-negative integers.
    :type offset: int
    :param limit: Number of times it can be used. Default 1. 0 means no limit.
        Only non-negative integers.
    :type limit: int
    :return: PatchMockDescDefinition
    """
    return PatchMockDescDefinition(patch, self, *args, **kwarg)
def function[patch_mock_desc, parameter[self, patch]]:
    constant[
    Context manager or decorator in order to patch a mock definition of
    a service endpoint in a test.

    :param patch: Dictionary in order to update the endpoint's mock definition
    :type patch: dict
    :param service_name: Name of the service where you want to use the mock.
        If None it will be used as soon as possible.
    :type service_name: str
    :param endpoint: Endpoint where you want to use the mock.
        If None it will be used as soon as possible.
    :type endpoint: str
    :param offset: Number of times it must be ignored before use. Default 0.
        Only non-negative integers.
    :type offset: int
    :param limit: Number of times it can be used. Default 1. 0 means no limit.
        Only non-negative integers.
    :type limit: int
    :return: PatchMockDescDefinition
    ]
    return[call[name[PatchMockDescDefinition], parameter[name[patch], name[self], <ast.Starred object at 0x7da1b130a050>]]]
keyword[def] identifier[patch_mock_desc] ( identifier[self] , identifier[patch] ,* identifier[args] ,** identifier[kwarg] ): literal[string] keyword[return] identifier[PatchMockDescDefinition] ( identifier[patch] , identifier[self] ,* identifier[args] ,** identifier[kwarg] )
def patch_mock_desc(self, patch, *args, **kwarg):
    """
    Context manager or decorator in order to patch a mock definition of
    a service endpoint in a test.

    :param patch: Dictionary in order to update the endpoint's mock definition
    :type patch: dict
    :param service_name: Name of the service where you want to use the mock.
        If None it will be used as soon as possible.
    :type service_name: str
    :param endpoint: Endpoint where you want to use the mock.
        If None it will be used as soon as possible.
    :type endpoint: str
    :param offset: Number of times it must be ignored before use. Default 0.
        Only non-negative integers.
    :type offset: int
    :param limit: Number of times it can be used. Default 1. 0 means no limit.
        Only non-negative integers.
    :type limit: int
    :return: PatchMockDescDefinition
    """
    return PatchMockDescDefinition(patch, self, *args, **kwarg)
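A minimal usage sketch for patch_mock_desc, assuming the returned PatchMockDescDefinition acts as a context manager as the docstring states; the server object, service name, endpoint, and payload are hypothetical.

patch = {'response': {'status_code': 503}}
# Skip the first matching call (offset=1), then apply the patch once (limit=1).
with server.patch_mock_desc(patch, service_name='users',
                            endpoint='/users/1', offset=1, limit=1):
    ...  # exercise code that hits the mocked endpoint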
def match(self, p_todo):
    """
    Performs a match on a priority in the todo.

    It gets the priority from p_todo and compares it with the
    user-entered expression, based on the given operator (default ==).
    However, it does so in reversed order to obtain a more intuitive
    result. Example: (>B) will match todos with priority (A).

    Items without priority are designated with the corresponding operand
    set to 'ZZ', because Python doesn't allow NoneType() and str()
    comparisons.
    """
    operand1 = self.value
    operand2 = p_todo.priority() or 'ZZ'

    return self.compare_operands(operand1, operand2)
def function[match, parameter[self, p_todo]]:
    constant[
    Performs a match on a priority in the todo.

    It gets the priority from p_todo and compares it with the
    user-entered expression, based on the given operator (default ==).
    However, it does so in reversed order to obtain a more intuitive
    result. Example: (>B) will match todos with priority (A).

    Items without priority are designated with the corresponding operand
    set to 'ZZ', because Python doesn't allow NoneType() and str()
    comparisons.
    ]
    variable[operand1] assign[=] name[self].value
    variable[operand2] assign[=] <ast.BoolOp object at 0x7da1b2345240>
    return[call[name[self].compare_operands, parameter[name[operand1], name[operand2]]]]
keyword[def] identifier[match] ( identifier[self] , identifier[p_todo] ): literal[string] identifier[operand1] = identifier[self] . identifier[value] identifier[operand2] = identifier[p_todo] . identifier[priority] () keyword[or] literal[string] keyword[return] identifier[self] . identifier[compare_operands] ( identifier[operand1] , identifier[operand2] )
def match(self, p_todo):
    """
    Performs a match on a priority in the todo.

    It gets the priority from p_todo and compares it with the
    user-entered expression, based on the given operator (default ==).
    However, it does so in reversed order to obtain a more intuitive
    result. Example: (>B) will match todos with priority (A).

    Items without priority are designated with the corresponding operand
    set to 'ZZ', because Python doesn't allow NoneType() and str()
    comparisons.
    """
    operand1 = self.value
    operand2 = p_todo.priority() or 'ZZ'
    return self.compare_operands(operand1, operand2)
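To make the reversed comparison concrete, a small sketch; compare_gt is a hypothetical stand-in for compare_operands with the user-entered operator '>'.

def compare_gt(operand1, operand2):
    # operand1 is the user-entered value, operand2 the todo's priority
    return operand1 > operand2

assert compare_gt('B', 'A')        # (>B) matches a todo with priority (A)
assert not compare_gt('B', 'C')    # ...but not one with priority (C)
assert not compare_gt('B', 'ZZ')   # todos without priority never match (>B)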
def _LayoutShapeFactory(shape_elm, parent): """ Return an instance of the appropriate shape proxy class for *shape_elm* on a slide layout. """ tag_name = shape_elm.tag if tag_name == qn('p:sp') and shape_elm.has_ph_elm: return LayoutPlaceholder(shape_elm, parent) return BaseShapeFactory(shape_elm, parent)
def function[_LayoutShapeFactory, parameter[shape_elm, parent]]: constant[ Return an instance of the appropriate shape proxy class for *shape_elm* on a slide layout. ] variable[tag_name] assign[=] name[shape_elm].tag if <ast.BoolOp object at 0x7da204961ba0> begin[:] return[call[name[LayoutPlaceholder], parameter[name[shape_elm], name[parent]]]] return[call[name[BaseShapeFactory], parameter[name[shape_elm], name[parent]]]]
keyword[def] identifier[_LayoutShapeFactory] ( identifier[shape_elm] , identifier[parent] ): literal[string] identifier[tag_name] = identifier[shape_elm] . identifier[tag] keyword[if] identifier[tag_name] == identifier[qn] ( literal[string] ) keyword[and] identifier[shape_elm] . identifier[has_ph_elm] : keyword[return] identifier[LayoutPlaceholder] ( identifier[shape_elm] , identifier[parent] ) keyword[return] identifier[BaseShapeFactory] ( identifier[shape_elm] , identifier[parent] )
def _LayoutShapeFactory(shape_elm, parent): """ Return an instance of the appropriate shape proxy class for *shape_elm* on a slide layout. """ tag_name = shape_elm.tag if tag_name == qn('p:sp') and shape_elm.has_ph_elm: return LayoutPlaceholder(shape_elm, parent) # depends on [control=['if'], data=[]] return BaseShapeFactory(shape_elm, parent)
def delete_expired_requests():
    """Delete expired inclusion requests."""
    InclusionRequest.query.filter(
        InclusionRequest.expiry_date < datetime.utcnow()).delete()
    db.session.commit()
def function[delete_expired_requests, parameter[]]:
    constant[Delete expired inclusion requests.]
    call[call[name[InclusionRequest].query.filter, parameter[compare[name[InclusionRequest].expiry_date less[<] call[name[datetime].utcnow, parameter[]]]]].delete, parameter[]]
    call[name[db].session.commit, parameter[]]
keyword[def] identifier[delete_expired_requests] (): literal[string] identifier[InclusionRequest] . identifier[query] . identifier[filter] ( identifier[InclusionRequest] . identifier[expiry_date] < identifier[datetime] . identifier[utcnow] ()). identifier[delete] () identifier[db] . identifier[session] . identifier[commit] ()
def delete_expired_requests():
    """Delete expired inclusion requests."""
    InclusionRequest.query.filter(InclusionRequest.expiry_date < datetime.utcnow()).delete()
    db.session.commit()
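The same bulk-delete pattern in isolation, as a sketch: the session and model names are placeholders, and synchronize_session=False is one common choice for SQLAlchemy bulk deletes.

from datetime import datetime

(session.query(InclusionRequest)
        .filter(InclusionRequest.expiry_date < datetime.utcnow())
        .delete(synchronize_session=False))
session.commit()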
def p_property_decl(self, p): """ property_decl : prop_open style_list t_semicolon | prop_open style_list css_important t_semicolon | prop_open empty t_semicolon """ l = len(p) p[0] = Property(list(p)[1:-1], p.lineno(l - 1))
def function[p_property_decl, parameter[self, p]]: constant[ property_decl : prop_open style_list t_semicolon | prop_open style_list css_important t_semicolon | prop_open empty t_semicolon ] variable[l] assign[=] call[name[len], parameter[name[p]]] call[name[p]][constant[0]] assign[=] call[name[Property], parameter[call[call[name[list], parameter[name[p]]]][<ast.Slice object at 0x7da1aff8c520>], call[name[p].lineno, parameter[binary_operation[name[l] - constant[1]]]]]]
keyword[def] identifier[p_property_decl] ( identifier[self] , identifier[p] ): literal[string] identifier[l] = identifier[len] ( identifier[p] ) identifier[p] [ literal[int] ]= identifier[Property] ( identifier[list] ( identifier[p] )[ literal[int] :- literal[int] ], identifier[p] . identifier[lineno] ( identifier[l] - literal[int] ))
def p_property_decl(self, p): """ property_decl : prop_open style_list t_semicolon | prop_open style_list css_important t_semicolon | prop_open empty t_semicolon """ l = len(p) p[0] = Property(list(p)[1:-1], p.lineno(l - 1))
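How the PLY slice indexing above lines up with the grammar, written out as a sketch:

# For the production "prop_open style_list css_important t_semicolon":
#   len(p) == 5
#   p[0] -> result slot (receives the new Property)
#   p[1] -> prop_open, p[2] -> style_list,
#   p[3] -> css_important, p[4] -> t_semicolon
# list(p)[1:-1] therefore gathers p[1]..p[3], i.e. every matched symbol
# except the result slot and the trailing semicolon, and
# p.lineno(len(p) - 1) reads the line number recorded for the last symbol.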
def fast_pop(self, key=NOT_SET, index=NOT_SET):
    """Pop a specific item quickly by swapping it to the end.

    Remove value with given key or index (last item by default) fast
    by swapping it to the last place first.

    Changes order of the remaining items (item that used to be last goes to
    the popped location).

    Returns tuple of (popped_value, new_moved_index, moved_key, moved_value).
    If key is not found raises KeyError or IndexError.

    Runs in O(1).
    """
    if index is NOT_SET and key is not NOT_SET:
        index, popped_value = self._dict.pop(key)
    elif key is NOT_SET:
        if index is NOT_SET:
            index = len(self._list) - 1
            key, popped_value2 = self._list[-1]
        else:
            key, popped_value2 = self._list[index]
            if index < 0:
                index += len(self._list)
        index2, popped_value = self._dict.pop(key)
        assert index == index2
    else:
        raise KEY_AND_INDEX_ERROR

    if key == self._list[-1][0]:
        # The item we're removing happens to be the last in the list,
        # no swapping needed
        _, popped_value2 = self._list.pop()
        assert popped_value is popped_value2
        return popped_value, len(self._list), key, popped_value
    else:
        # Swap the last item onto the deleted spot and
        # pop the last item from the list
        self._list[index] = self._list[-1]
        moved_key, moved_value = self._list.pop()
        self._dict[moved_key] = (index, moved_value)
        return popped_value, index, moved_key, moved_value
def function[fast_pop, parameter[self, key, index]]:
    constant[Pop a specific item quickly by swapping it to the end.

    Remove value with given key or index (last item by default) fast
    by swapping it to the last place first.

    Changes order of the remaining items (item that used to be last goes to
    the popped location).

    Returns tuple of (popped_value, new_moved_index, moved_key, moved_value).
    If key is not found raises KeyError or IndexError.

    Runs in O(1).
    ]
    if <ast.BoolOp object at 0x7da1b26ed120> begin[:]
        <ast.Tuple object at 0x7da1b26ecb80> assign[=] call[name[self]._dict.pop, parameter[name[key]]]
    if compare[name[key] equal[==] call[call[name[self]._list][<ast.UnaryOp object at 0x7da1b2584610>]][constant[0]]] begin[:]
        <ast.Tuple object at 0x7da1b2584a00> assign[=] call[name[self]._list.pop, parameter[]]
        assert[compare[name[popped_value] is name[popped_value2]]]
        return[tuple[[<ast.Name object at 0x7da1b2584790>, <ast.Call object at 0x7da20e957160>, <ast.Name object at 0x7da20e9551b0>, <ast.Name object at 0x7da20e954af0>]]]
keyword[def] identifier[fast_pop] ( identifier[self] , identifier[key] = identifier[NOT_SET] , identifier[index] = identifier[NOT_SET] ): literal[string] keyword[if] identifier[index] keyword[is] identifier[NOT_SET] keyword[and] identifier[key] keyword[is] keyword[not] identifier[NOT_SET] : identifier[index] , identifier[popped_value] = identifier[self] . identifier[_dict] . identifier[pop] ( identifier[key] ) keyword[elif] identifier[key] keyword[is] identifier[NOT_SET] : keyword[if] identifier[index] keyword[is] identifier[NOT_SET] : identifier[index] = identifier[len] ( identifier[self] . identifier[_list] )- literal[int] identifier[key] , identifier[popped_value2] = identifier[self] . identifier[_list] [- literal[int] ] keyword[else] : identifier[key] , identifier[popped_value2] = identifier[self] . identifier[_list] [ identifier[index] ] keyword[if] identifier[index] < literal[int] : identifier[index] += identifier[len] ( identifier[self] . identifier[_list] ) identifier[index2] , identifier[popped_value] = identifier[self] . identifier[_dict] . identifier[pop] ( identifier[key] ) keyword[assert] identifier[index] == identifier[index2] keyword[else] : keyword[raise] identifier[KEY_AND_INDEX_ERROR] keyword[if] identifier[key] == identifier[self] . identifier[_list] [- literal[int] ][ literal[int] ]: identifier[_] , identifier[popped_value2] = identifier[self] . identifier[_list] . identifier[pop] () keyword[assert] identifier[popped_value] keyword[is] identifier[popped_value2] keyword[return] identifier[popped_value] , identifier[len] ( identifier[self] . identifier[_list] ), identifier[key] , identifier[popped_value] keyword[else] : identifier[self] . identifier[_list] [ identifier[index] ]= identifier[self] . identifier[_list] [- literal[int] ] identifier[moved_key] , identifier[moved_value] = identifier[self] . identifier[_list] . identifier[pop] () identifier[self] . identifier[_dict] [ identifier[moved_key] ]=( identifier[index] , identifier[moved_value] ) keyword[return] identifier[popped_value] , identifier[index] , identifier[moved_key] , identifier[moved_value]
def fast_pop(self, key=NOT_SET, index=NOT_SET):
    """Pop a specific item quickly by swapping it to the end.

    Remove value with given key or index (last item by default) fast
    by swapping it to the last place first.

    Changes order of the remaining items (item that used to be last goes to
    the popped location).

    Returns tuple of (popped_value, new_moved_index, moved_key, moved_value).
    If key is not found raises KeyError or IndexError.

    Runs in O(1).
    """
    if index is NOT_SET and key is not NOT_SET:
        (index, popped_value) = self._dict.pop(key) # depends on [control=['if'], data=[]]
    elif key is NOT_SET:
        if index is NOT_SET:
            index = len(self._list) - 1
            (key, popped_value2) = self._list[-1] # depends on [control=['if'], data=['index']]
        else:
            (key, popped_value2) = self._list[index]
            if index < 0:
                index += len(self._list) # depends on [control=['if'], data=['index']]
        (index2, popped_value) = self._dict.pop(key)
        assert index == index2 # depends on [control=['if'], data=['key', 'NOT_SET']]
    else:
        raise KEY_AND_INDEX_ERROR
    if key == self._list[-1][0]:
        # The item we're removing happens to be the last in the list,
        # no swapping needed
        (_, popped_value2) = self._list.pop()
        assert popped_value is popped_value2
        return (popped_value, len(self._list), key, popped_value) # depends on [control=['if'], data=['key']]
    else:
        # Swap the last item onto the deleted spot and
        # pop the last item from the list
        self._list[index] = self._list[-1]
        (moved_key, moved_value) = self._list.pop()
        self._dict[moved_key] = (index, moved_value)
        return (popped_value, index, moved_key, moved_value)
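A usage sketch for the swap-and-pop above; IndexedOrderedDict is a hypothetical container name, but the construction and return shape follow the docstring.

d = IndexedOrderedDict([('a', 1), ('b', 2), ('c', 3)])
popped, new_index, moved_key, moved_value = d.fast_pop('a')
# popped == 1; the former last item ('c', 3) was swapped into index 0,
# so new_index == 0, moved_key == 'c', and moved_value == 3.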
def command_list(prog_name, prof_mgr, prof_name, prog_args): """ Print the list of components. """ # Retrieve arguments parser = argparse.ArgumentParser( prog=prog_name ) args = parser.parse_args(prog_args) # Profile load prof_stub = prof_mgr.load(prof_name) # Print component list out = io.StringIO() for comp_stub in prof_stub.component_list(): if comp_stub.name() is not None: out.write(comp_stub.name()) out.write("\n") out.seek(0) sys.stdout.write(out.read())
def function[command_list, parameter[prog_name, prof_mgr, prof_name, prog_args]]: constant[ Print the list of components. ] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] variable[args] assign[=] call[name[parser].parse_args, parameter[name[prog_args]]] variable[prof_stub] assign[=] call[name[prof_mgr].load, parameter[name[prof_name]]] variable[out] assign[=] call[name[io].StringIO, parameter[]] for taget[name[comp_stub]] in starred[call[name[prof_stub].component_list, parameter[]]] begin[:] if compare[call[name[comp_stub].name, parameter[]] is_not constant[None]] begin[:] call[name[out].write, parameter[call[name[comp_stub].name, parameter[]]]] call[name[out].write, parameter[constant[ ]]] call[name[out].seek, parameter[constant[0]]] call[name[sys].stdout.write, parameter[call[name[out].read, parameter[]]]]
keyword[def] identifier[command_list] ( identifier[prog_name] , identifier[prof_mgr] , identifier[prof_name] , identifier[prog_args] ): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[prog] = identifier[prog_name] ) identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[prog_args] ) identifier[prof_stub] = identifier[prof_mgr] . identifier[load] ( identifier[prof_name] ) identifier[out] = identifier[io] . identifier[StringIO] () keyword[for] identifier[comp_stub] keyword[in] identifier[prof_stub] . identifier[component_list] (): keyword[if] identifier[comp_stub] . identifier[name] () keyword[is] keyword[not] keyword[None] : identifier[out] . identifier[write] ( identifier[comp_stub] . identifier[name] ()) identifier[out] . identifier[write] ( literal[string] ) identifier[out] . identifier[seek] ( literal[int] ) identifier[sys] . identifier[stdout] . identifier[write] ( identifier[out] . identifier[read] ())
def command_list(prog_name, prof_mgr, prof_name, prog_args): """ Print the list of components. """ # Retrieve arguments parser = argparse.ArgumentParser(prog=prog_name) args = parser.parse_args(prog_args) # Profile load prof_stub = prof_mgr.load(prof_name) # Print component list out = io.StringIO() for comp_stub in prof_stub.component_list(): if comp_stub.name() is not None: out.write(comp_stub.name()) out.write('\n') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['comp_stub']] out.seek(0) sys.stdout.write(out.read())
def _add_games_to_schedule(self, schedule):
    """
    Add game information to list of games.

    Create a Game instance for the given game in the schedule and add it to
    the list of games the team has or will play during the season.

    Parameters
    ----------
    schedule : PyQuery object
        A PyQuery object pertaining to a team's schedule table.
    """
    for item in schedule:
        if 'class="thead"' in str(item) or \
           'class="over_header thead"' in str(item):
            continue  # pragma: no cover
        game = Game(item)
        self._games.append(game)
def function[_add_games_to_schedule, parameter[self, schedule]]:
    constant[
    Add game information to list of games.

    Create a Game instance for the given game in the schedule and add it to
    the list of games the team has or will play during the season.

    Parameters
    ----------
    schedule : PyQuery object
        A PyQuery object pertaining to a team's schedule table.
    ]
    for taget[name[item]] in starred[name[schedule]] begin[:]
        if <ast.BoolOp object at 0x7da1b0bc98d0> begin[:]
            continue
        variable[game] assign[=] call[name[Game], parameter[name[item]]]
        call[name[self]._games.append, parameter[name[game]]]
keyword[def] identifier[_add_games_to_schedule] ( identifier[self] , identifier[schedule] ): literal[string] keyword[for] identifier[item] keyword[in] identifier[schedule] : keyword[if] literal[string] keyword[in] identifier[str] ( identifier[item] ) keyword[or] literal[string] keyword[in] identifier[str] ( identifier[item] ): keyword[continue] identifier[game] = identifier[Game] ( identifier[item] ) identifier[self] . identifier[_games] . identifier[append] ( identifier[game] )
def _add_games_to_schedule(self, schedule):
    """
    Add game information to list of games.

    Create a Game instance for the given game in the schedule and add it to
    the list of games the team has or will play during the season.

    Parameters
    ----------
    schedule : PyQuery object
        A PyQuery object pertaining to a team's schedule table.
    """
    for item in schedule:
        if 'class="thead"' in str(item) or 'class="over_header thead"' in str(item):
            continue  # pragma: no cover # depends on [control=['if'], data=[]]
        game = Game(item)
        self._games.append(game) # depends on [control=['for'], data=['item']]
def render_head(self, ctx, data): """ Put liveglue content into the header of this page to activate it, but otherwise delegate to my parent's renderer for <head>. """ ctx.tag[tags.invisible(render=tags.directive('liveglue'))] return _PublicPageMixin.render_head(self, ctx, data)
def function[render_head, parameter[self, ctx, data]]: constant[ Put liveglue content into the header of this page to activate it, but otherwise delegate to my parent's renderer for <head>. ] call[name[ctx].tag][call[name[tags].invisible, parameter[]]] return[call[name[_PublicPageMixin].render_head, parameter[name[self], name[ctx], name[data]]]]
keyword[def] identifier[render_head] ( identifier[self] , identifier[ctx] , identifier[data] ): literal[string] identifier[ctx] . identifier[tag] [ identifier[tags] . identifier[invisible] ( identifier[render] = identifier[tags] . identifier[directive] ( literal[string] ))] keyword[return] identifier[_PublicPageMixin] . identifier[render_head] ( identifier[self] , identifier[ctx] , identifier[data] )
def render_head(self, ctx, data): """ Put liveglue content into the header of this page to activate it, but otherwise delegate to my parent's renderer for <head>. """ ctx.tag[tags.invisible(render=tags.directive('liveglue'))] return _PublicPageMixin.render_head(self, ctx, data)
def refresh(self, url=CONST.PANEL_URL): """Refresh the alarm device.""" response_object = AbodeDevice.refresh(self, url) # pylint: disable=W0212 self._abode._panel.update(response_object[0]) return response_object
def function[refresh, parameter[self, url]]: constant[Refresh the alarm device.] variable[response_object] assign[=] call[name[AbodeDevice].refresh, parameter[name[self], name[url]]] call[name[self]._abode._panel.update, parameter[call[name[response_object]][constant[0]]]] return[name[response_object]]
keyword[def] identifier[refresh] ( identifier[self] , identifier[url] = identifier[CONST] . identifier[PANEL_URL] ): literal[string] identifier[response_object] = identifier[AbodeDevice] . identifier[refresh] ( identifier[self] , identifier[url] ) identifier[self] . identifier[_abode] . identifier[_panel] . identifier[update] ( identifier[response_object] [ literal[int] ]) keyword[return] identifier[response_object]
def refresh(self, url=CONST.PANEL_URL): """Refresh the alarm device.""" response_object = AbodeDevice.refresh(self, url) # pylint: disable=W0212 self._abode._panel.update(response_object[0]) return response_object
def downsample(self, df=None, ds_type='kmeans', axis='row', num_samples=100, random_state=1000): ''' Downsample the matrix rows or columns (currently supporting kmeans only). Users can optionally pass in a DataFrame to be downsampled (and this will be incorporated into the network object). ''' return downsample_fun.main(self, df, ds_type, axis, num_samples, random_state)
def function[downsample, parameter[self, df, ds_type, axis, num_samples, random_state]]: constant[ Downsample the matrix rows or columns (currently supporting kmeans only). Users can optionally pass in a DataFrame to be downsampled (and this will be incorporated into the network object). ] return[call[name[downsample_fun].main, parameter[name[self], name[df], name[ds_type], name[axis], name[num_samples], name[random_state]]]]
keyword[def] identifier[downsample] ( identifier[self] , identifier[df] = keyword[None] , identifier[ds_type] = literal[string] , identifier[axis] = literal[string] , identifier[num_samples] = literal[int] , identifier[random_state] = literal[int] ): literal[string] keyword[return] identifier[downsample_fun] . identifier[main] ( identifier[self] , identifier[df] , identifier[ds_type] , identifier[axis] , identifier[num_samples] , identifier[random_state] )
def downsample(self, df=None, ds_type='kmeans', axis='row', num_samples=100, random_state=1000): """ Downsample the matrix rows or columns (currently supporting kmeans only). Users can optionally pass in a DataFrame to be downsampled (and this will be incorporated into the network object). """ return downsample_fun.main(self, df, ds_type, axis, num_samples, random_state)
def _mirror_penalized(self, f_values, idx):
    """obsolete and subject to removal (TODO), return modified f-values
    such that for each mirror one becomes worst.

    This function is useless when selective mirroring is applied with no
    more than (lambda-mu)/2 solutions.

    Mirrors are leading and trailing values in ``f_values``.

    """
    assert len(f_values) >= 2 * len(idx)
    m = np.max(np.abs(f_values))
    for i in range(len(idx)):
        if f_values[idx[i]] > f_values[-1 - i]:
            f_values[idx[i]] += m
        else:
            f_values[-1 - i] += m
    return f_values
def function[_mirror_penalized, parameter[self, f_values, idx]]:
    constant[obsolete and subject to removal (TODO), return modified f-values
    such that for each mirror one becomes worst.

    This function is useless when selective mirroring is applied with no
    more than (lambda-mu)/2 solutions.

    Mirrors are leading and trailing values in ``f_values``.

    ]
    assert[compare[call[name[len], parameter[name[f_values]]] greater_or_equal[>=] binary_operation[constant[2] * call[name[len], parameter[name[idx]]]]]]
    variable[m] assign[=] call[name[np].max, parameter[call[name[np].abs, parameter[name[f_values]]]]]
    for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[idx]]]]]] begin[:]
        if compare[call[name[f_values]][call[name[idx]][name[i]]] greater[>] call[name[f_values]][binary_operation[<ast.UnaryOp object at 0x7da1b0b5f3d0> - name[i]]]] begin[:]
            <ast.AugAssign object at 0x7da1b0b5d090>
    return[name[f_values]]
keyword[def] identifier[_mirror_penalized] ( identifier[self] , identifier[f_values] , identifier[idx] ): literal[string] keyword[assert] identifier[len] ( identifier[f_values] )>= literal[int] * identifier[len] ( identifier[idx] ) identifier[m] = identifier[np] . identifier[max] ( identifier[np] . identifier[abs] ( identifier[f_values] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[idx] )): keyword[if] identifier[f_values] [ identifier[idx] [ identifier[i] ]]> identifier[f_values] [- literal[int] - identifier[i] ]: identifier[f_values] [ identifier[idx] [ identifier[i] ]]+= identifier[m] keyword[else] : identifier[f_values] [- literal[int] - identifier[i] ]+= identifier[m] keyword[return] identifier[f_values]
def _mirror_penalized(self, f_values, idx):
    """obsolete and subject to removal (TODO), return modified f-values
    such that for each mirror one becomes worst.

    This function is useless when selective mirroring is applied with no
    more than (lambda-mu)/2 solutions.

    Mirrors are leading and trailing values in ``f_values``.

    """
    assert len(f_values) >= 2 * len(idx)
    m = np.max(np.abs(f_values))
    for i in range(len(idx)):
        if f_values[idx[i]] > f_values[-1 - i]:
            f_values[idx[i]] += m # depends on [control=['if'], data=[]]
        else:
            f_values[-1 - i] += m # depends on [control=['for'], data=['i']]
    return f_values
def prepare_mac_header(token, uri, key, http_method, nonce=None, headers=None, body=None, ext='', hash_algorithm='hmac-sha-1', issue_time=None, draft=0): """Add an `MAC Access Authentication`_ signature to headers. Unlike OAuth 1, this HMAC signature does not require inclusion of the request payload/body, neither does it use a combination of client_secret and token_secret but rather a mac_key provided together with the access token. Currently two algorithms are supported, "hmac-sha-1" and "hmac-sha-256", `extension algorithms`_ are not supported. Example MAC Authorization header, linebreaks added for clarity Authorization: MAC id="h480djs93hd8", nonce="1336363200:dj83hs9s", mac="bhCQXTVyfj5cmA9uKkPFx1zeOXM=" .. _`MAC Access Authentication`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01 .. _`extension algorithms`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-7.1 :param token: :param uri: Request URI. :param key: MAC given provided by token endpoint. :param http_method: HTTP Request method. :param nonce: :param headers: Request headers as a dictionary. :param body: :param ext: :param hash_algorithm: HMAC algorithm provided by token endpoint. :param issue_time: Time when the MAC credentials were issued (datetime). :param draft: MAC authentication specification version. :return: headers dictionary with the authorization field added. """ http_method = http_method.upper() host, port = utils.host_from_uri(uri) if hash_algorithm.lower() == 'hmac-sha-1': h = hashlib.sha1 elif hash_algorithm.lower() == 'hmac-sha-256': h = hashlib.sha256 else: raise ValueError('unknown hash algorithm') if draft == 0: nonce = nonce or '{0}:{1}'.format(utils.generate_age(issue_time), common.generate_nonce()) else: ts = common.generate_timestamp() nonce = common.generate_nonce() sch, net, path, par, query, fra = urlparse(uri) if query: request_uri = path + '?' + query else: request_uri = path # Hash the body/payload if body is not None and draft == 0: body = body.encode('utf-8') bodyhash = b2a_base64(h(body).digest())[:-1].decode('utf-8') else: bodyhash = '' # Create the normalized base string base = [] if draft == 0: base.append(nonce) else: base.append(ts) base.append(nonce) base.append(http_method.upper()) base.append(request_uri) base.append(host) base.append(port) if draft == 0: base.append(bodyhash) base.append(ext or '') base_string = '\n'.join(base) + '\n' # hmac struggles with unicode strings - http://bugs.python.org/issue5285 if isinstance(key, unicode_type): key = key.encode('utf-8') sign = hmac.new(key, base_string.encode('utf-8'), h) sign = b2a_base64(sign.digest())[:-1].decode('utf-8') header = [] header.append('MAC id="%s"' % token) if draft != 0: header.append('ts="%s"' % ts) header.append('nonce="%s"' % nonce) if bodyhash: header.append('bodyhash="%s"' % bodyhash) if ext: header.append('ext="%s"' % ext) header.append('mac="%s"' % sign) headers = headers or {} headers['Authorization'] = ', '.join(header) return headers
def function[prepare_mac_header, parameter[token, uri, key, http_method, nonce, headers, body, ext, hash_algorithm, issue_time, draft]]: constant[Add an `MAC Access Authentication`_ signature to headers. Unlike OAuth 1, this HMAC signature does not require inclusion of the request payload/body, neither does it use a combination of client_secret and token_secret but rather a mac_key provided together with the access token. Currently two algorithms are supported, "hmac-sha-1" and "hmac-sha-256", `extension algorithms`_ are not supported. Example MAC Authorization header, linebreaks added for clarity Authorization: MAC id="h480djs93hd8", nonce="1336363200:dj83hs9s", mac="bhCQXTVyfj5cmA9uKkPFx1zeOXM=" .. _`MAC Access Authentication`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01 .. _`extension algorithms`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-7.1 :param token: :param uri: Request URI. :param key: MAC given provided by token endpoint. :param http_method: HTTP Request method. :param nonce: :param headers: Request headers as a dictionary. :param body: :param ext: :param hash_algorithm: HMAC algorithm provided by token endpoint. :param issue_time: Time when the MAC credentials were issued (datetime). :param draft: MAC authentication specification version. :return: headers dictionary with the authorization field added. ] variable[http_method] assign[=] call[name[http_method].upper, parameter[]] <ast.Tuple object at 0x7da1b179ba30> assign[=] call[name[utils].host_from_uri, parameter[name[uri]]] if compare[call[name[hash_algorithm].lower, parameter[]] equal[==] constant[hmac-sha-1]] begin[:] variable[h] assign[=] name[hashlib].sha1 if compare[name[draft] equal[==] constant[0]] begin[:] variable[nonce] assign[=] <ast.BoolOp object at 0x7da1b179b340> <ast.Tuple object at 0x7da1b17988b0> assign[=] call[name[urlparse], parameter[name[uri]]] if name[query] begin[:] variable[request_uri] assign[=] binary_operation[binary_operation[name[path] + constant[?]] + name[query]] if <ast.BoolOp object at 0x7da1b1798970> begin[:] variable[body] assign[=] call[name[body].encode, parameter[constant[utf-8]]] variable[bodyhash] assign[=] call[call[call[name[b2a_base64], parameter[call[call[name[h], parameter[name[body]]].digest, parameter[]]]]][<ast.Slice object at 0x7da1b179a740>].decode, parameter[constant[utf-8]]] variable[base] assign[=] list[[]] if compare[name[draft] equal[==] constant[0]] begin[:] call[name[base].append, parameter[name[nonce]]] call[name[base].append, parameter[call[name[http_method].upper, parameter[]]]] call[name[base].append, parameter[name[request_uri]]] call[name[base].append, parameter[name[host]]] call[name[base].append, parameter[name[port]]] if compare[name[draft] equal[==] constant[0]] begin[:] call[name[base].append, parameter[name[bodyhash]]] call[name[base].append, parameter[<ast.BoolOp object at 0x7da1b179a890>]] variable[base_string] assign[=] binary_operation[call[constant[ ].join, parameter[name[base]]] + constant[ ]] if call[name[isinstance], parameter[name[key], name[unicode_type]]] begin[:] variable[key] assign[=] call[name[key].encode, parameter[constant[utf-8]]] variable[sign] assign[=] call[name[hmac].new, parameter[name[key], call[name[base_string].encode, parameter[constant[utf-8]]], name[h]]] variable[sign] assign[=] call[call[call[name[b2a_base64], parameter[call[name[sign].digest, parameter[]]]]][<ast.Slice object at 0x7da1b17985e0>].decode, parameter[constant[utf-8]]] variable[header] assign[=] list[[]] 
call[name[header].append, parameter[binary_operation[constant[MAC id="%s"] <ast.Mod object at 0x7da2590d6920> name[token]]]] if compare[name[draft] not_equal[!=] constant[0]] begin[:] call[name[header].append, parameter[binary_operation[constant[ts="%s"] <ast.Mod object at 0x7da2590d6920> name[ts]]]] call[name[header].append, parameter[binary_operation[constant[nonce="%s"] <ast.Mod object at 0x7da2590d6920> name[nonce]]]] if name[bodyhash] begin[:] call[name[header].append, parameter[binary_operation[constant[bodyhash="%s"] <ast.Mod object at 0x7da2590d6920> name[bodyhash]]]] if name[ext] begin[:] call[name[header].append, parameter[binary_operation[constant[ext="%s"] <ast.Mod object at 0x7da2590d6920> name[ext]]]] call[name[header].append, parameter[binary_operation[constant[mac="%s"] <ast.Mod object at 0x7da2590d6920> name[sign]]]] variable[headers] assign[=] <ast.BoolOp object at 0x7da1b18e5030> call[name[headers]][constant[Authorization]] assign[=] call[constant[, ].join, parameter[name[header]]] return[name[headers]]
keyword[def] identifier[prepare_mac_header] ( identifier[token] , identifier[uri] , identifier[key] , identifier[http_method] , identifier[nonce] = keyword[None] , identifier[headers] = keyword[None] , identifier[body] = keyword[None] , identifier[ext] = literal[string] , identifier[hash_algorithm] = literal[string] , identifier[issue_time] = keyword[None] , identifier[draft] = literal[int] ): literal[string] identifier[http_method] = identifier[http_method] . identifier[upper] () identifier[host] , identifier[port] = identifier[utils] . identifier[host_from_uri] ( identifier[uri] ) keyword[if] identifier[hash_algorithm] . identifier[lower] ()== literal[string] : identifier[h] = identifier[hashlib] . identifier[sha1] keyword[elif] identifier[hash_algorithm] . identifier[lower] ()== literal[string] : identifier[h] = identifier[hashlib] . identifier[sha256] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[draft] == literal[int] : identifier[nonce] = identifier[nonce] keyword[or] literal[string] . identifier[format] ( identifier[utils] . identifier[generate_age] ( identifier[issue_time] ), identifier[common] . identifier[generate_nonce] ()) keyword[else] : identifier[ts] = identifier[common] . identifier[generate_timestamp] () identifier[nonce] = identifier[common] . identifier[generate_nonce] () identifier[sch] , identifier[net] , identifier[path] , identifier[par] , identifier[query] , identifier[fra] = identifier[urlparse] ( identifier[uri] ) keyword[if] identifier[query] : identifier[request_uri] = identifier[path] + literal[string] + identifier[query] keyword[else] : identifier[request_uri] = identifier[path] keyword[if] identifier[body] keyword[is] keyword[not] keyword[None] keyword[and] identifier[draft] == literal[int] : identifier[body] = identifier[body] . identifier[encode] ( literal[string] ) identifier[bodyhash] = identifier[b2a_base64] ( identifier[h] ( identifier[body] ). identifier[digest] ())[:- literal[int] ]. identifier[decode] ( literal[string] ) keyword[else] : identifier[bodyhash] = literal[string] identifier[base] =[] keyword[if] identifier[draft] == literal[int] : identifier[base] . identifier[append] ( identifier[nonce] ) keyword[else] : identifier[base] . identifier[append] ( identifier[ts] ) identifier[base] . identifier[append] ( identifier[nonce] ) identifier[base] . identifier[append] ( identifier[http_method] . identifier[upper] ()) identifier[base] . identifier[append] ( identifier[request_uri] ) identifier[base] . identifier[append] ( identifier[host] ) identifier[base] . identifier[append] ( identifier[port] ) keyword[if] identifier[draft] == literal[int] : identifier[base] . identifier[append] ( identifier[bodyhash] ) identifier[base] . identifier[append] ( identifier[ext] keyword[or] literal[string] ) identifier[base_string] = literal[string] . identifier[join] ( identifier[base] )+ literal[string] keyword[if] identifier[isinstance] ( identifier[key] , identifier[unicode_type] ): identifier[key] = identifier[key] . identifier[encode] ( literal[string] ) identifier[sign] = identifier[hmac] . identifier[new] ( identifier[key] , identifier[base_string] . identifier[encode] ( literal[string] ), identifier[h] ) identifier[sign] = identifier[b2a_base64] ( identifier[sign] . identifier[digest] ())[:- literal[int] ]. identifier[decode] ( literal[string] ) identifier[header] =[] identifier[header] . 
identifier[append] ( literal[string] % identifier[token] ) keyword[if] identifier[draft] != literal[int] : identifier[header] . identifier[append] ( literal[string] % identifier[ts] ) identifier[header] . identifier[append] ( literal[string] % identifier[nonce] ) keyword[if] identifier[bodyhash] : identifier[header] . identifier[append] ( literal[string] % identifier[bodyhash] ) keyword[if] identifier[ext] : identifier[header] . identifier[append] ( literal[string] % identifier[ext] ) identifier[header] . identifier[append] ( literal[string] % identifier[sign] ) identifier[headers] = identifier[headers] keyword[or] {} identifier[headers] [ literal[string] ]= literal[string] . identifier[join] ( identifier[header] ) keyword[return] identifier[headers]
def prepare_mac_header(token, uri, key, http_method, nonce=None, headers=None, body=None, ext='', hash_algorithm='hmac-sha-1', issue_time=None, draft=0): """Add an `MAC Access Authentication`_ signature to headers. Unlike OAuth 1, this HMAC signature does not require inclusion of the request payload/body, neither does it use a combination of client_secret and token_secret but rather a mac_key provided together with the access token. Currently two algorithms are supported, "hmac-sha-1" and "hmac-sha-256", `extension algorithms`_ are not supported. Example MAC Authorization header, linebreaks added for clarity Authorization: MAC id="h480djs93hd8", nonce="1336363200:dj83hs9s", mac="bhCQXTVyfj5cmA9uKkPFx1zeOXM=" .. _`MAC Access Authentication`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01 .. _`extension algorithms`: https://tools.ietf.org/html/draft-ietf-oauth-v2-http-mac-01#section-7.1 :param token: :param uri: Request URI. :param key: MAC given provided by token endpoint. :param http_method: HTTP Request method. :param nonce: :param headers: Request headers as a dictionary. :param body: :param ext: :param hash_algorithm: HMAC algorithm provided by token endpoint. :param issue_time: Time when the MAC credentials were issued (datetime). :param draft: MAC authentication specification version. :return: headers dictionary with the authorization field added. """ http_method = http_method.upper() (host, port) = utils.host_from_uri(uri) if hash_algorithm.lower() == 'hmac-sha-1': h = hashlib.sha1 # depends on [control=['if'], data=[]] elif hash_algorithm.lower() == 'hmac-sha-256': h = hashlib.sha256 # depends on [control=['if'], data=[]] else: raise ValueError('unknown hash algorithm') if draft == 0: nonce = nonce or '{0}:{1}'.format(utils.generate_age(issue_time), common.generate_nonce()) # depends on [control=['if'], data=[]] else: ts = common.generate_timestamp() nonce = common.generate_nonce() (sch, net, path, par, query, fra) = urlparse(uri) if query: request_uri = path + '?' + query # depends on [control=['if'], data=[]] else: request_uri = path # Hash the body/payload if body is not None and draft == 0: body = body.encode('utf-8') bodyhash = b2a_base64(h(body).digest())[:-1].decode('utf-8') # depends on [control=['if'], data=[]] else: bodyhash = '' # Create the normalized base string base = [] if draft == 0: base.append(nonce) # depends on [control=['if'], data=[]] else: base.append(ts) base.append(nonce) base.append(http_method.upper()) base.append(request_uri) base.append(host) base.append(port) if draft == 0: base.append(bodyhash) # depends on [control=['if'], data=[]] base.append(ext or '') base_string = '\n'.join(base) + '\n' # hmac struggles with unicode strings - http://bugs.python.org/issue5285 if isinstance(key, unicode_type): key = key.encode('utf-8') # depends on [control=['if'], data=[]] sign = hmac.new(key, base_string.encode('utf-8'), h) sign = b2a_base64(sign.digest())[:-1].decode('utf-8') header = [] header.append('MAC id="%s"' % token) if draft != 0: header.append('ts="%s"' % ts) # depends on [control=['if'], data=[]] header.append('nonce="%s"' % nonce) if bodyhash: header.append('bodyhash="%s"' % bodyhash) # depends on [control=['if'], data=[]] if ext: header.append('ext="%s"' % ext) # depends on [control=['if'], data=[]] header.append('mac="%s"' % sign) headers = headers or {} headers['Authorization'] = ', '.join(header) return headers
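A hedged usage sketch for the helper above, producing a MAC Authorization header for a GET request; the token, key, and nonce values are made up.

headers = prepare_mac_header(
    token='h480djs93hd8',
    uri='https://example.com/resource/1?b=1&a=2',
    key='489dks293j39',
    http_method='GET',
    nonce='264095:dj83hs9s',
    hash_algorithm='hmac-sha-1',
)
print(headers['Authorization'])
# -> MAC id="h480djs93hd8", nonce="264095:dj83hs9s", mac="<base64 digest>"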
def get_segments(self): """Get segments for analysis. Creates instance of trans.Segments.""" # Chunking chunk = {k: v.isChecked() for k, v in self.chunk.items()} lock_to_staging = self.lock_to_staging.get_value() epoch_dur = self.epoch_param['dur'].get_value() epoch_overlap = self.epoch_param['overlap_val'].value() epoch_step = None epoch = None if chunk['epoch']: if lock_to_staging: epoch = 'locked' else: epoch = 'unlocked' if self.epoch_param['step'].isChecked(): epoch_step = self.epoch_param['step_val'].get_value() if epoch_step <= 0: epoch_step = 0.1 # Which channel(s) self.chan = self.get_channels() # chan name without group if not self.chan: return # Which event type(s) chan_full = None evt_type = None if chunk['event']: if self.evt_chan_only.get_value(): chan_full = [i + ' (' + self.idx_group.currentText() + '' ')' for i in self.chan] evt_type = self.idx_evt_type.selectedItems() if not evt_type: return else: evt_type = [x.text() for x in evt_type] # Which cycle(s) cycle = self.cycle = self.get_cycles() # Which stage(s) stage = self.idx_stage.selectedItems() if not stage: stage = self.stage = None else: stage = self.stage = [ x.text() for x in self.idx_stage.selectedItems()] # Concatenation cat = {k: v.get_value() for k, v in self.cat.items()} cat = (int(cat['cycle']), int(cat['stage']), int(cat['discontinuous']), int(cat['evt_type'])) # Artefact event rejection reject_event = self.reject_event.get_value() if reject_event == 'channel-specific': chan_full = [i + ' (' + self.idx_group.currentText() + '' ')' for i in self.chan] reject_artf = True elif reject_event == 'from any channel': reject_artf = True else: reject_artf = False # Other options min_dur = self.min_dur.get_value() reject_epoch = self.reject_epoch.get_value() # Generate title for summary plot self.title = self.make_title(chan_full, cycle, stage, evt_type) segments = fetch(self.parent.info.dataset, self.parent.notes.annot, cat=cat, evt_type=evt_type, stage=stage, cycle=cycle, chan_full=chan_full, epoch=epoch, epoch_dur=epoch_dur, epoch_overlap=epoch_overlap, epoch_step=epoch_step, reject_epoch=reject_epoch, reject_artf=reject_artf, min_dur=min_dur) return segments
def function[get_segments, parameter[self]]: constant[Get segments for analysis. Creates instance of trans.Segments.] variable[chunk] assign[=] <ast.DictComp object at 0x7da1b0ec0eb0> variable[lock_to_staging] assign[=] call[name[self].lock_to_staging.get_value, parameter[]] variable[epoch_dur] assign[=] call[call[name[self].epoch_param][constant[dur]].get_value, parameter[]] variable[epoch_overlap] assign[=] call[call[name[self].epoch_param][constant[overlap_val]].value, parameter[]] variable[epoch_step] assign[=] constant[None] variable[epoch] assign[=] constant[None] if call[name[chunk]][constant[epoch]] begin[:] if name[lock_to_staging] begin[:] variable[epoch] assign[=] constant[locked] name[self].chan assign[=] call[name[self].get_channels, parameter[]] if <ast.UnaryOp object at 0x7da1b0ec15d0> begin[:] return[None] variable[chan_full] assign[=] constant[None] variable[evt_type] assign[=] constant[None] if call[name[chunk]][constant[event]] begin[:] if call[name[self].evt_chan_only.get_value, parameter[]] begin[:] variable[chan_full] assign[=] <ast.ListComp object at 0x7da1b0ec1b10> variable[evt_type] assign[=] call[name[self].idx_evt_type.selectedItems, parameter[]] if <ast.UnaryOp object at 0x7da1b0ec1360> begin[:] return[None] variable[cycle] assign[=] call[name[self].get_cycles, parameter[]] variable[stage] assign[=] call[name[self].idx_stage.selectedItems, parameter[]] if <ast.UnaryOp object at 0x7da1b0ec18a0> begin[:] variable[stage] assign[=] constant[None] variable[cat] assign[=] <ast.DictComp object at 0x7da1b0ec3ee0> variable[cat] assign[=] tuple[[<ast.Call object at 0x7da1b0ec17e0>, <ast.Call object at 0x7da1b0ec0bb0>, <ast.Call object at 0x7da1b0ec0730>, <ast.Call object at 0x7da1b0ec33a0>]] variable[reject_event] assign[=] call[name[self].reject_event.get_value, parameter[]] if compare[name[reject_event] equal[==] constant[channel-specific]] begin[:] variable[chan_full] assign[=] <ast.ListComp object at 0x7da1b0ec37f0> variable[reject_artf] assign[=] constant[True] variable[min_dur] assign[=] call[name[self].min_dur.get_value, parameter[]] variable[reject_epoch] assign[=] call[name[self].reject_epoch.get_value, parameter[]] name[self].title assign[=] call[name[self].make_title, parameter[name[chan_full], name[cycle], name[stage], name[evt_type]]] variable[segments] assign[=] call[name[fetch], parameter[name[self].parent.info.dataset, name[self].parent.notes.annot]] return[name[segments]]
keyword[def] identifier[get_segments] ( identifier[self] ): literal[string] identifier[chunk] ={ identifier[k] : identifier[v] . identifier[isChecked] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[chunk] . identifier[items] ()} identifier[lock_to_staging] = identifier[self] . identifier[lock_to_staging] . identifier[get_value] () identifier[epoch_dur] = identifier[self] . identifier[epoch_param] [ literal[string] ]. identifier[get_value] () identifier[epoch_overlap] = identifier[self] . identifier[epoch_param] [ literal[string] ]. identifier[value] () identifier[epoch_step] = keyword[None] identifier[epoch] = keyword[None] keyword[if] identifier[chunk] [ literal[string] ]: keyword[if] identifier[lock_to_staging] : identifier[epoch] = literal[string] keyword[else] : identifier[epoch] = literal[string] keyword[if] identifier[self] . identifier[epoch_param] [ literal[string] ]. identifier[isChecked] (): identifier[epoch_step] = identifier[self] . identifier[epoch_param] [ literal[string] ]. identifier[get_value] () keyword[if] identifier[epoch_step] <= literal[int] : identifier[epoch_step] = literal[int] identifier[self] . identifier[chan] = identifier[self] . identifier[get_channels] () keyword[if] keyword[not] identifier[self] . identifier[chan] : keyword[return] identifier[chan_full] = keyword[None] identifier[evt_type] = keyword[None] keyword[if] identifier[chunk] [ literal[string] ]: keyword[if] identifier[self] . identifier[evt_chan_only] . identifier[get_value] (): identifier[chan_full] =[ identifier[i] + literal[string] + identifier[self] . identifier[idx_group] . identifier[currentText] ()+ literal[string] literal[string] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[chan] ] identifier[evt_type] = identifier[self] . identifier[idx_evt_type] . identifier[selectedItems] () keyword[if] keyword[not] identifier[evt_type] : keyword[return] keyword[else] : identifier[evt_type] =[ identifier[x] . identifier[text] () keyword[for] identifier[x] keyword[in] identifier[evt_type] ] identifier[cycle] = identifier[self] . identifier[cycle] = identifier[self] . identifier[get_cycles] () identifier[stage] = identifier[self] . identifier[idx_stage] . identifier[selectedItems] () keyword[if] keyword[not] identifier[stage] : identifier[stage] = identifier[self] . identifier[stage] = keyword[None] keyword[else] : identifier[stage] = identifier[self] . identifier[stage] =[ identifier[x] . identifier[text] () keyword[for] identifier[x] keyword[in] identifier[self] . identifier[idx_stage] . identifier[selectedItems] ()] identifier[cat] ={ identifier[k] : identifier[v] . identifier[get_value] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[cat] . identifier[items] ()} identifier[cat] =( identifier[int] ( identifier[cat] [ literal[string] ]), identifier[int] ( identifier[cat] [ literal[string] ]), identifier[int] ( identifier[cat] [ literal[string] ]), identifier[int] ( identifier[cat] [ literal[string] ])) identifier[reject_event] = identifier[self] . identifier[reject_event] . identifier[get_value] () keyword[if] identifier[reject_event] == literal[string] : identifier[chan_full] =[ identifier[i] + literal[string] + identifier[self] . identifier[idx_group] . identifier[currentText] ()+ literal[string] literal[string] keyword[for] identifier[i] keyword[in] identifier[self] . 
identifier[chan] ] identifier[reject_artf] = keyword[True] keyword[elif] identifier[reject_event] == literal[string] : identifier[reject_artf] = keyword[True] keyword[else] : identifier[reject_artf] = keyword[False] identifier[min_dur] = identifier[self] . identifier[min_dur] . identifier[get_value] () identifier[reject_epoch] = identifier[self] . identifier[reject_epoch] . identifier[get_value] () identifier[self] . identifier[title] = identifier[self] . identifier[make_title] ( identifier[chan_full] , identifier[cycle] , identifier[stage] , identifier[evt_type] ) identifier[segments] = identifier[fetch] ( identifier[self] . identifier[parent] . identifier[info] . identifier[dataset] , identifier[self] . identifier[parent] . identifier[notes] . identifier[annot] , identifier[cat] = identifier[cat] , identifier[evt_type] = identifier[evt_type] , identifier[stage] = identifier[stage] , identifier[cycle] = identifier[cycle] , identifier[chan_full] = identifier[chan_full] , identifier[epoch] = identifier[epoch] , identifier[epoch_dur] = identifier[epoch_dur] , identifier[epoch_overlap] = identifier[epoch_overlap] , identifier[epoch_step] = identifier[epoch_step] , identifier[reject_epoch] = identifier[reject_epoch] , identifier[reject_artf] = identifier[reject_artf] , identifier[min_dur] = identifier[min_dur] ) keyword[return] identifier[segments]
def get_segments(self): """Get segments for analysis. Creates instance of trans.Segments.""" # Chunking chunk = {k: v.isChecked() for (k, v) in self.chunk.items()} lock_to_staging = self.lock_to_staging.get_value() epoch_dur = self.epoch_param['dur'].get_value() epoch_overlap = self.epoch_param['overlap_val'].value() epoch_step = None epoch = None if chunk['epoch']: if lock_to_staging: epoch = 'locked' # depends on [control=['if'], data=[]] else: epoch = 'unlocked' if self.epoch_param['step'].isChecked(): epoch_step = self.epoch_param['step_val'].get_value() if epoch_step <= 0: epoch_step = 0.1 # depends on [control=['if'], data=['epoch_step']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Which channel(s) self.chan = self.get_channels() # chan name without group if not self.chan: return # depends on [control=['if'], data=[]] # Which event type(s) chan_full = None evt_type = None if chunk['event']: if self.evt_chan_only.get_value(): chan_full = [i + ' (' + self.idx_group.currentText() + ')' for i in self.chan] # depends on [control=['if'], data=[]] evt_type = self.idx_evt_type.selectedItems() if not evt_type: return # depends on [control=['if'], data=[]] else: evt_type = [x.text() for x in evt_type] # depends on [control=['if'], data=[]] # Which cycle(s) cycle = self.cycle = self.get_cycles() # Which stage(s) stage = self.idx_stage.selectedItems() if not stage: stage = self.stage = None # depends on [control=['if'], data=[]] else: stage = self.stage = [x.text() for x in self.idx_stage.selectedItems()] # Concatenation cat = {k: v.get_value() for (k, v) in self.cat.items()} cat = (int(cat['cycle']), int(cat['stage']), int(cat['discontinuous']), int(cat['evt_type'])) # Artefact event rejection reject_event = self.reject_event.get_value() if reject_event == 'channel-specific': chan_full = [i + ' (' + self.idx_group.currentText() + ')' for i in self.chan] reject_artf = True # depends on [control=['if'], data=[]] elif reject_event == 'from any channel': reject_artf = True # depends on [control=['if'], data=[]] else: reject_artf = False # Other options min_dur = self.min_dur.get_value() reject_epoch = self.reject_epoch.get_value() # Generate title for summary plot self.title = self.make_title(chan_full, cycle, stage, evt_type) segments = fetch(self.parent.info.dataset, self.parent.notes.annot, cat=cat, evt_type=evt_type, stage=stage, cycle=cycle, chan_full=chan_full, epoch=epoch, epoch_dur=epoch_dur, epoch_overlap=epoch_overlap, epoch_step=epoch_step, reject_epoch=reject_epoch, reject_artf=reject_artf, min_dur=min_dur) return segments
def _strip_extra(elements): """Remove the "extra == ..." operands from the list. This is not a comprehensive implementation, but relies on an important characteristic of metadata generation: The "extra == ..." operand is always associated with an "and" operator. This means that we can simply remove the operand and the "and" operator associated with it. """ extra_indexes = [] for i, element in enumerate(elements): if isinstance(element, list): cancelled = _strip_extra(element) if cancelled: extra_indexes.append(i) elif isinstance(element, tuple) and element[0].value == "extra": extra_indexes.append(i) for i in reversed(extra_indexes): del elements[i] if i > 0 and elements[i - 1] == "and": # Remove the "and" before it. del elements[i - 1] elif elements: # This shouldn't ever happen, but is included for completeness. # If there is not an "and" before this element, try to remove the # operator after it. del elements[0] return (not elements)
def function[_strip_extra, parameter[elements]]: constant[Remove the "extra == ..." operands from the list. This is not a comprehensive implementation, but relies on an important characteristic of metadata generation: The "extra == ..." operand is always associated with an "and" operator. This means that we can simply remove the operand and the "and" operator associated with it. ] variable[extra_indexes] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b1f82d40>, <ast.Name object at 0x7da1b1f810f0>]]] in starred[call[name[enumerate], parameter[name[elements]]]] begin[:] if call[name[isinstance], parameter[name[element], name[list]]] begin[:] variable[cancelled] assign[=] call[name[_strip_extra], parameter[name[element]]] if name[cancelled] begin[:] call[name[extra_indexes].append, parameter[name[i]]] for taget[name[i]] in starred[call[name[reversed], parameter[name[extra_indexes]]]] begin[:] <ast.Delete object at 0x7da1b1f80460> if <ast.BoolOp object at 0x7da1b1f83610> begin[:] <ast.Delete object at 0x7da1b1f82e90> return[<ast.UnaryOp object at 0x7da1b1f83a30>]
keyword[def] identifier[_strip_extra] ( identifier[elements] ): literal[string] identifier[extra_indexes] =[] keyword[for] identifier[i] , identifier[element] keyword[in] identifier[enumerate] ( identifier[elements] ): keyword[if] identifier[isinstance] ( identifier[element] , identifier[list] ): identifier[cancelled] = identifier[_strip_extra] ( identifier[element] ) keyword[if] identifier[cancelled] : identifier[extra_indexes] . identifier[append] ( identifier[i] ) keyword[elif] identifier[isinstance] ( identifier[element] , identifier[tuple] ) keyword[and] identifier[element] [ literal[int] ]. identifier[value] == literal[string] : identifier[extra_indexes] . identifier[append] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[reversed] ( identifier[extra_indexes] ): keyword[del] identifier[elements] [ identifier[i] ] keyword[if] identifier[i] > literal[int] keyword[and] identifier[elements] [ identifier[i] - literal[int] ]== literal[string] : keyword[del] identifier[elements] [ identifier[i] - literal[int] ] keyword[elif] identifier[elements] : keyword[del] identifier[elements] [ literal[int] ] keyword[return] ( keyword[not] identifier[elements] )
def _strip_extra(elements): """Remove the "extra == ..." operands from the list. This is not a comprehensive implementation, but relies on an important characteristic of metadata generation: The "extra == ..." operand is always associated with an "and" operator. This means that we can simply remove the operand and the "and" operator associated with it. """ extra_indexes = [] for (i, element) in enumerate(elements): if isinstance(element, list): cancelled = _strip_extra(element) if cancelled: extra_indexes.append(i) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(element, tuple) and element[0].value == 'extra': extra_indexes.append(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] for i in reversed(extra_indexes): del elements[i] if i > 0 and elements[i - 1] == 'and': # Remove the "and" before it. del elements[i - 1] # depends on [control=['if'], data=[]] elif elements: # This shouldn't ever happen, but is included for completeness. # If there is not an "and" before this element, try to remove the # operator after it. del elements[0] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] return not elements
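To see what _strip_extra does to a parsed marker, here is a minimal sketch; the namedtuple is a stand-in for the real parser nodes (e.g. packaging.markers Variable/Op/Value objects), since .value is the only attribute the function inspects.

from collections import namedtuple

# Hypothetical stand-in for marker parser nodes; only .value is used.
Node = namedtuple('Node', 'value')

elements = [
    (Node('python_version'), Node('>='), Node('3.6')),
    'and',
    (Node('extra'), Node('=='), Node('security')),
]
print(_strip_extra(elements))  # False: a non-extra clause remains
print(elements)                # the extra clause and its 'and' are gone

only_extra = [(Node('extra'), Node('=='), Node('tests'))]
print(_strip_extra(only_extra))  # True: the whole marker cancels out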
def create_bot(self, name, avatar_url=None, callback_url=None, dm_notification=None, **kwargs): """Create a new bot in a particular group. :param str name: bot name :param str avatar_url: the URL of an image to use as an avatar :param str callback_url: a POST-back URL for each new message :param bool dm_notification: whether to POST-back for direct messages? :return: the new bot :rtype: :class:`~groupy.api.bots.Bot` """ return self._bots.create(name=name, group_id=self.group_id, avatar_url=avatar_url, callback_url=callback_url, dm_notification=dm_notification)
def function[create_bot, parameter[self, name, avatar_url, callback_url, dm_notification]]: constant[Create a new bot in a particular group. :param str name: bot name :param str avatar_url: the URL of an image to use as an avatar :param str callback_url: a POST-back URL for each new message :param bool dm_notification: whether to POST-back for direct messages? :return: the new bot :rtype: :class:`~groupy.api.bots.Bot` ] return[call[name[self]._bots.create, parameter[]]]
keyword[def] identifier[create_bot] ( identifier[self] , identifier[name] , identifier[avatar_url] = keyword[None] , identifier[callback_url] = keyword[None] , identifier[dm_notification] = keyword[None] , ** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[_bots] . identifier[create] ( identifier[name] = identifier[name] , identifier[group_id] = identifier[self] . identifier[group_id] , identifier[avatar_url] = identifier[avatar_url] , identifier[callback_url] = identifier[callback_url] , identifier[dm_notification] = identifier[dm_notification] )
def create_bot(self, name, avatar_url=None, callback_url=None, dm_notification=None, **kwargs): """Create a new bot in a particular group. :param str name: bot name :param str avatar_url: the URL of an image to use as an avatar :param str callback_url: a POST-back URL for each new message :param bool dm_notification: whether to POST-back for direct messages? :return: the new bot :rtype: :class:`~groupy.api.bots.Bot` """ return self._bots.create(name=name, group_id=self.group_id, avatar_url=avatar_url, callback_url=callback_url, dm_notification=dm_notification)
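A hedged usage sketch for create_bot, assuming it lives on groupy's Group objects as the self.group_id/self._bots attributes suggest; the token is a placeholder and bot_id is assumed to be the field the GroupMe API returns.

from groupy.client import Client

client = Client.from_token('YOUR_API_TOKEN')   # placeholder token
group = next(iter(client.groups.list()))       # any group you belong to
bot = group.create_bot(name='echo-bot',
                       callback_url='https://example.com/groupme/callback',
                       dm_notification=False)
print(bot.bot_id)                              # assumed response attribute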
def open_only(func): """ Decorator for `.Channel` methods which performs an openness check. :raises: `.SSHException` -- If the wrapped method is called on an unopened `.Channel`. """ @wraps(func) def _check(self, *args, **kwds): if ( self.closed or self.eof_received or self.eof_sent or not self.active ): raise SSHException("Channel is not open") return func(self, *args, **kwds) return _check
def function[open_only, parameter[func]]: constant[ Decorator for `.Channel` methods which performs an openness check. :raises: `.SSHException` -- If the wrapped method is called on an unopened `.Channel`. ] def function[_check, parameter[self]]: if <ast.BoolOp object at 0x7da1b2198fd0> begin[:] <ast.Raise object at 0x7da1b2198fa0> return[call[name[func], parameter[name[self], <ast.Starred object at 0x7da1b219a9b0>]]] return[name[_check]]
keyword[def] identifier[open_only] ( identifier[func] ): literal[string] @ identifier[wraps] ( identifier[func] ) keyword[def] identifier[_check] ( identifier[self] ,* identifier[args] ,** identifier[kwds] ): keyword[if] ( identifier[self] . identifier[closed] keyword[or] identifier[self] . identifier[eof_received] keyword[or] identifier[self] . identifier[eof_sent] keyword[or] keyword[not] identifier[self] . identifier[active] ): keyword[raise] identifier[SSHException] ( literal[string] ) keyword[return] identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwds] ) keyword[return] identifier[_check]
def open_only(func): """ Decorator for `.Channel` methods which performs an openness check. :raises: `.SSHException` -- If the wrapped method is called on an unopened `.Channel`. """ @wraps(func) def _check(self, *args, **kwds): if self.closed or self.eof_received or self.eof_sent or (not self.active): raise SSHException('Channel is not open') # depends on [control=['if'], data=[]] return func(self, *args, **kwds) return _check
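Because open_only only inspects four attributes (closed, eof_received, eof_sent, active), it is easy to exercise on a stand-in object. The sketch assumes the decorator above is in scope along with functools.wraps and paramiko's SSHException, which it references from its defining module.

from paramiko.ssh_exception import SSHException  # what the decorator raises

class FakeChannel:
    """Stand-in exposing the four flags open_only checks."""
    def __init__(self, active=True):
        self.closed = False
        self.eof_received = False
        self.eof_sent = False
        self.active = active

    @open_only
    def send(self, data):
        return len(data)

print(FakeChannel().send(b'hi'))         # 2
try:
    FakeChannel(active=False).send(b'hi')
except SSHException as exc:
    print(exc)                           # Channel is not open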
def calculate_item_depth(self, tree_alias, item_id, depth=0): """Calculates depth of the item in the tree. :param str|unicode tree_alias: :param int item_id: :param int depth: :rtype: int """ item = self.get_item_by_id(tree_alias, item_id) if hasattr(item, 'depth'): depth = item.depth + depth else: if item.parent is not None: depth = self.calculate_item_depth(tree_alias, item.parent.id, depth + 1) return depth
def function[calculate_item_depth, parameter[self, tree_alias, item_id, depth]]: constant[Calculates depth of the item in the tree. :param str|unicode tree_alias: :param int item_id: :param int depth: :rtype: int ] variable[item] assign[=] call[name[self].get_item_by_id, parameter[name[tree_alias], name[item_id]]] if call[name[hasattr], parameter[name[item], constant[depth]]] begin[:] variable[depth] assign[=] binary_operation[name[item].depth + name[depth]] return[name[depth]]
keyword[def] identifier[calculate_item_depth] ( identifier[self] , identifier[tree_alias] , identifier[item_id] , identifier[depth] = literal[int] ): literal[string] identifier[item] = identifier[self] . identifier[get_item_by_id] ( identifier[tree_alias] , identifier[item_id] ) keyword[if] identifier[hasattr] ( identifier[item] , literal[string] ): identifier[depth] = identifier[item] . identifier[depth] + identifier[depth] keyword[else] : keyword[if] identifier[item] . identifier[parent] keyword[is] keyword[not] keyword[None] : identifier[depth] = identifier[self] . identifier[calculate_item_depth] ( identifier[tree_alias] , identifier[item] . identifier[parent] . identifier[id] , identifier[depth] + literal[int] ) keyword[return] identifier[depth]
def calculate_item_depth(self, tree_alias, item_id, depth=0): """Calculates depth of the item in the tree. :param str|unicode tree_alias: :param int item_id: :param int depth: :rtype: int """ item = self.get_item_by_id(tree_alias, item_id) if hasattr(item, 'depth'): depth = item.depth + depth # depends on [control=['if'], data=[]] elif item.parent is not None: depth = self.calculate_item_depth(tree_alias, item.parent.id, depth + 1) # depends on [control=['if'], data=[]] return depth
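calculate_item_depth needs only an item lookup and parent links, so a toy tree is enough to demonstrate the recursion; the Item and FakeMenu classes below are invented for the sketch.

class Item:
    def __init__(self, id, parent=None):
        self.id, self.parent = id, parent   # deliberately no .depth attribute

class FakeMenu:
    def __init__(self, items):
        self._items = {i.id: i for i in items}
    def get_item_by_id(self, tree_alias, item_id):
        return self._items[item_id]
    # reuse the module-level function above as a method
    calculate_item_depth = calculate_item_depth

root = Item(1)
child = Item(2, parent=root)
leaf = Item(3, parent=child)
menu = FakeMenu([root, child, leaf])
print(menu.calculate_item_depth('main', 3))  # 2: leaf -> child -> root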
def float_with_multiplier(string): """Convert string with optional k, M, G, T multiplier to float""" match = re_float_with_multiplier.search(string) if not match or not match.group('num'): raise ValueError('String "{}" is not numeric!'.format(string)) num = float(match.group('num')) multi = match.group('multi') if multi: try: num *= multipliers[multi] except KeyError: raise ValueError('Unknown multiplier: {}'.format(multi)) return num
def function[float_with_multiplier, parameter[string]]: constant[Convert string with optional k, M, G, T multiplier to float] variable[match] assign[=] call[name[re_float_with_multiplier].search, parameter[name[string]]] if <ast.BoolOp object at 0x7da1b023c610> begin[:] <ast.Raise object at 0x7da1b023ca90> variable[num] assign[=] call[name[float], parameter[call[name[match].group, parameter[constant[num]]]]] variable[multi] assign[=] call[name[match].group, parameter[constant[multi]]] if name[multi] begin[:] <ast.Try object at 0x7da1b023e380> return[name[num]]
keyword[def] identifier[float_with_multiplier] ( identifier[string] ): literal[string] identifier[match] = identifier[re_float_with_multiplier] . identifier[search] ( identifier[string] ) keyword[if] keyword[not] identifier[match] keyword[or] keyword[not] identifier[match] . identifier[group] ( literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[string] )) identifier[num] = identifier[float] ( identifier[match] . identifier[group] ( literal[string] )) identifier[multi] = identifier[match] . identifier[group] ( literal[string] ) keyword[if] identifier[multi] : keyword[try] : identifier[num] *= identifier[multipliers] [ identifier[multi] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[multi] )) keyword[return] identifier[num]
def float_with_multiplier(string): """Convert string with optional k, M, G, T multiplier to float""" match = re_float_with_multiplier.search(string) if not match or not match.group('num'): raise ValueError('String "{}" is not numeric!'.format(string)) # depends on [control=['if'], data=[]] num = float(match.group('num')) multi = match.group('multi') if multi: try: num *= multipliers[multi] # depends on [control=['try'], data=[]] except KeyError: raise ValueError('Unknown multiplier: {}'.format(multi)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] return num
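float_with_multiplier leans on two module-level globals that are not part of this row; the regex and table below are plausible reconstructions (an assumption), included only so the sketch runs on its own.

import re

# Plausible stand-ins for the module's globals, not the originals.
re_float_with_multiplier = re.compile(
    r'(?P<num>-?\d+(?:\.\d+)?)\s*(?P<multi>[kMGT])?')
multipliers = {'k': 1e3, 'M': 1e6, 'G': 1e9, 'T': 1e12}

print(float_with_multiplier('1.5k'))  # 1500.0
print(float_with_multiplier('2G'))    # 2000000000.0
float_with_multiplier('n/a')          # raises ValueError: not numeric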
def get_dev_mac_learn(devid, auth, url):
    '''
    function takes devid of specific device and issues a RESTFUL call to gather the current IP-MAC
    learning entries on the target device.
    :param devid: int value of the target device
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: list of dict objects which contain the mac learn table of target device id
    :rtype: list
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.device import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> dev_mac_learn = get_dev_mac_learn('10', auth.creds, auth.url)
    >>> assert type(dev_mac_learn) is list
    >>> assert 'deviceId' in dev_mac_learn[0]
    '''
    get_dev_mac_learn_url = '/imcrs/res/access/ipMacLearn/' + str(devid)
    f_url = url + get_dev_mac_learn_url
    try:
        r = requests.get(f_url, auth=auth, headers=HEADERS)
        if r.status_code == 200:
            if len(r.text) < 1:
                mac_learn_query = {}
                return mac_learn_query
            else:
                mac_learn_query = (json.loads(r.text))['ipMacLearnResult']
                return mac_learn_query
    except requests.exceptions.RequestException as e:
        return "Error:\n" + str(e) + " get_dev_mac_learn: An Error has occurred"
def function[get_dev_mac_learn, parameter[devid, auth, url]]: constant[ function takes devid of specific device and issues a RESTFUL call to gather the current IP-MAC learning entries on the target device. :param devid: int value of the target device :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: list of dict objects which contain the mac learn table of target device id :rtype: list >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.device import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> dev_mac_learn = get_dev_mac_learn('10', auth.creds, auth.url) >>> assert type(dev_mac_learn) is list >>> assert 'deviceId' in dev_mac_learn[0] ] variable[get_dev_mac_learn_url] assign[=] binary_operation[constant[/imcrs/res/access/ipMacLearn/] + call[name[str], parameter[name[devid]]]] variable[f_url] assign[=] binary_operation[name[url] + name[get_dev_mac_learn_url]] <ast.Try object at 0x7da18f09ca30>
keyword[def] identifier[get_dev_mac_learn] ( identifier[devid] , identifier[auth] , identifier[url] ): literal[string] identifier[get_dev_mac_learn_url] = literal[string] + identifier[str] ( identifier[devid] ) identifier[f_url] = identifier[url] + identifier[get_dev_mac_learn_url] keyword[try] : identifier[r] = identifier[requests] . identifier[get] ( identifier[f_url] , identifier[auth] = identifier[auth] , identifier[headers] = identifier[HEADERS] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : keyword[if] identifier[len] ( identifier[r] . identifier[text] )< literal[int] : identifier[mac_learn_query] ={} keyword[return] identifier[mac_learn_query] keyword[else] : identifier[mac_learn_query] =( identifier[json] . identifier[loads] ( identifier[r] . identifier[text] ))[ literal[string] ] keyword[return] identifier[mac_learn_query] keyword[except] identifier[requests] . identifier[exceptions] . identifier[RequestException] keyword[as] identifier[e] : keyword[return] literal[string] + identifier[str] ( identifier[e] )+ literal[string]
def get_dev_mac_learn(devid, auth, url):
    """
    function takes devid of specific device and issues a RESTFUL call to gather the current IP-MAC
    learning entries on the target device.
    :param devid: int value of the target device
    :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class
    :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass
    :return: list of dict objects which contain the mac learn table of target device id
    :rtype: list
    >>> from pyhpeimc.auth import *
    >>> from pyhpeimc.plat.device import *
    >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin")
    >>> dev_mac_learn = get_dev_mac_learn('10', auth.creds, auth.url)
    >>> assert type(dev_mac_learn) is list
    >>> assert 'deviceId' in dev_mac_learn[0]
    """
    get_dev_mac_learn_url = '/imcrs/res/access/ipMacLearn/' + str(devid)
    f_url = url + get_dev_mac_learn_url
    try:
        r = requests.get(f_url, auth=auth, headers=HEADERS)
        if r.status_code == 200:
            if len(r.text) < 1:
                mac_learn_query = {}
                return mac_learn_query # depends on [control=['if'], data=[]]
            else:
                mac_learn_query = json.loads(r.text)['ipMacLearnResult']
                return mac_learn_query # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
    except requests.exceptions.RequestException as e:
        return 'Error:\n' + str(e) + ' get_dev_mac_learn: An Error has occurred' # depends on [control=['except'], data=['e']]
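One caller-side caution for this helper: success yields parsed JSON (an empty dict or the ipMacLearnResult payload), a non-200 response falls through and returns None, and a transport failure returns an error string. The sketch below is not self-contained: it needs a reachable IMC server and the IMCAuth setup from the doctest above.

result = get_dev_mac_learn('10', auth.creds, auth.url)
if isinstance(result, str):           # the helper signals failure via a string
    raise RuntimeError(result)
if result:                            # payload parsed from ipMacLearnResult
    for entry in result:
        print(entry)                  # field names depend on the IMC version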
def from_json(buffer, auto_flatten=True, raise_for_index=True): """Parses a JSON string into either a view or an index. If auto flatten is enabled a sourcemap index that does not contain external references is automatically flattened into a view. By default if an index would be returned an `IndexedSourceMap` error is raised instead which holds the index. """ buffer = to_bytes(buffer) view_out = _ffi.new('lsm_view_t **') index_out = _ffi.new('lsm_index_t **') buffer = to_bytes(buffer) rv = rustcall( _lib.lsm_view_or_index_from_json, buffer, len(buffer), view_out, index_out) if rv == 1: return View._from_ptr(view_out[0]) elif rv == 2: index = Index._from_ptr(index_out[0]) if auto_flatten and index.can_flatten: return index.into_view() if raise_for_index: raise IndexedSourceMap('Unexpected source map index', index=index) return index else: raise AssertionError('Unknown response from C ABI (%r)' % rv)
def function[from_json, parameter[buffer, auto_flatten, raise_for_index]]: constant[Parses a JSON string into either a view or an index. If auto flatten is enabled a sourcemap index that does not contain external references is automatically flattened into a view. By default if an index would be returned an `IndexedSourceMap` error is raised instead which holds the index. ] variable[buffer] assign[=] call[name[to_bytes], parameter[name[buffer]]] variable[view_out] assign[=] call[name[_ffi].new, parameter[constant[lsm_view_t **]]] variable[index_out] assign[=] call[name[_ffi].new, parameter[constant[lsm_index_t **]]] variable[buffer] assign[=] call[name[to_bytes], parameter[name[buffer]]] variable[rv] assign[=] call[name[rustcall], parameter[name[_lib].lsm_view_or_index_from_json, name[buffer], call[name[len], parameter[name[buffer]]], name[view_out], name[index_out]]] if compare[name[rv] equal[==] constant[1]] begin[:] return[call[name[View]._from_ptr, parameter[call[name[view_out]][constant[0]]]]]
keyword[def] identifier[from_json] ( identifier[buffer] , identifier[auto_flatten] = keyword[True] , identifier[raise_for_index] = keyword[True] ): literal[string] identifier[buffer] = identifier[to_bytes] ( identifier[buffer] ) identifier[view_out] = identifier[_ffi] . identifier[new] ( literal[string] ) identifier[index_out] = identifier[_ffi] . identifier[new] ( literal[string] ) identifier[buffer] = identifier[to_bytes] ( identifier[buffer] ) identifier[rv] = identifier[rustcall] ( identifier[_lib] . identifier[lsm_view_or_index_from_json] , identifier[buffer] , identifier[len] ( identifier[buffer] ), identifier[view_out] , identifier[index_out] ) keyword[if] identifier[rv] == literal[int] : keyword[return] identifier[View] . identifier[_from_ptr] ( identifier[view_out] [ literal[int] ]) keyword[elif] identifier[rv] == literal[int] : identifier[index] = identifier[Index] . identifier[_from_ptr] ( identifier[index_out] [ literal[int] ]) keyword[if] identifier[auto_flatten] keyword[and] identifier[index] . identifier[can_flatten] : keyword[return] identifier[index] . identifier[into_view] () keyword[if] identifier[raise_for_index] : keyword[raise] identifier[IndexedSourceMap] ( literal[string] , identifier[index] = identifier[index] ) keyword[return] identifier[index] keyword[else] : keyword[raise] identifier[AssertionError] ( literal[string] % identifier[rv] )
def from_json(buffer, auto_flatten=True, raise_for_index=True): """Parses a JSON string into either a view or an index. If auto flatten is enabled a sourcemap index that does not contain external references is automatically flattened into a view. By default if an index would be returned an `IndexedSourceMap` error is raised instead which holds the index. """ buffer = to_bytes(buffer) view_out = _ffi.new('lsm_view_t **') index_out = _ffi.new('lsm_index_t **') buffer = to_bytes(buffer) rv = rustcall(_lib.lsm_view_or_index_from_json, buffer, len(buffer), view_out, index_out) if rv == 1: return View._from_ptr(view_out[0]) # depends on [control=['if'], data=[]] elif rv == 2: index = Index._from_ptr(index_out[0]) if auto_flatten and index.can_flatten: return index.into_view() # depends on [control=['if'], data=[]] if raise_for_index: raise IndexedSourceMap('Unexpected source map index', index=index) # depends on [control=['if'], data=[]] return index # depends on [control=['if'], data=[]] else: raise AssertionError('Unknown response from C ABI (%r)' % rv)
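A minimal usage sketch; the import path is a guess based on this module's layout (libsourcemap-style bindings over a C ABI), and the inline map is a tiny but valid version-3 source map that parses straight to a View.

from libsourcemap import from_json   # assumed import path

min_map = b'{"version":3,"sources":["app.js"],"names":[],"mappings":"AAAA"}'
view = from_json(min_map)
print(view)   # a View: a plain map (not an index) never hits the index branch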
def validate_create_package(package_format, owner, repo, **kwargs): """Validate parameters for creating a package.""" client = get_packages_api() with catch_raise_api_exception(): check = getattr( client, "packages_validate_upload_%s_with_http_info" % package_format ) _, _, headers = check( owner=owner, repo=repo, data=make_create_payload(**kwargs) ) ratelimits.maybe_rate_limit(client, headers) return True
def function[validate_create_package, parameter[package_format, owner, repo]]: constant[Validate parameters for creating a package.] variable[client] assign[=] call[name[get_packages_api], parameter[]] with call[name[catch_raise_api_exception], parameter[]] begin[:] variable[check] assign[=] call[name[getattr], parameter[name[client], binary_operation[constant[packages_validate_upload_%s_with_http_info] <ast.Mod object at 0x7da2590d6920> name[package_format]]]] <ast.Tuple object at 0x7da1b19e79a0> assign[=] call[name[check], parameter[]] call[name[ratelimits].maybe_rate_limit, parameter[name[client], name[headers]]] return[constant[True]]
keyword[def] identifier[validate_create_package] ( identifier[package_format] , identifier[owner] , identifier[repo] ,** identifier[kwargs] ): literal[string] identifier[client] = identifier[get_packages_api] () keyword[with] identifier[catch_raise_api_exception] (): identifier[check] = identifier[getattr] ( identifier[client] , literal[string] % identifier[package_format] ) identifier[_] , identifier[_] , identifier[headers] = identifier[check] ( identifier[owner] = identifier[owner] , identifier[repo] = identifier[repo] , identifier[data] = identifier[make_create_payload] (** identifier[kwargs] ) ) identifier[ratelimits] . identifier[maybe_rate_limit] ( identifier[client] , identifier[headers] ) keyword[return] keyword[True]
def validate_create_package(package_format, owner, repo, **kwargs): """Validate parameters for creating a package.""" client = get_packages_api() with catch_raise_api_exception(): check = getattr(client, 'packages_validate_upload_%s_with_http_info' % package_format) (_, _, headers) = check(owner=owner, repo=repo, data=make_create_payload(**kwargs)) # depends on [control=['with'], data=[]] ratelimits.maybe_rate_limit(client, headers) return True
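A hedged sketch of calling the validator; 'acme'/'staging' are placeholder owner and repo slugs, a configured Cloudsmith API client is assumed, and the extra keyword argument is illustrative only, since make_create_payload defines the real field names.

ok = validate_create_package(
    'python',                                   # package_format
    owner='acme',
    repo='staging',
    package_file='dist/example-1.0.0.tar.gz',   # hypothetical field
)
print(ok)   # True on acceptance; API errors surface via catch_raise_api_exception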
def convert(value): """Converts to a C language appropriate identifier format. """ s0 = "Sbp" + value if value in COLLISIONS else value s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', s0) return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower() + "_t"
def function[convert, parameter[value]]: constant[Converts to a C language appropriate identifier format. ] variable[s0] assign[=] <ast.IfExp object at 0x7da1b0510e80> variable[s1] assign[=] call[name[re].sub, parameter[constant[(.)([A-Z][a-z]+)], constant[\1_\2], name[s0]]] return[binary_operation[call[call[name[re].sub, parameter[constant[([a-z0-9])([A-Z])], constant[\1_\2], name[s1]]].lower, parameter[]] + constant[_t]]]
keyword[def] identifier[convert] ( identifier[value] ): literal[string] identifier[s0] = literal[string] + identifier[value] keyword[if] identifier[value] keyword[in] identifier[COLLISIONS] keyword[else] identifier[value] identifier[s1] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[s0] ) keyword[return] identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[s1] ). identifier[lower] ()+ literal[string]
def convert(value): """Converts to a C language appropriate identifier format. """ s0 = 'Sbp' + value if value in COLLISIONS else value s1 = re.sub('(.)([A-Z][a-z]+)', '\\1_\\2', s0) return re.sub('([a-z0-9])([A-Z])', '\\1_\\2', s1).lower() + '_t'
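convert is a pure function apart from the module-level COLLISIONS container; an empty set stands in for it below so the snippet runs on its own.

import re

COLLISIONS = set()   # stand-in; the real module lists names needing an Sbp prefix

print(convert('MsgBaselineECEF'))   # msg_baseline_ecef_t
print(convert('GPSTime'))           # gps_time_t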
def parse(self): """Parse the table data string into records.""" self.parse_fields() records = [] for line in self.t['data'].split('\n'): if EMPTY_ROW.match(line): continue row = [self.autoconvert(line[start_field:end_field+1]) for start_field, end_field in self.fields] records.append(tuple(row)) self.records = records
def function[parse, parameter[self]]: constant[Parse the table data string into records.] call[name[self].parse_fields, parameter[]] variable[records] assign[=] list[[]] for taget[name[line]] in starred[call[call[name[self].t][constant[data]].split, parameter[constant[ ]]]] begin[:] if call[name[EMPTY_ROW].match, parameter[name[line]]] begin[:] continue variable[row] assign[=] <ast.ListComp object at 0x7da18fe924d0> call[name[records].append, parameter[call[name[tuple], parameter[name[row]]]]] name[self].records assign[=] name[records]
keyword[def] identifier[parse] ( identifier[self] ): literal[string] identifier[self] . identifier[parse_fields] () identifier[records] =[] keyword[for] identifier[line] keyword[in] identifier[self] . identifier[t] [ literal[string] ]. identifier[split] ( literal[string] ): keyword[if] identifier[EMPTY_ROW] . identifier[match] ( identifier[line] ): keyword[continue] identifier[row] =[ identifier[self] . identifier[autoconvert] ( identifier[line] [ identifier[start_field] : identifier[end_field] + literal[int] ]) keyword[for] identifier[start_field] , identifier[end_field] keyword[in] identifier[self] . identifier[fields] ] identifier[records] . identifier[append] ( identifier[tuple] ( identifier[row] )) identifier[self] . identifier[records] = identifier[records]
def parse(self): """Parse the table data string into records.""" self.parse_fields() records = [] for line in self.t['data'].split('\n'): if EMPTY_ROW.match(line): continue # depends on [control=['if'], data=[]] row = [self.autoconvert(line[start_field:end_field + 1]) for (start_field, end_field) in self.fields] records.append(tuple(row)) # depends on [control=['for'], data=['line']] self.records = records
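parse depends on parse_fields(), EMPTY_ROW and autoconvert() from the same class, so it cannot run from this row alone; the standalone snippet below, with invented column boundaries, reproduces just the fixed-width slicing at its core.

# Invented (start, end) inclusive column indices and a sample row.
fields = [(0, 3), (5, 9)]
line = 'abc  12.5'
row = [line[start:end + 1].strip() for start, end in fields]
print(row)   # ['abc', '12.5']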
def solve(self, print_solution=False): """Solves the current integer program and returns the computed layout. Args: print_solution: An optional boolean indicating whether to print the full solution in human-readable format. Returns: The computed layout (as a string). Raises: SolverError: the internal solver could not find a solution, or the solution found is infeasible. """ # Solve and see how well the solver did. self._cp_solver = cp_model.CpSolver() status = self._cp_solver.Solve(self._model) if status != cp_model.OPTIMAL: if status == cp_model.FEASIBLE: logging.warning("A potentially suboptimal solution was found.") else: logging.error("Solver returned status %d.", status) raise SolverError("The solver could not solve the problem and returned " "status {}.".format(status)) # TODO(joshuawang): Verify the solver's solution. if print_solution: print_cp_model_solution.print_solution(self._model, self._cp_solver) # Reconstruct layout from solution. layout = [] for mtf_dimension_name in ( self._layout_validator.splittable_mtf_dimension_names): for mesh_dimension_name in ( self._layout_validator.mesh_dimension_name_to_size): value = self._cp_solver.Value(self._global_vars[(mtf_dimension_name, mesh_dimension_name)]) if value: # Value is integer. layout.append(mtf_dimension_name + ":" + mesh_dimension_name) layout.sort() return ";".join(layout)
def function[solve, parameter[self, print_solution]]: constant[Solves the current integer program and returns the computed layout. Args: print_solution: An optional boolean indicating whether to print the full solution in human-readable format. Returns: The computed layout (as a string). Raises: SolverError: the internal solver could not find a solution, or the solution found is infeasible. ] name[self]._cp_solver assign[=] call[name[cp_model].CpSolver, parameter[]] variable[status] assign[=] call[name[self]._cp_solver.Solve, parameter[name[self]._model]] if compare[name[status] not_equal[!=] name[cp_model].OPTIMAL] begin[:] if compare[name[status] equal[==] name[cp_model].FEASIBLE] begin[:] call[name[logging].warning, parameter[constant[A potentially suboptimal solution was found.]]] if name[print_solution] begin[:] call[name[print_cp_model_solution].print_solution, parameter[name[self]._model, name[self]._cp_solver]] variable[layout] assign[=] list[[]] for taget[name[mtf_dimension_name]] in starred[name[self]._layout_validator.splittable_mtf_dimension_names] begin[:] for taget[name[mesh_dimension_name]] in starred[name[self]._layout_validator.mesh_dimension_name_to_size] begin[:] variable[value] assign[=] call[name[self]._cp_solver.Value, parameter[call[name[self]._global_vars][tuple[[<ast.Name object at 0x7da207f02a40>, <ast.Name object at 0x7da207f011b0>]]]]] if name[value] begin[:] call[name[layout].append, parameter[binary_operation[binary_operation[name[mtf_dimension_name] + constant[:]] + name[mesh_dimension_name]]]] call[name[layout].sort, parameter[]] return[call[constant[;].join, parameter[name[layout]]]]
keyword[def] identifier[solve] ( identifier[self] , identifier[print_solution] = keyword[False] ): literal[string] identifier[self] . identifier[_cp_solver] = identifier[cp_model] . identifier[CpSolver] () identifier[status] = identifier[self] . identifier[_cp_solver] . identifier[Solve] ( identifier[self] . identifier[_model] ) keyword[if] identifier[status] != identifier[cp_model] . identifier[OPTIMAL] : keyword[if] identifier[status] == identifier[cp_model] . identifier[FEASIBLE] : identifier[logging] . identifier[warning] ( literal[string] ) keyword[else] : identifier[logging] . identifier[error] ( literal[string] , identifier[status] ) keyword[raise] identifier[SolverError] ( literal[string] literal[string] . identifier[format] ( identifier[status] )) keyword[if] identifier[print_solution] : identifier[print_cp_model_solution] . identifier[print_solution] ( identifier[self] . identifier[_model] , identifier[self] . identifier[_cp_solver] ) identifier[layout] =[] keyword[for] identifier[mtf_dimension_name] keyword[in] ( identifier[self] . identifier[_layout_validator] . identifier[splittable_mtf_dimension_names] ): keyword[for] identifier[mesh_dimension_name] keyword[in] ( identifier[self] . identifier[_layout_validator] . identifier[mesh_dimension_name_to_size] ): identifier[value] = identifier[self] . identifier[_cp_solver] . identifier[Value] ( identifier[self] . identifier[_global_vars] [( identifier[mtf_dimension_name] , identifier[mesh_dimension_name] )]) keyword[if] identifier[value] : identifier[layout] . identifier[append] ( identifier[mtf_dimension_name] + literal[string] + identifier[mesh_dimension_name] ) identifier[layout] . identifier[sort] () keyword[return] literal[string] . identifier[join] ( identifier[layout] )
def solve(self, print_solution=False): """Solves the current integer program and returns the computed layout. Args: print_solution: An optional boolean indicating whether to print the full solution in human-readable format. Returns: The computed layout (as a string). Raises: SolverError: the internal solver could not find a solution, or the solution found is infeasible. """ # Solve and see how well the solver did. self._cp_solver = cp_model.CpSolver() status = self._cp_solver.Solve(self._model) if status != cp_model.OPTIMAL: if status == cp_model.FEASIBLE: logging.warning('A potentially suboptimal solution was found.') # depends on [control=['if'], data=[]] else: logging.error('Solver returned status %d.', status) raise SolverError('The solver could not solve the problem and returned status {}.'.format(status)) # depends on [control=['if'], data=['status']] # TODO(joshuawang): Verify the solver's solution. if print_solution: print_cp_model_solution.print_solution(self._model, self._cp_solver) # depends on [control=['if'], data=[]] # Reconstruct layout from solution. layout = [] for mtf_dimension_name in self._layout_validator.splittable_mtf_dimension_names: for mesh_dimension_name in self._layout_validator.mesh_dimension_name_to_size: value = self._cp_solver.Value(self._global_vars[mtf_dimension_name, mesh_dimension_name]) if value: # Value is integer. layout.append(mtf_dimension_name + ':' + mesh_dimension_name) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['mesh_dimension_name']] # depends on [control=['for'], data=['mtf_dimension_name']] layout.sort() return ';'.join(layout)
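solve drives or-tools' CP-SAT through the standard CpSolver pattern; the tiny standalone model below shows that Solve()/Value() flow without the mesh-layout variables, which would need the full optimizer setup.

from ortools.sat.python import cp_model

model = cp_model.CpModel()
x = model.NewBoolVar('x')
y = model.NewBoolVar('y')
model.Add(x + y == 1)
model.Maximize(x)

solver = cp_model.CpSolver()
status = solver.Solve(model)
assert status == cp_model.OPTIMAL
print(solver.Value(x), solver.Value(y))   # 1 0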
def sub(cls, *mixins_and_dicts, **values): """Create and instantiate a sub-injector. Mixins and local value dicts can be passed in as arguments. Local values can also be passed in as keyword arguments. """ class SubInjector(cls): pass mixins = [ x for x in mixins_and_dicts if isinstance(x, type) ] if mixins: SubInjector.__bases__ = tuple(mixins) + SubInjector.__bases__ dicts = [ x for x in mixins_and_dicts if not isinstance(x, type) ] for d in reversed(dicts): for k,v in d.items(): if k not in values: values[k] = v for k,v in values.items(): SubInjector.value(k, v) return SubInjector()
def function[sub, parameter[cls]]: constant[Create and instantiate a sub-injector. Mixins and local value dicts can be passed in as arguments. Local values can also be passed in as keyword arguments. ] class class[SubInjector, parameter[]] begin[:] pass variable[mixins] assign[=] <ast.ListComp object at 0x7da20c6e7520> if name[mixins] begin[:] name[SubInjector].__bases__ assign[=] binary_operation[call[name[tuple], parameter[name[mixins]]] + name[SubInjector].__bases__] variable[dicts] assign[=] <ast.ListComp object at 0x7da20c6e5f30> for taget[name[d]] in starred[call[name[reversed], parameter[name[dicts]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da20c6e7970>, <ast.Name object at 0x7da20c6e6770>]]] in starred[call[name[d].items, parameter[]]] begin[:] if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[values]] begin[:] call[name[values]][name[k]] assign[=] name[v] for taget[tuple[[<ast.Name object at 0x7da18eb55420>, <ast.Name object at 0x7da18eb56e90>]]] in starred[call[name[values].items, parameter[]]] begin[:] call[name[SubInjector].value, parameter[name[k], name[v]]] return[call[name[SubInjector], parameter[]]]
keyword[def] identifier[sub] ( identifier[cls] ,* identifier[mixins_and_dicts] ,** identifier[values] ): literal[string] keyword[class] identifier[SubInjector] ( identifier[cls] ): keyword[pass] identifier[mixins] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[mixins_and_dicts] keyword[if] identifier[isinstance] ( identifier[x] , identifier[type] )] keyword[if] identifier[mixins] : identifier[SubInjector] . identifier[__bases__] = identifier[tuple] ( identifier[mixins] )+ identifier[SubInjector] . identifier[__bases__] identifier[dicts] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[mixins_and_dicts] keyword[if] keyword[not] identifier[isinstance] ( identifier[x] , identifier[type] )] keyword[for] identifier[d] keyword[in] identifier[reversed] ( identifier[dicts] ): keyword[for] identifier[k] , identifier[v] keyword[in] identifier[d] . identifier[items] (): keyword[if] identifier[k] keyword[not] keyword[in] identifier[values] : identifier[values] [ identifier[k] ]= identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[values] . identifier[items] (): identifier[SubInjector] . identifier[value] ( identifier[k] , identifier[v] ) keyword[return] identifier[SubInjector] ()
def sub(cls, *mixins_and_dicts, **values): """Create and instantiate a sub-injector. Mixins and local value dicts can be passed in as arguments. Local values can also be passed in as keyword arguments. """ class SubInjector(cls): pass mixins = [x for x in mixins_and_dicts if isinstance(x, type)] if mixins: SubInjector.__bases__ = tuple(mixins) + SubInjector.__bases__ # depends on [control=['if'], data=[]] dicts = [x for x in mixins_and_dicts if not isinstance(x, type)] for d in reversed(dicts): for (k, v) in d.items(): if k not in values: values[k] = v # depends on [control=['if'], data=['k', 'values']] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['d']] for (k, v) in values.items(): SubInjector.value(k, v) # depends on [control=['for'], data=[]] return SubInjector()
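To exercise sub outside its real injector framework, the toy base class below mimics the one thing the factory needs: a class-level value() registration hook. Everything here apart from sub itself is a stand-in.

class Injector:
    _values = {}

    @classmethod
    def value(cls, name, val):
        # copy-on-write so sub-injectors never mutate the parent's dict
        cls._values = {**cls._values, name: val}

    @classmethod
    def get(cls, name):
        return cls._values[name]

    sub = classmethod(sub)   # attach the module-level factory above

class LoggingMixin:
    prefix = '[log] '

child = Injector.sub(LoggingMixin, {'db_url': 'sqlite://'}, debug=True)
print(type(child).prefix)                        # [log]  -- mixin prepended to bases
print(child.get('debug'), child.get('db_url'))   # True sqlite://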
def scale_0to1(image_in, exclude_outliers_below=False, exclude_outliers_above=False): """Scale an image to [0, 1] based on its min/max. Parameters ---------- image_in : ndarray Input image exclude_outliers_{below,above} : float Lower/upper limit, a value between 0 and 100. Returns ------- scaled_image : ndarray clipped and/or scaled image """ min_value = image_in.min() max_value = image_in.max() # making a copy to ensure no side-effects image = image_in.copy() if exclude_outliers_below: perctl = float(exclude_outliers_below) image[image < np.percentile(image, perctl)] = min_value if exclude_outliers_above: perctl = float(exclude_outliers_above) image[image > np.percentile(image, 100.0 - perctl)] = max_value image = (image - min_value) / (max_value - min_value) return image
def function[scale_0to1, parameter[image_in, exclude_outliers_below, exclude_outliers_above]]: constant[Scale an image to [0, 1] based on its min/max. Parameters ---------- image_in : ndarray Input image exclude_outliers_{below,above} : float Lower/upper limit, a value between 0 and 100. Returns ------- scaled_image : ndarray clipped and/or scaled image ] variable[min_value] assign[=] call[name[image_in].min, parameter[]] variable[max_value] assign[=] call[name[image_in].max, parameter[]] variable[image] assign[=] call[name[image_in].copy, parameter[]] if name[exclude_outliers_below] begin[:] variable[perctl] assign[=] call[name[float], parameter[name[exclude_outliers_below]]] call[name[image]][compare[name[image] less[<] call[name[np].percentile, parameter[name[image], name[perctl]]]]] assign[=] name[min_value] if name[exclude_outliers_above] begin[:] variable[perctl] assign[=] call[name[float], parameter[name[exclude_outliers_above]]] call[name[image]][compare[name[image] greater[>] call[name[np].percentile, parameter[name[image], binary_operation[constant[100.0] - name[perctl]]]]]] assign[=] name[max_value] variable[image] assign[=] binary_operation[binary_operation[name[image] - name[min_value]] / binary_operation[name[max_value] - name[min_value]]] return[name[image]]
keyword[def] identifier[scale_0to1] ( identifier[image_in] , identifier[exclude_outliers_below] = keyword[False] , identifier[exclude_outliers_above] = keyword[False] ): literal[string] identifier[min_value] = identifier[image_in] . identifier[min] () identifier[max_value] = identifier[image_in] . identifier[max] () identifier[image] = identifier[image_in] . identifier[copy] () keyword[if] identifier[exclude_outliers_below] : identifier[perctl] = identifier[float] ( identifier[exclude_outliers_below] ) identifier[image] [ identifier[image] < identifier[np] . identifier[percentile] ( identifier[image] , identifier[perctl] )]= identifier[min_value] keyword[if] identifier[exclude_outliers_above] : identifier[perctl] = identifier[float] ( identifier[exclude_outliers_above] ) identifier[image] [ identifier[image] > identifier[np] . identifier[percentile] ( identifier[image] , literal[int] - identifier[perctl] )]= identifier[max_value] identifier[image] =( identifier[image] - identifier[min_value] )/( identifier[max_value] - identifier[min_value] ) keyword[return] identifier[image]
def scale_0to1(image_in, exclude_outliers_below=False, exclude_outliers_above=False): """Scale an image to [0, 1] based on its min/max. Parameters ---------- image_in : ndarray Input image exclude_outliers_{below,above} : float Lower/upper limit, a value between 0 and 100. Returns ------- scaled_image : ndarray clipped and/or scaled image """ min_value = image_in.min() max_value = image_in.max() # making a copy to ensure no side-effects image = image_in.copy() if exclude_outliers_below: perctl = float(exclude_outliers_below) image[image < np.percentile(image, perctl)] = min_value # depends on [control=['if'], data=[]] if exclude_outliers_above: perctl = float(exclude_outliers_above) image[image > np.percentile(image, 100.0 - perctl)] = max_value # depends on [control=['if'], data=[]] image = (image - min_value) / (max_value - min_value) return image
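A self-contained demo of this function; note that min/max are taken before any clipping, so the outlier options pin extreme values to 0 or 1 rather than tightening the scaling range.

import numpy as np

img = np.arange(10, dtype=float)            # 0..9
print(scale_0to1(img))                      # evenly spread over [0, 1]
clipped = scale_0to1(img, exclude_outliers_above=20)
print(clipped[-3:])                         # [0.777..., 1.0, 1.0]: top 20% pinned to 1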