code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def build_table(self, table, force=False):
    """Build every source that belongs to *table*, then unify partitions.

    :param table: the table whose sources should be built
    :param force: passed through to ``build_source``; rebuild even if
        the source is already built
    """
    # Resolve the table to its concrete sources and build each in turn.
    for src in self._resolve_sources(None, [table]):
        self.build_source(None, src, force=force)
    # Merge the freshly built sources into unified partitions.
    self.unify_partitions()
def function[build_table, parameter[self, table, force]]: constant[Build all of the sources for a table ] variable[sources] assign[=] call[name[self]._resolve_sources, parameter[constant[None], list[[<ast.Name object at 0x7da18f58de70>]]]] for taget[name[source]] in starred[name[sources]] begin[:] call[name[self].build_source, parameter[constant[None], name[source]]] call[name[self].unify_partitions, parameter[]]
keyword[def] identifier[build_table] ( identifier[self] , identifier[table] , identifier[force] = keyword[False] ): literal[string] identifier[sources] = identifier[self] . identifier[_resolve_sources] ( keyword[None] ,[ identifier[table] ]) keyword[for] identifier[source] keyword[in] identifier[sources] : identifier[self] . identifier[build_source] ( keyword[None] , identifier[source] , identifier[force] = identifier[force] ) identifier[self] . identifier[unify_partitions] ()
def build_table(self, table, force=False): """Build all of the sources for a table """ sources = self._resolve_sources(None, [table]) for source in sources: self.build_source(None, source, force=force) # depends on [control=['for'], data=['source']] self.unify_partitions()
def _append_instruction(self, obj, qargs=None):
    """Update the current Operator by applying an instruction.

    Args:
        obj (Instruction): the instruction to apply.
        qargs (list or None): qubit positions the instruction acts on;
            forwarded to ``compose``.

    Raises:
        QiskitError: if ``obj`` is not an Instruction, uses classical
            registers, or has neither a matrix form nor a circuit
            definition to compose from.
    """
    if isinstance(obj, Instruction):
        # ``chan`` stays None unless a superoperator form is found below.
        chan = None
        # NOTE(review): the 'reset' check is a separate `if`, not part of
        # the `if/elif` chain below — a 'reset' object that also defines
        # `to_matrix` would have `chan` overwritten; confirm intended.
        if obj.name == 'reset':
            # For superoperator evolution we can simulate a reset as
            # a non-unitary superoperator matrix
            chan = SuperOp(
                np.array([[1, 0, 0, 1],
                          [0, 0, 0, 0],
                          [0, 0, 0, 0],
                          [0, 0, 0, 0]]))
        if obj.name == 'kraus':
            # NOTE(review): assumes obj.params holds the Kraus matrices —
            # confirm against the producer of this Instruction.
            kraus = obj.params
            dim = len(kraus[0])
            chan = SuperOp(_to_superop('Kraus', (kraus, None), dim, dim))
        elif hasattr(obj, 'to_matrix'):
            # If instruction is a gate first we see if it has a
            # `to_matrix` definition and if so use that.
            try:
                kraus = [obj.to_matrix()]
                dim = len(kraus[0])
                chan = SuperOp(
                    _to_superop('Kraus', (kraus, None), dim, dim))
            except QiskitError:
                # Gate has no usable matrix form; fall through to the
                # circuit-definition path below.
                pass
        if chan is not None:
            # Perform the composition and inplace update the current state
            # of the operator
            op = self.compose(chan, qargs=qargs)
            self._data = op.data
        else:
            # If the instruction doesn't have a matrix defined we use its
            # circuit decomposition definition if it exists, otherwise we
            # cannot compose this gate and raise an error.
            if obj.definition is None:
                raise QiskitError('Cannot apply Instruction: {}'.format(
                    obj.name))
            for instr, qregs, cregs in obj.definition:
                if cregs:
                    raise QiskitError(
                        'Cannot apply instruction with classical registers: {}'
                        .format(instr.name))
                # Get the integer position of the flat register
                new_qargs = [tup[1] for tup in qregs]
                # Recurse on each sub-instruction of the definition.
                self._append_instruction(instr, qargs=new_qargs)
    else:
        raise QiskitError('Input is not an instruction.')
def function[_append_instruction, parameter[self, obj, qargs]]: constant[Update the current Operator by apply an instruction.] if call[name[isinstance], parameter[name[obj], name[Instruction]]] begin[:] variable[chan] assign[=] constant[None] if compare[name[obj].name equal[==] constant[reset]] begin[:] variable[chan] assign[=] call[name[SuperOp], parameter[call[name[np].array, parameter[list[[<ast.List object at 0x7da1b03942b0>, <ast.List object at 0x7da1b0394dc0>, <ast.List object at 0x7da1b0395ea0>, <ast.List object at 0x7da1b0395de0>]]]]]] if compare[name[obj].name equal[==] constant[kraus]] begin[:] variable[kraus] assign[=] name[obj].params variable[dim] assign[=] call[name[len], parameter[call[name[kraus]][constant[0]]]] variable[chan] assign[=] call[name[SuperOp], parameter[call[name[_to_superop], parameter[constant[Kraus], tuple[[<ast.Name object at 0x7da1b03952d0>, <ast.Constant object at 0x7da1b0395540>]], name[dim], name[dim]]]]] if compare[name[chan] is_not constant[None]] begin[:] variable[op] assign[=] call[name[self].compose, parameter[name[chan]]] name[self]._data assign[=] name[op].data
keyword[def] identifier[_append_instruction] ( identifier[self] , identifier[obj] , identifier[qargs] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[Instruction] ): identifier[chan] = keyword[None] keyword[if] identifier[obj] . identifier[name] == literal[string] : identifier[chan] = identifier[SuperOp] ( identifier[np] . identifier[array] ([[ literal[int] , literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] , literal[int] ], [ literal[int] , literal[int] , literal[int] , literal[int] ]])) keyword[if] identifier[obj] . identifier[name] == literal[string] : identifier[kraus] = identifier[obj] . identifier[params] identifier[dim] = identifier[len] ( identifier[kraus] [ literal[int] ]) identifier[chan] = identifier[SuperOp] ( identifier[_to_superop] ( literal[string] ,( identifier[kraus] , keyword[None] ), identifier[dim] , identifier[dim] )) keyword[elif] identifier[hasattr] ( identifier[obj] , literal[string] ): keyword[try] : identifier[kraus] =[ identifier[obj] . identifier[to_matrix] ()] identifier[dim] = identifier[len] ( identifier[kraus] [ literal[int] ]) identifier[chan] = identifier[SuperOp] ( identifier[_to_superop] ( literal[string] ,( identifier[kraus] , keyword[None] ), identifier[dim] , identifier[dim] )) keyword[except] identifier[QiskitError] : keyword[pass] keyword[if] identifier[chan] keyword[is] keyword[not] keyword[None] : identifier[op] = identifier[self] . identifier[compose] ( identifier[chan] , identifier[qargs] = identifier[qargs] ) identifier[self] . identifier[_data] = identifier[op] . identifier[data] keyword[else] : keyword[if] identifier[obj] . identifier[definition] keyword[is] keyword[None] : keyword[raise] identifier[QiskitError] ( literal[string] . identifier[format] ( identifier[obj] . 
identifier[name] )) keyword[for] identifier[instr] , identifier[qregs] , identifier[cregs] keyword[in] identifier[obj] . identifier[definition] : keyword[if] identifier[cregs] : keyword[raise] identifier[QiskitError] ( literal[string] . identifier[format] ( identifier[instr] . identifier[name] )) identifier[new_qargs] =[ identifier[tup] [ literal[int] ] keyword[for] identifier[tup] keyword[in] identifier[qregs] ] identifier[self] . identifier[_append_instruction] ( identifier[instr] , identifier[qargs] = identifier[new_qargs] ) keyword[else] : keyword[raise] identifier[QiskitError] ( literal[string] )
def _append_instruction(self, obj, qargs=None): """Update the current Operator by apply an instruction.""" if isinstance(obj, Instruction): chan = None if obj.name == 'reset': # For superoperator evolution we can simulate a reset as # a non-unitary supeorperator matrix chan = SuperOp(np.array([[1, 0, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]])) # depends on [control=['if'], data=[]] if obj.name == 'kraus': kraus = obj.params dim = len(kraus[0]) chan = SuperOp(_to_superop('Kraus', (kraus, None), dim, dim)) # depends on [control=['if'], data=[]] elif hasattr(obj, 'to_matrix'): # If instruction is a gate first we see if it has a # `to_matrix` definition and if so use that. try: kraus = [obj.to_matrix()] dim = len(kraus[0]) chan = SuperOp(_to_superop('Kraus', (kraus, None), dim, dim)) # depends on [control=['try'], data=[]] except QiskitError: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] if chan is not None: # Perform the composition and inplace update the current state # of the operator op = self.compose(chan, qargs=qargs) self._data = op.data # depends on [control=['if'], data=['chan']] else: # If the instruction doesn't have a matrix defined we use its # circuit decomposition definition if it exists, otherwise we # cannot compose this gate and raise an error. if obj.definition is None: raise QiskitError('Cannot apply Instruction: {}'.format(obj.name)) # depends on [control=['if'], data=[]] for (instr, qregs, cregs) in obj.definition: if cregs: raise QiskitError('Cannot apply instruction with classical registers: {}'.format(instr.name)) # depends on [control=['if'], data=[]] # Get the integer position of the flat register new_qargs = [tup[1] for tup in qregs] self._append_instruction(instr, qargs=new_qargs) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: raise QiskitError('Input is not an instruction.')
def gcd(*numbers):
    r"""
    Returns the greatest common divisor for a sequence of numbers.

    Args:
        \*numbers: Sequence of numbers.

    Returns:
        (int) Greatest common divisor of numbers.

    Raises:
        IndexError: if no numbers are given.
    """
    # Fold pairwise: gcd(a, b, c) == gcd(gcd(a, b), c).
    # Use a raw docstring above: the original non-raw "\*" is an invalid
    # escape sequence and warns on modern Python.
    n = numbers[0]
    # Start the fold from the second element; the original also folded the
    # first element with itself (pygcd(n, n)), which is a no-op.
    for i in numbers[1:]:
        n = pygcd(n, i)
    return n
def function[gcd, parameter[]]: constant[ Returns the greatest common divisor for a sequence of numbers. Args: \*numbers: Sequence of numbers. Returns: (int) Greatest common divisor of numbers. ] variable[n] assign[=] call[name[numbers]][constant[0]] for taget[name[i]] in starred[name[numbers]] begin[:] variable[n] assign[=] call[name[pygcd], parameter[name[n], name[i]]] return[name[n]]
keyword[def] identifier[gcd] (* identifier[numbers] ): literal[string] identifier[n] = identifier[numbers] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[numbers] : identifier[n] = identifier[pygcd] ( identifier[n] , identifier[i] ) keyword[return] identifier[n]
def gcd(*numbers): """ Returns the greatest common divisor for a sequence of numbers. Args: \\*numbers: Sequence of numbers. Returns: (int) Greatest common divisor of numbers. """ n = numbers[0] for i in numbers: n = pygcd(n, i) # depends on [control=['for'], data=['i']] return n
def tokenize(self, s):
    """
    Tokenizes the string.

    :param s: the string to tokenize
    :type s: str
    :return: the iterator
    :rtype: TokenIterator
    """
    # Delegate tokenization to the wrapped Java object, then expose the
    # resulting tokens through a Python-side iterator.
    jni_signature = "(Ljava/lang/String;)V"
    javabridge.call(self.jobject, "tokenize", jni_signature, s)
    return TokenIterator(self)
def function[tokenize, parameter[self, s]]: constant[ Tokenizes the string. :param s: the string to tokenize :type s: str :return: the iterator :rtype: TokenIterator ] call[name[javabridge].call, parameter[name[self].jobject, constant[tokenize], constant[(Ljava/lang/String;)V], name[s]]] return[call[name[TokenIterator], parameter[name[self]]]]
keyword[def] identifier[tokenize] ( identifier[self] , identifier[s] ): literal[string] identifier[javabridge] . identifier[call] ( identifier[self] . identifier[jobject] , literal[string] , literal[string] , identifier[s] ) keyword[return] identifier[TokenIterator] ( identifier[self] )
def tokenize(self, s): """ Tokenizes the string. :param s: the string to tokenize :type s: str :return: the iterator :rtype: TokenIterator """ javabridge.call(self.jobject, 'tokenize', '(Ljava/lang/String;)V', s) return TokenIterator(self)
def marker(self, *args):
    """
    Defines markers one at a time for your graph

    args are of the form::

        <marker type>, <color>, <data set index>, <data point>, <size>, <priority>

    see the official developers doc for the complete spec

    :returns: self, to allow chaining
    :raises ValueError: on an unknown marker type or too many arguments

    APIPARAM: chm
    """
    # A single-character first argument is a marker-type code; validate it
    # against the known marker set. Explicit raises replace the original
    # `assert`s, which are silently stripped under `python -O`.
    if len(args[0]) == 1:
        if args[0] not in MARKERS:
            raise ValueError('Invalid marker type: %s' % args[0])
    if len(args) > 6:
        raise ValueError('Incorrect arguments %s' % str(args))
    # Normalize the color argument (index 1) before serializing.
    args = color_args(args, 1)
    self.markers.append(','.join(map(str, args)))
    return self
def function[marker, parameter[self]]: constant[ Defines markers one at a time for your graph args are of the form:: <marker type>, <color>, <data set index>, <data point>, <size>, <priority> see the official developers doc for the complete spec APIPARAM: chm ] if compare[call[name[len], parameter[call[name[args]][constant[0]]]] equal[==] constant[1]] begin[:] assert[compare[call[name[args]][constant[0]] in name[MARKERS]]] assert[compare[call[name[len], parameter[name[args]]] less_or_equal[<=] constant[6]]] variable[args] assign[=] call[name[color_args], parameter[name[args], constant[1]]] call[name[self].markers.append, parameter[call[constant[,].join, parameter[call[name[map], parameter[name[str], name[args]]]]]]] return[name[self]]
keyword[def] identifier[marker] ( identifier[self] ,* identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] [ literal[int] ])== literal[int] : keyword[assert] identifier[args] [ literal[int] ] keyword[in] identifier[MARKERS] , literal[string] % identifier[args] [ literal[int] ] keyword[assert] identifier[len] ( identifier[args] )<= literal[int] , literal[string] % identifier[str] ( identifier[args] ) identifier[args] = identifier[color_args] ( identifier[args] , literal[int] ) identifier[self] . identifier[markers] . identifier[append] ( literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[args] ))) keyword[return] identifier[self]
def marker(self, *args): """ Defines markers one at a time for your graph args are of the form:: <marker type>, <color>, <data set index>, <data point>, <size>, <priority> see the official developers doc for the complete spec APIPARAM: chm """ if len(args[0]) == 1: assert args[0] in MARKERS, 'Invalid marker type: %s' % args[0] # depends on [control=['if'], data=[]] assert len(args) <= 6, 'Incorrect arguments %s' % str(args) args = color_args(args, 1) self.markers.append(','.join(map(str, args))) return self
def return_net(word, word_net, depth=1):
    """Collect the unique words reachable at a given depth from a root word.

    @Args:
    --
        word     : root word from which the linked words should be returned.
        word_net : word network (dictionary of word instances) consulted
                   during traversal.
        depth    : depth to which this traversal must go before returning
                   words.

    @return:
    ---
        List of words within the given depth of the root word in the network.
    """
    if depth < 1:
        raise Exception(TAG + "Degree value error.range(1,~)")
    neighbours = word_net[word].frwrd_links
    if depth == 1:
        # Base case: the direct forward links of the root word.
        return list(neighbours)
    elif depth > 1:
        # Recurse one level shallower from every neighbour, then de-dupe.
        collected = []
        for neighbour in neighbours:
            collected.extend(return_net(neighbour, word_net, depth=depth - 1))
        return list(set(collected))
def function[return_net, parameter[word, word_net, depth]]: constant[Creates a list of unique words that are at a provided depth from root word. @Args: -- word : root word from which the linked words should be returned. word_net : word network (dictionary of word instances)to be refered in this process. depth : depth to which this process must traverse and return words. @return: --- res : List of words that are within a certain depth from root word in network. ] if compare[name[depth] less[<] constant[1]] begin[:] <ast.Raise object at 0x7da20c794a00> if compare[name[depth] equal[==] constant[1]] begin[:] return[call[name[list], parameter[call[name[word_net]][name[word]].frwrd_links]]]
keyword[def] identifier[return_net] ( identifier[word] , identifier[word_net] , identifier[depth] = literal[int] ): literal[string] keyword[if] identifier[depth] < literal[int] : keyword[raise] identifier[Exception] ( identifier[TAG] + literal[string] ) keyword[if] identifier[depth] == literal[int] : keyword[return] identifier[list] ( identifier[word_net] [ identifier[word] ]. identifier[frwrd_links] ) keyword[elif] identifier[depth] > literal[int] : identifier[words] = identifier[word_net] [ identifier[word] ]. identifier[frwrd_links] identifier[res] =[] keyword[for] identifier[w] keyword[in] identifier[words] : identifier[res] . identifier[extend] ( identifier[return_net] ( identifier[w] , identifier[word_net] , identifier[depth] = identifier[depth] - literal[int] )) keyword[return] identifier[list] ( identifier[set] ( identifier[res] ))
def return_net(word, word_net, depth=1): """Creates a list of unique words that are at a provided depth from root word. @Args: -- word : root word from which the linked words should be returned. word_net : word network (dictionary of word instances)to be refered in this process. depth : depth to which this process must traverse and return words. @return: --- res : List of words that are within a certain depth from root word in network. """ if depth < 1: raise Exception(TAG + 'Degree value error.range(1,~)') # depends on [control=['if'], data=[]] if depth == 1: return list(word_net[word].frwrd_links) # depends on [control=['if'], data=[]] elif depth > 1: words = word_net[word].frwrd_links res = [] for w in words: res.extend(return_net(w, word_net, depth=depth - 1)) # depends on [control=['for'], data=['w']] return list(set(res)) # depends on [control=['if'], data=['depth']]
def format_content_type_object(repo, content_type, uuid):
    """
    Return a content object from a repository for a given content_type
    and uuid

    :param Repo repo: The git repository.
    :param str content_type: The content type to list
    :param str uuid: The identifier of the object to fetch
    :returns: dict
    """
    try:
        manager = StorageManager(repo)
        model = load_model_class(repo, content_type)
        return dict(manager.get(model, uuid))
    except GitCommandError:
        # A missing object surfaces as a git error; translate it into a
        # NotFound for the caller.
        raise NotFound('Object does not exist.')
def function[format_content_type_object, parameter[repo, content_type, uuid]]: constant[ Return a content object from a repository for a given content_type and uuid :param Repo repo: The git repository. :param str content_type: The content type to list :returns: dict ] <ast.Try object at 0x7da1b14ab5e0>
keyword[def] identifier[format_content_type_object] ( identifier[repo] , identifier[content_type] , identifier[uuid] ): literal[string] keyword[try] : identifier[storage_manager] = identifier[StorageManager] ( identifier[repo] ) identifier[model_class] = identifier[load_model_class] ( identifier[repo] , identifier[content_type] ) keyword[return] identifier[dict] ( identifier[storage_manager] . identifier[get] ( identifier[model_class] , identifier[uuid] )) keyword[except] identifier[GitCommandError] : keyword[raise] identifier[NotFound] ( literal[string] )
def format_content_type_object(repo, content_type, uuid): """ Return a content object from a repository for a given content_type and uuid :param Repo repo: The git repository. :param str content_type: The content type to list :returns: dict """ try: storage_manager = StorageManager(repo) model_class = load_model_class(repo, content_type) return dict(storage_manager.get(model_class, uuid)) # depends on [control=['try'], data=[]] except GitCommandError: raise NotFound('Object does not exist.') # depends on [control=['except'], data=[]]
def iter_replace_strings(replacements):
    """Create a function that uses replacement pairs to process a string.

    The returned function takes an iterable of strings and yields each
    string with every find/replace pair applied in turn.

    Args:
        replacements: Dict containing 'find_string': 'replace_string' pairs

    Returns:
        function with signature: iterator of strings = function(iterable)
    """
    def function_iter_replace_strings(iterable_strings):
        """Yield each input string with all replacement pairs applied.

        Args:
            iterable_strings: Iterable containing strings.
                E.g. a file-like object.

        Returns:
            Yields formatted line.
        """
        for line in iterable_strings:
            # Apply every pair sequentially, in dict order.
            for find_str, replace_str in replacements.items():
                line = line.replace(find_str, replace_str)
            yield line
    return function_iter_replace_strings
def function[iter_replace_strings, parameter[replacements]]: constant[Create a function that uses replacement pairs to process a string. The returned function takes an iterator and yields on each processed line. Args: replacements: Dict containing 'find_string': 'replace_string' pairs Returns: function with signature: iterator of strings = function(iterable) ] def function[function_iter_replace_strings, parameter[iterable_strings]]: constant[Yield a formatted string from iterable_strings using a generator. Args: iterable_strings: Iterable containing strings. E.g a file-like object. Returns: Yields formatted line. ] for taget[name[string]] in starred[name[iterable_strings]] begin[:] <ast.Yield object at 0x7da20c990a00> return[name[function_iter_replace_strings]]
keyword[def] identifier[iter_replace_strings] ( identifier[replacements] ): literal[string] keyword[def] identifier[function_iter_replace_strings] ( identifier[iterable_strings] ): literal[string] keyword[for] identifier[string] keyword[in] identifier[iterable_strings] : keyword[yield] identifier[reduce] (( keyword[lambda] identifier[s] , identifier[kv] : identifier[s] . identifier[replace] (* identifier[kv] )), identifier[replacements] . identifier[items] (), identifier[string] ) keyword[return] identifier[function_iter_replace_strings]
def iter_replace_strings(replacements): """Create a function that uses replacement pairs to process a string. The returned function takes an iterator and yields on each processed line. Args: replacements: Dict containing 'find_string': 'replace_string' pairs Returns: function with signature: iterator of strings = function(iterable) """ def function_iter_replace_strings(iterable_strings): """Yield a formatted string from iterable_strings using a generator. Args: iterable_strings: Iterable containing strings. E.g a file-like object. Returns: Yields formatted line. """ for string in iterable_strings: yield reduce(lambda s, kv: s.replace(*kv), replacements.items(), string) # depends on [control=['for'], data=['string']] return function_iter_replace_strings
def _derive_checksum(self, s): """ Derive the checksum :param str s: Random string for which to derive the checksum """ checksum = hashlib.sha256(bytes(s, "ascii")).hexdigest() return checksum[:4]
def function[_derive_checksum, parameter[self, s]]: constant[ Derive the checksum :param str s: Random string for which to derive the checksum ] variable[checksum] assign[=] call[call[name[hashlib].sha256, parameter[call[name[bytes], parameter[name[s], constant[ascii]]]]].hexdigest, parameter[]] return[call[name[checksum]][<ast.Slice object at 0x7da18f09efe0>]]
keyword[def] identifier[_derive_checksum] ( identifier[self] , identifier[s] ): literal[string] identifier[checksum] = identifier[hashlib] . identifier[sha256] ( identifier[bytes] ( identifier[s] , literal[string] )). identifier[hexdigest] () keyword[return] identifier[checksum] [: literal[int] ]
def _derive_checksum(self, s): """ Derive the checksum :param str s: Random string for which to derive the checksum """ checksum = hashlib.sha256(bytes(s, 'ascii')).hexdigest() return checksum[:4]
def _process_trace(trace_file, index_file, trace, name, index): """Support function for coda_output(); writes output to files""" if ndim(trace) > 1: trace = swapaxes(trace, 0, 1) for i, seq in enumerate(trace): _name = '%s_%s' % (name, i) index = _process_trace(trace_file, index_file, seq, _name, index) else: index_buffer = '%s\t%s\t' % (name, index) for i, val in enumerate(trace): trace_file.write('%s\t%s\r\n' % (i + 1, val)) index += 1 index_file.write('%s%s\r\n' % (index_buffer, index - 1)) return index
def function[_process_trace, parameter[trace_file, index_file, trace, name, index]]: constant[Support function for coda_output(); writes output to files] if compare[call[name[ndim], parameter[name[trace]]] greater[>] constant[1]] begin[:] variable[trace] assign[=] call[name[swapaxes], parameter[name[trace], constant[0], constant[1]]] for taget[tuple[[<ast.Name object at 0x7da1b17a9c00>, <ast.Name object at 0x7da1b17ab550>]]] in starred[call[name[enumerate], parameter[name[trace]]]] begin[:] variable[_name] assign[=] binary_operation[constant[%s_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b17a9690>, <ast.Name object at 0x7da1b17aad40>]]] variable[index] assign[=] call[name[_process_trace], parameter[name[trace_file], name[index_file], name[seq], name[_name], name[index]]] return[name[index]]
keyword[def] identifier[_process_trace] ( identifier[trace_file] , identifier[index_file] , identifier[trace] , identifier[name] , identifier[index] ): literal[string] keyword[if] identifier[ndim] ( identifier[trace] )> literal[int] : identifier[trace] = identifier[swapaxes] ( identifier[trace] , literal[int] , literal[int] ) keyword[for] identifier[i] , identifier[seq] keyword[in] identifier[enumerate] ( identifier[trace] ): identifier[_name] = literal[string] %( identifier[name] , identifier[i] ) identifier[index] = identifier[_process_trace] ( identifier[trace_file] , identifier[index_file] , identifier[seq] , identifier[_name] , identifier[index] ) keyword[else] : identifier[index_buffer] = literal[string] %( identifier[name] , identifier[index] ) keyword[for] identifier[i] , identifier[val] keyword[in] identifier[enumerate] ( identifier[trace] ): identifier[trace_file] . identifier[write] ( literal[string] %( identifier[i] + literal[int] , identifier[val] )) identifier[index] += literal[int] identifier[index_file] . identifier[write] ( literal[string] %( identifier[index_buffer] , identifier[index] - literal[int] )) keyword[return] identifier[index]
def _process_trace(trace_file, index_file, trace, name, index): """Support function for coda_output(); writes output to files""" if ndim(trace) > 1: trace = swapaxes(trace, 0, 1) for (i, seq) in enumerate(trace): _name = '%s_%s' % (name, i) index = _process_trace(trace_file, index_file, seq, _name, index) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: index_buffer = '%s\t%s\t' % (name, index) for (i, val) in enumerate(trace): trace_file.write('%s\t%s\r\n' % (i + 1, val)) index += 1 # depends on [control=['for'], data=[]] index_file.write('%s%s\r\n' % (index_buffer, index - 1)) return index
def findRoleID(self, name):
    """Searches the roles by name and returns the role's ID.

    :param name: role name, matched case-insensitively
    :return: the matching role's ``id``, or None if no role matches
    """
    # Hoist the loop-invariant lowercasing out of the loop; the original
    # also had a pointless `del r` inside the loop (the name is rebound on
    # the very next iteration), which is removed here.
    target = name.lower()
    for role in self:
        if role['name'].lower() == target:
            return role['id']
    return None
def function[findRoleID, parameter[self, name]]: constant[searches the roles by name and returns the role's ID] for taget[name[r]] in starred[name[self]] begin[:] if compare[call[call[name[r]][constant[name]].lower, parameter[]] equal[==] call[name[name].lower, parameter[]]] begin[:] return[call[name[r]][constant[id]]] <ast.Delete object at 0x7da1b12f1150> return[constant[None]]
keyword[def] identifier[findRoleID] ( identifier[self] , identifier[name] ): literal[string] keyword[for] identifier[r] keyword[in] identifier[self] : keyword[if] identifier[r] [ literal[string] ]. identifier[lower] ()== identifier[name] . identifier[lower] (): keyword[return] identifier[r] [ literal[string] ] keyword[del] identifier[r] keyword[return] keyword[None]
def findRoleID(self, name): """searches the roles by name and returns the role's ID""" for r in self: if r['name'].lower() == name.lower(): return r['id'] # depends on [control=['if'], data=[]] del r # depends on [control=['for'], data=['r']] return None
def delete_zone(zone_id, profile):
    '''
    Delete a zone.

    :param zone_id: Zone to delete.
    :type zone_id: ``str``

    :param profile: The profile key
    :type profile: ``str``

    :rtype: ``bool``

    CLI Example:

    .. code-block:: bash

        salt myminion libcloud_dns.delete_zone google.com profile1
    '''
    # Look the zone up through the profile's driver, then delete it.
    driver = _get_driver(profile=profile)
    target = driver.get_zone(zone_id=zone_id)
    return driver.delete_zone(target)
def function[delete_zone, parameter[zone_id, profile]]: constant[ Delete a zone. :param zone_id: Zone to delete. :type zone_id: ``str`` :param profile: The profile key :type profile: ``str`` :rtype: ``bool`` CLI Example: .. code-block:: bash salt myminion libcloud_dns.delete_zone google.com profile1 ] variable[conn] assign[=] call[name[_get_driver], parameter[]] variable[zone] assign[=] call[name[conn].get_zone, parameter[]] return[call[name[conn].delete_zone, parameter[name[zone]]]]
keyword[def] identifier[delete_zone] ( identifier[zone_id] , identifier[profile] ): literal[string] identifier[conn] = identifier[_get_driver] ( identifier[profile] = identifier[profile] ) identifier[zone] = identifier[conn] . identifier[get_zone] ( identifier[zone_id] = identifier[zone_id] ) keyword[return] identifier[conn] . identifier[delete_zone] ( identifier[zone] )
def delete_zone(zone_id, profile): """ Delete a zone. :param zone_id: Zone to delete. :type zone_id: ``str`` :param profile: The profile key :type profile: ``str`` :rtype: ``bool`` CLI Example: .. code-block:: bash salt myminion libcloud_dns.delete_zone google.com profile1 """ conn = _get_driver(profile=profile) zone = conn.get_zone(zone_id=zone_id) return conn.delete_zone(zone)
def read(self, url):
    """Read storage at a given url.

    :param url: storage URL, split into a store name and a path
    :return: list of decoded lines from the underlying file
    """
    # Resolve the URL to a store/path pair, open the path through the
    # store's adapter, and decode every line.
    store_name, path = self._split_url(url)
    adapter = self._create_adapter(store_name)
    with adapter.open(path) as handle:
        return [raw_line.decode() for raw_line in handle]
def function[read, parameter[self, url]]: constant[Read storage at a given url] <ast.Tuple object at 0x7da1b1b03820> assign[=] call[name[self]._split_url, parameter[name[url]]] variable[adapter] assign[=] call[name[self]._create_adapter, parameter[name[store_name]]] variable[lines] assign[=] list[[]] with call[name[adapter].open, parameter[name[path]]] begin[:] for taget[name[line]] in starred[name[f]] begin[:] call[name[lines].append, parameter[call[name[line].decode, parameter[]]]] return[name[lines]]
keyword[def] identifier[read] ( identifier[self] , identifier[url] ): literal[string] ( identifier[store_name] , identifier[path] )= identifier[self] . identifier[_split_url] ( identifier[url] ) identifier[adapter] = identifier[self] . identifier[_create_adapter] ( identifier[store_name] ) identifier[lines] =[] keyword[with] identifier[adapter] . identifier[open] ( identifier[path] ) keyword[as] identifier[f] : keyword[for] identifier[line] keyword[in] identifier[f] : identifier[lines] . identifier[append] ( identifier[line] . identifier[decode] ()) keyword[return] identifier[lines]
def read(self, url): """Read storage at a given url""" (store_name, path) = self._split_url(url) adapter = self._create_adapter(store_name) lines = [] with adapter.open(path) as f: for line in f: lines.append(line.decode()) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']] return lines
def sort_dict(unsorted_dict):
    """
    Return a OrderedDict ordered by key names from the :unsorted_dict:
    """
    # Sort the (key, value) pairs by key, then build the ordered mapping
    # directly from the sorted sequence.
    ordered_items = sorted(unsorted_dict.items(), key=itemgetter(0))
    return OrderedDict(ordered_items)
def function[sort_dict, parameter[unsorted_dict]]: constant[ Return a OrderedDict ordered by key names from the :unsorted_dict: ] variable[sorted_dict] assign[=] call[name[OrderedDict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b0fe56f0>, <ast.Name object at 0x7da1b0fe7190>]]] in starred[call[name[sorted], parameter[call[name[unsorted_dict].items, parameter[]]]]] begin[:] call[name[sorted_dict]][name[key]] assign[=] name[value] return[name[sorted_dict]]
keyword[def] identifier[sort_dict] ( identifier[unsorted_dict] ): literal[string] identifier[sorted_dict] = identifier[OrderedDict] () keyword[for] identifier[key] , identifier[value] keyword[in] identifier[sorted] ( identifier[unsorted_dict] . identifier[items] (), identifier[key] = identifier[itemgetter] ( literal[int] )): identifier[sorted_dict] [ identifier[key] ]= identifier[value] keyword[return] identifier[sorted_dict]
def sort_dict(unsorted_dict): """ Return a OrderedDict ordered by key names from the :unsorted_dict: """ sorted_dict = OrderedDict() # sort items before inserting them into a dict for (key, value) in sorted(unsorted_dict.items(), key=itemgetter(0)): sorted_dict[key] = value # depends on [control=['for'], data=[]] return sorted_dict
def find(self, package, **kwargs):
    """
    Find a package using package finders.

    Return the first package found.

    Args:
        package (str): package to find.
        **kwargs (): additional keyword arguments used by finders.

    Returns:
        PackageSpec: if package found, else None
    """
    # Lazily query each finder in order; stop at the first truthy result.
    results = (finder.find(package, **kwargs) for finder in self.finders)
    return next((spec for spec in results if spec), None)
def function[find, parameter[self, package]]: constant[ Find a package using package finders. Return the first package found. Args: package (str): package to find. **kwargs (): additional keyword arguments used by finders. Returns: PackageSpec: if package found, else None ] for taget[name[finder]] in starred[name[self].finders] begin[:] variable[package_spec] assign[=] call[name[finder].find, parameter[name[package]]] if name[package_spec] begin[:] return[name[package_spec]] return[constant[None]]
keyword[def] identifier[find] ( identifier[self] , identifier[package] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[finder] keyword[in] identifier[self] . identifier[finders] : identifier[package_spec] = identifier[finder] . identifier[find] ( identifier[package] ,** identifier[kwargs] ) keyword[if] identifier[package_spec] : keyword[return] identifier[package_spec] keyword[return] keyword[None]
def find(self, package, **kwargs): """ Find a package using package finders. Return the first package found. Args: package (str): package to find. **kwargs (): additional keyword arguments used by finders. Returns: PackageSpec: if package found, else None """ for finder in self.finders: package_spec = finder.find(package, **kwargs) if package_spec: return package_spec # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['finder']] return None
def read(sensor): """ distance of object in front of sensor in CM. """ import time import RPi.GPIO as GPIO # Disable any warning message such as GPIO pins in use GPIO.setwarnings(False) # use the values of the GPIO pins, and not the actual pin number # so if you connect to GPIO 25 which is on pin number 22, the # reference in this code is 25, which is the number of the GPIO # port and not the number of the physical pin GPIO.setmode(GPIO.BCM) if sensor.gpio_in is 0: raise RuntimeError('gpio_in, gpio_out attribute of Sensor object must be assigned before calling read') else: gpio_in = sensor.gpio_in gpio_out = sensor.gpio_out # point the software to the GPIO pins the sensor is using # change these values to the pins you are using # GPIO output = the pin that's connected to "Trig" on the sensor # GPIO input = the pin that's connected to "Echo" on the sensor GPIO.setup(gpio_out, GPIO.OUT) GPIO.setup(gpio_in, GPIO.IN) GPIO.output(gpio_out, GPIO.LOW) # found that the sensor can crash if there isn't a delay here # no idea why. If you have odd crashing issues, increase delay time.sleep(0.3) # sensor manual says a pulse ength of 10Us will trigger the # sensor to transmit 8 cycles of ultrasonic burst at 40kHz and # wait for the reflected ultrasonic burst to be received # to get a pulse length of 10Us we need to start the pulse, then # wait for 10 microseconds, then stop the pulse. This will # result in the pulse length being 10Us. # start the pulse on the GPIO pin # change this value to the pin you are using # GPIO output = the pin that's connected to "Trig" on the sensor GPIO.output(gpio_out, True) # wait 10 micro seconds (this is 0.00001 seconds) so the pulse # length is 10Us as the sensor expects time.sleep(0.00001) # stop the pulse after the time above has passed # change this value to the pin you are using # GPIO output = the pin that's connected to "Trig" on the sensor GPIO.output(gpio_out, False) # listen to the input pin. 0 means nothing is happening. 
Once a # signal is received the value will be 1 so the while loop # stops and has the last recorded time the signal was 0 # change this value to the pin you are using # GPIO input = the pin that's connected to "Echo" on the sensor while GPIO.input(gpio_in) == 0: signaloff = time.time() # listen to the input pin. Once a signal is received, record the # time the signal came through # change this value to the pin you are using # GPIO input = the pin that's connected to "Echo" on the sensor while GPIO.input(gpio_in) == 1: signalon = time.time() # work out the difference in the two recorded times above to # calculate the distance of an object in front of the sensor timepassed = signalon - signaloff # we now have our distance but it's not in a useful unit of # measurement. So now we convert this distance into centimetres distance = timepassed * 17000 # we're no longer using the GPIO, so tell software we're done GPIO.cleanup() return distance
def function[read, parameter[sensor]]: constant[ distance of object in front of sensor in CM. ] import module[time] import module[RPi.GPIO] as alias[GPIO] call[name[GPIO].setwarnings, parameter[constant[False]]] call[name[GPIO].setmode, parameter[name[GPIO].BCM]] if compare[name[sensor].gpio_in is constant[0]] begin[:] <ast.Raise object at 0x7da18f723640>
keyword[def] identifier[read] ( identifier[sensor] ): literal[string] keyword[import] identifier[time] keyword[import] identifier[RPi] . identifier[GPIO] keyword[as] identifier[GPIO] identifier[GPIO] . identifier[setwarnings] ( keyword[False] ) identifier[GPIO] . identifier[setmode] ( identifier[GPIO] . identifier[BCM] ) keyword[if] identifier[sensor] . identifier[gpio_in] keyword[is] literal[int] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[else] : identifier[gpio_in] = identifier[sensor] . identifier[gpio_in] identifier[gpio_out] = identifier[sensor] . identifier[gpio_out] identifier[GPIO] . identifier[setup] ( identifier[gpio_out] , identifier[GPIO] . identifier[OUT] ) identifier[GPIO] . identifier[setup] ( identifier[gpio_in] , identifier[GPIO] . identifier[IN] ) identifier[GPIO] . identifier[output] ( identifier[gpio_out] , identifier[GPIO] . identifier[LOW] ) identifier[time] . identifier[sleep] ( literal[int] ) identifier[GPIO] . identifier[output] ( identifier[gpio_out] , keyword[True] ) identifier[time] . identifier[sleep] ( literal[int] ) identifier[GPIO] . identifier[output] ( identifier[gpio_out] , keyword[False] ) keyword[while] identifier[GPIO] . identifier[input] ( identifier[gpio_in] )== literal[int] : identifier[signaloff] = identifier[time] . identifier[time] () keyword[while] identifier[GPIO] . identifier[input] ( identifier[gpio_in] )== literal[int] : identifier[signalon] = identifier[time] . identifier[time] () identifier[timepassed] = identifier[signalon] - identifier[signaloff] identifier[distance] = identifier[timepassed] * literal[int] identifier[GPIO] . identifier[cleanup] () keyword[return] identifier[distance]
def read(sensor): """ distance of object in front of sensor in CM. """ import time import RPi.GPIO as GPIO # Disable any warning message such as GPIO pins in use GPIO.setwarnings(False) # use the values of the GPIO pins, and not the actual pin number # so if you connect to GPIO 25 which is on pin number 22, the # reference in this code is 25, which is the number of the GPIO # port and not the number of the physical pin GPIO.setmode(GPIO.BCM) if sensor.gpio_in is 0: raise RuntimeError('gpio_in, gpio_out attribute of Sensor object must be assigned before calling read') # depends on [control=['if'], data=[]] else: gpio_in = sensor.gpio_in gpio_out = sensor.gpio_out # point the software to the GPIO pins the sensor is using # change these values to the pins you are using # GPIO output = the pin that's connected to "Trig" on the sensor # GPIO input = the pin that's connected to "Echo" on the sensor GPIO.setup(gpio_out, GPIO.OUT) GPIO.setup(gpio_in, GPIO.IN) GPIO.output(gpio_out, GPIO.LOW) # found that the sensor can crash if there isn't a delay here # no idea why. If you have odd crashing issues, increase delay time.sleep(0.3) # sensor manual says a pulse ength of 10Us will trigger the # sensor to transmit 8 cycles of ultrasonic burst at 40kHz and # wait for the reflected ultrasonic burst to be received # to get a pulse length of 10Us we need to start the pulse, then # wait for 10 microseconds, then stop the pulse. This will # result in the pulse length being 10Us. # start the pulse on the GPIO pin # change this value to the pin you are using # GPIO output = the pin that's connected to "Trig" on the sensor GPIO.output(gpio_out, True) # wait 10 micro seconds (this is 0.00001 seconds) so the pulse # length is 10Us as the sensor expects time.sleep(1e-05) # stop the pulse after the time above has passed # change this value to the pin you are using # GPIO output = the pin that's connected to "Trig" on the sensor GPIO.output(gpio_out, False) # listen to the input pin. 
0 means nothing is happening. Once a # signal is received the value will be 1 so the while loop # stops and has the last recorded time the signal was 0 # change this value to the pin you are using # GPIO input = the pin that's connected to "Echo" on the sensor while GPIO.input(gpio_in) == 0: signaloff = time.time() # depends on [control=['while'], data=[]] # listen to the input pin. Once a signal is received, record the # time the signal came through # change this value to the pin you are using # GPIO input = the pin that's connected to "Echo" on the sensor while GPIO.input(gpio_in) == 1: signalon = time.time() # depends on [control=['while'], data=[]] # work out the difference in the two recorded times above to # calculate the distance of an object in front of the sensor timepassed = signalon - signaloff # we now have our distance but it's not in a useful unit of # measurement. So now we convert this distance into centimetres distance = timepassed * 17000 # we're no longer using the GPIO, so tell software we're done GPIO.cleanup() return distance
def local_expiring_lru(obj): """ Property that maps to a key in a local dict-like attribute. self._cache must be an OrderedDict self._cache_size must be defined as LRU size self._cache_ttl is the expiration time in seconds .. class Foo(object): def __init__(self, cache_size=5000, cache_ttl=600): self._cache = OrderedDict() self._cache_size = cache_size self._cache_ttl = cache_ttl @local_expiring_lru def expensive_meth(self, arg): pass .. """ @wraps(obj) def memoizer(*args, **kwargs): instance = args[0] lru_size = instance._cache_size cache_ttl = instance._cache_ttl if lru_size and cache_ttl: cache = instance._cache kargs = list(args) kargs[0] = id(instance) key = str((kargs, kwargs)) try: r = list(cache.pop(key)) if r[1] < datetime.datetime.utcnow(): r[0] = None else: cache[key] = r except (KeyError, AssertionError): if len(cache) >= lru_size: cache.popitem(last=False) r = cache[key] = ( obj(*args, **kwargs), datetime.datetime.utcnow() + datetime.timedelta( seconds=cache_ttl) ) if r[0]: return r[0] return obj(*args, **kwargs) return memoizer
def function[local_expiring_lru, parameter[obj]]: constant[ Property that maps to a key in a local dict-like attribute. self._cache must be an OrderedDict self._cache_size must be defined as LRU size self._cache_ttl is the expiration time in seconds .. class Foo(object): def __init__(self, cache_size=5000, cache_ttl=600): self._cache = OrderedDict() self._cache_size = cache_size self._cache_ttl = cache_ttl @local_expiring_lru def expensive_meth(self, arg): pass .. ] def function[memoizer, parameter[]]: variable[instance] assign[=] call[name[args]][constant[0]] variable[lru_size] assign[=] name[instance]._cache_size variable[cache_ttl] assign[=] name[instance]._cache_ttl if <ast.BoolOp object at 0x7da1b10d5270> begin[:] variable[cache] assign[=] name[instance]._cache variable[kargs] assign[=] call[name[list], parameter[name[args]]] call[name[kargs]][constant[0]] assign[=] call[name[id], parameter[name[instance]]] variable[key] assign[=] call[name[str], parameter[tuple[[<ast.Name object at 0x7da1b10d7cd0>, <ast.Name object at 0x7da1b10d7bb0>]]]] <ast.Try object at 0x7da1b10d4a30> if call[name[r]][constant[0]] begin[:] return[call[name[r]][constant[0]]] return[call[name[obj], parameter[<ast.Starred object at 0x7da1b10efe20>]]] return[name[memoizer]]
keyword[def] identifier[local_expiring_lru] ( identifier[obj] ): literal[string] @ identifier[wraps] ( identifier[obj] ) keyword[def] identifier[memoizer] (* identifier[args] ,** identifier[kwargs] ): identifier[instance] = identifier[args] [ literal[int] ] identifier[lru_size] = identifier[instance] . identifier[_cache_size] identifier[cache_ttl] = identifier[instance] . identifier[_cache_ttl] keyword[if] identifier[lru_size] keyword[and] identifier[cache_ttl] : identifier[cache] = identifier[instance] . identifier[_cache] identifier[kargs] = identifier[list] ( identifier[args] ) identifier[kargs] [ literal[int] ]= identifier[id] ( identifier[instance] ) identifier[key] = identifier[str] (( identifier[kargs] , identifier[kwargs] )) keyword[try] : identifier[r] = identifier[list] ( identifier[cache] . identifier[pop] ( identifier[key] )) keyword[if] identifier[r] [ literal[int] ]< identifier[datetime] . identifier[datetime] . identifier[utcnow] (): identifier[r] [ literal[int] ]= keyword[None] keyword[else] : identifier[cache] [ identifier[key] ]= identifier[r] keyword[except] ( identifier[KeyError] , identifier[AssertionError] ): keyword[if] identifier[len] ( identifier[cache] )>= identifier[lru_size] : identifier[cache] . identifier[popitem] ( identifier[last] = keyword[False] ) identifier[r] = identifier[cache] [ identifier[key] ]=( identifier[obj] (* identifier[args] ,** identifier[kwargs] ), identifier[datetime] . identifier[datetime] . identifier[utcnow] ()+ identifier[datetime] . identifier[timedelta] ( identifier[seconds] = identifier[cache_ttl] ) ) keyword[if] identifier[r] [ literal[int] ]: keyword[return] identifier[r] [ literal[int] ] keyword[return] identifier[obj] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[memoizer]
def local_expiring_lru(obj): """ Property that maps to a key in a local dict-like attribute. self._cache must be an OrderedDict self._cache_size must be defined as LRU size self._cache_ttl is the expiration time in seconds .. class Foo(object): def __init__(self, cache_size=5000, cache_ttl=600): self._cache = OrderedDict() self._cache_size = cache_size self._cache_ttl = cache_ttl @local_expiring_lru def expensive_meth(self, arg): pass .. """ @wraps(obj) def memoizer(*args, **kwargs): instance = args[0] lru_size = instance._cache_size cache_ttl = instance._cache_ttl if lru_size and cache_ttl: cache = instance._cache kargs = list(args) kargs[0] = id(instance) key = str((kargs, kwargs)) try: r = list(cache.pop(key)) if r[1] < datetime.datetime.utcnow(): r[0] = None # depends on [control=['if'], data=[]] else: cache[key] = r # depends on [control=['try'], data=[]] except (KeyError, AssertionError): if len(cache) >= lru_size: cache.popitem(last=False) # depends on [control=['if'], data=[]] r = cache[key] = (obj(*args, **kwargs), datetime.datetime.utcnow() + datetime.timedelta(seconds=cache_ttl)) # depends on [control=['except'], data=[]] if r[0]: return r[0] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return obj(*args, **kwargs) return memoizer
def on_open(self, ws): """Websocket on_open event handler""" def keep_alive(interval): while True: time.sleep(interval) self.ping() start_new_thread(keep_alive, (self.keep_alive_interval, ))
def function[on_open, parameter[self, ws]]: constant[Websocket on_open event handler] def function[keep_alive, parameter[interval]]: while constant[True] begin[:] call[name[time].sleep, parameter[name[interval]]] call[name[self].ping, parameter[]] call[name[start_new_thread], parameter[name[keep_alive], tuple[[<ast.Attribute object at 0x7da1b234b820>]]]]
keyword[def] identifier[on_open] ( identifier[self] , identifier[ws] ): literal[string] keyword[def] identifier[keep_alive] ( identifier[interval] ): keyword[while] keyword[True] : identifier[time] . identifier[sleep] ( identifier[interval] ) identifier[self] . identifier[ping] () identifier[start_new_thread] ( identifier[keep_alive] ,( identifier[self] . identifier[keep_alive_interval] ,))
def on_open(self, ws): """Websocket on_open event handler""" def keep_alive(interval): while True: time.sleep(interval) self.ping() # depends on [control=['while'], data=[]] start_new_thread(keep_alive, (self.keep_alive_interval,))
def apply_transformation(self, structure, return_ranked_list=False): """ Apply the transformation. Args: structure: input structure return_ranked_list (bool/int): Boolean stating whether or not multiple structures are returned. If return_ranked_list is an int, that number of structures is returned. Returns: Depending on returned_ranked list, either a transformed structure or a list of dictionaries, where each dictionary is of the form {"structure" = .... , "other_arguments"} the key "transformation" is reserved for the transformation that was actually applied to the structure. This transformation is parsed by the alchemy classes for generating a more specific transformation history. Any other information will be stored in the transformation_parameters dictionary in the transmuted structure class. """ sp = get_el_sp(self.specie_to_remove) specie_indices = [i for i in range(len(structure)) if structure[i].species == Composition({sp: 1})] trans = PartialRemoveSitesTransformation([specie_indices], [self.fraction_to_remove], algo=self.algo) return trans.apply_transformation(structure, return_ranked_list)
def function[apply_transformation, parameter[self, structure, return_ranked_list]]: constant[ Apply the transformation. Args: structure: input structure return_ranked_list (bool/int): Boolean stating whether or not multiple structures are returned. If return_ranked_list is an int, that number of structures is returned. Returns: Depending on returned_ranked list, either a transformed structure or a list of dictionaries, where each dictionary is of the form {"structure" = .... , "other_arguments"} the key "transformation" is reserved for the transformation that was actually applied to the structure. This transformation is parsed by the alchemy classes for generating a more specific transformation history. Any other information will be stored in the transformation_parameters dictionary in the transmuted structure class. ] variable[sp] assign[=] call[name[get_el_sp], parameter[name[self].specie_to_remove]] variable[specie_indices] assign[=] <ast.ListComp object at 0x7da207f03460> variable[trans] assign[=] call[name[PartialRemoveSitesTransformation], parameter[list[[<ast.Name object at 0x7da20c6e5600>]], list[[<ast.Attribute object at 0x7da20c6e6530>]]]] return[call[name[trans].apply_transformation, parameter[name[structure], name[return_ranked_list]]]]
keyword[def] identifier[apply_transformation] ( identifier[self] , identifier[structure] , identifier[return_ranked_list] = keyword[False] ): literal[string] identifier[sp] = identifier[get_el_sp] ( identifier[self] . identifier[specie_to_remove] ) identifier[specie_indices] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[structure] )) keyword[if] identifier[structure] [ identifier[i] ]. identifier[species] == identifier[Composition] ({ identifier[sp] : literal[int] })] identifier[trans] = identifier[PartialRemoveSitesTransformation] ([ identifier[specie_indices] ], [ identifier[self] . identifier[fraction_to_remove] ], identifier[algo] = identifier[self] . identifier[algo] ) keyword[return] identifier[trans] . identifier[apply_transformation] ( identifier[structure] , identifier[return_ranked_list] )
def apply_transformation(self, structure, return_ranked_list=False): """ Apply the transformation. Args: structure: input structure return_ranked_list (bool/int): Boolean stating whether or not multiple structures are returned. If return_ranked_list is an int, that number of structures is returned. Returns: Depending on returned_ranked list, either a transformed structure or a list of dictionaries, where each dictionary is of the form {"structure" = .... , "other_arguments"} the key "transformation" is reserved for the transformation that was actually applied to the structure. This transformation is parsed by the alchemy classes for generating a more specific transformation history. Any other information will be stored in the transformation_parameters dictionary in the transmuted structure class. """ sp = get_el_sp(self.specie_to_remove) specie_indices = [i for i in range(len(structure)) if structure[i].species == Composition({sp: 1})] trans = PartialRemoveSitesTransformation([specie_indices], [self.fraction_to_remove], algo=self.algo) return trans.apply_transformation(structure, return_ranked_list)
def _download_and_clean_file(filename, url): """Downloads data from url, and makes changes to match the CSV format.""" temp_file, _ = urllib.request.urlretrieve(url) with tf.gfile.Open(temp_file, 'r') as temp_eval_file: with tf.gfile.Open(filename, 'w') as eval_file: for line in temp_eval_file: line = line.strip() line = line.replace(', ', ',') if not line or ',' not in line: continue if line[-1] == '.': line = line[:-1] line += '\n' eval_file.write(line) tf.gfile.Remove(temp_file)
def function[_download_and_clean_file, parameter[filename, url]]: constant[Downloads data from url, and makes changes to match the CSV format.] <ast.Tuple object at 0x7da204620c40> assign[=] call[name[urllib].request.urlretrieve, parameter[name[url]]] with call[name[tf].gfile.Open, parameter[name[temp_file], constant[r]]] begin[:] with call[name[tf].gfile.Open, parameter[name[filename], constant[w]]] begin[:] for taget[name[line]] in starred[name[temp_eval_file]] begin[:] variable[line] assign[=] call[name[line].strip, parameter[]] variable[line] assign[=] call[name[line].replace, parameter[constant[, ], constant[,]]] if <ast.BoolOp object at 0x7da20e957760> begin[:] continue if compare[call[name[line]][<ast.UnaryOp object at 0x7da20e9556f0>] equal[==] constant[.]] begin[:] variable[line] assign[=] call[name[line]][<ast.Slice object at 0x7da20e954790>] <ast.AugAssign object at 0x7da20e954400> call[name[eval_file].write, parameter[name[line]]] call[name[tf].gfile.Remove, parameter[name[temp_file]]]
keyword[def] identifier[_download_and_clean_file] ( identifier[filename] , identifier[url] ): literal[string] identifier[temp_file] , identifier[_] = identifier[urllib] . identifier[request] . identifier[urlretrieve] ( identifier[url] ) keyword[with] identifier[tf] . identifier[gfile] . identifier[Open] ( identifier[temp_file] , literal[string] ) keyword[as] identifier[temp_eval_file] : keyword[with] identifier[tf] . identifier[gfile] . identifier[Open] ( identifier[filename] , literal[string] ) keyword[as] identifier[eval_file] : keyword[for] identifier[line] keyword[in] identifier[temp_eval_file] : identifier[line] = identifier[line] . identifier[strip] () identifier[line] = identifier[line] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] keyword[not] identifier[line] keyword[or] literal[string] keyword[not] keyword[in] identifier[line] : keyword[continue] keyword[if] identifier[line] [- literal[int] ]== literal[string] : identifier[line] = identifier[line] [:- literal[int] ] identifier[line] += literal[string] identifier[eval_file] . identifier[write] ( identifier[line] ) identifier[tf] . identifier[gfile] . identifier[Remove] ( identifier[temp_file] )
def _download_and_clean_file(filename, url): """Downloads data from url, and makes changes to match the CSV format.""" (temp_file, _) = urllib.request.urlretrieve(url) with tf.gfile.Open(temp_file, 'r') as temp_eval_file: with tf.gfile.Open(filename, 'w') as eval_file: for line in temp_eval_file: line = line.strip() line = line.replace(', ', ',') if not line or ',' not in line: continue # depends on [control=['if'], data=[]] if line[-1] == '.': line = line[:-1] # depends on [control=['if'], data=[]] line += '\n' eval_file.write(line) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['eval_file']] # depends on [control=['with'], data=['temp_eval_file']] tf.gfile.Remove(temp_file)
def list_kinesis_applications(region, filter_by_kwargs): """List all the kinesis applications along with the shards for each stream""" conn = boto.kinesis.connect_to_region(region) streams = conn.list_streams()['StreamNames'] kinesis_streams = {} for stream_name in streams: shard_ids = [] shards = conn.describe_stream(stream_name)['StreamDescription']['Shards'] for shard in shards: shard_ids.append(shard['ShardId']) kinesis_streams[stream_name] = shard_ids return kinesis_streams
def function[list_kinesis_applications, parameter[region, filter_by_kwargs]]: constant[List all the kinesis applications along with the shards for each stream] variable[conn] assign[=] call[name[boto].kinesis.connect_to_region, parameter[name[region]]] variable[streams] assign[=] call[call[name[conn].list_streams, parameter[]]][constant[StreamNames]] variable[kinesis_streams] assign[=] dictionary[[], []] for taget[name[stream_name]] in starred[name[streams]] begin[:] variable[shard_ids] assign[=] list[[]] variable[shards] assign[=] call[call[call[name[conn].describe_stream, parameter[name[stream_name]]]][constant[StreamDescription]]][constant[Shards]] for taget[name[shard]] in starred[name[shards]] begin[:] call[name[shard_ids].append, parameter[call[name[shard]][constant[ShardId]]]] call[name[kinesis_streams]][name[stream_name]] assign[=] name[shard_ids] return[name[kinesis_streams]]
keyword[def] identifier[list_kinesis_applications] ( identifier[region] , identifier[filter_by_kwargs] ): literal[string] identifier[conn] = identifier[boto] . identifier[kinesis] . identifier[connect_to_region] ( identifier[region] ) identifier[streams] = identifier[conn] . identifier[list_streams] ()[ literal[string] ] identifier[kinesis_streams] ={} keyword[for] identifier[stream_name] keyword[in] identifier[streams] : identifier[shard_ids] =[] identifier[shards] = identifier[conn] . identifier[describe_stream] ( identifier[stream_name] )[ literal[string] ][ literal[string] ] keyword[for] identifier[shard] keyword[in] identifier[shards] : identifier[shard_ids] . identifier[append] ( identifier[shard] [ literal[string] ]) identifier[kinesis_streams] [ identifier[stream_name] ]= identifier[shard_ids] keyword[return] identifier[kinesis_streams]
def list_kinesis_applications(region, filter_by_kwargs): """List all the kinesis applications along with the shards for each stream""" conn = boto.kinesis.connect_to_region(region) streams = conn.list_streams()['StreamNames'] kinesis_streams = {} for stream_name in streams: shard_ids = [] shards = conn.describe_stream(stream_name)['StreamDescription']['Shards'] for shard in shards: shard_ids.append(shard['ShardId']) # depends on [control=['for'], data=['shard']] kinesis_streams[stream_name] = shard_ids # depends on [control=['for'], data=['stream_name']] return kinesis_streams
def atan(x): """ Inverse tangent """ if isinstance(x, UncertainFunction): mcpts = np.arctan(x._mcpts) return UncertainFunction(mcpts) else: return np.arctan(x)
def function[atan, parameter[x]]: constant[ Inverse tangent ] if call[name[isinstance], parameter[name[x], name[UncertainFunction]]] begin[:] variable[mcpts] assign[=] call[name[np].arctan, parameter[name[x]._mcpts]] return[call[name[UncertainFunction], parameter[name[mcpts]]]]
keyword[def] identifier[atan] ( identifier[x] ): literal[string] keyword[if] identifier[isinstance] ( identifier[x] , identifier[UncertainFunction] ): identifier[mcpts] = identifier[np] . identifier[arctan] ( identifier[x] . identifier[_mcpts] ) keyword[return] identifier[UncertainFunction] ( identifier[mcpts] ) keyword[else] : keyword[return] identifier[np] . identifier[arctan] ( identifier[x] )
def atan(x): """ Inverse tangent """ if isinstance(x, UncertainFunction): mcpts = np.arctan(x._mcpts) return UncertainFunction(mcpts) # depends on [control=['if'], data=[]] else: return np.arctan(x)
def select_event( event = None, selection = "ejets" ): """ Select a HEP event. """ if selection == "ejets": # Require single lepton. # Require >= 4 jets. if \ 0 < len(event.el_pt) < 2 and \ len(event.jet_pt) >= 4 and \ len(event.ljet_m) >= 1: return True else: return False
def function[select_event, parameter[event, selection]]: constant[ Select a HEP event. ] if compare[name[selection] equal[==] constant[ejets]] begin[:] if <ast.BoolOp object at 0x7da1b28ff220> begin[:] return[constant[True]]
keyword[def] identifier[select_event] ( identifier[event] = keyword[None] , identifier[selection] = literal[string] ): literal[string] keyword[if] identifier[selection] == literal[string] : keyword[if] literal[int] < identifier[len] ( identifier[event] . identifier[el_pt] )< literal[int] keyword[and] identifier[len] ( identifier[event] . identifier[jet_pt] )>= literal[int] keyword[and] identifier[len] ( identifier[event] . identifier[ljet_m] )>= literal[int] : keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False]
def select_event(event=None, selection='ejets'): """ Select a HEP event. """ if selection == 'ejets': # Require single lepton. # Require >= 4 jets. if 0 < len(event.el_pt) < 2 and len(event.jet_pt) >= 4 and (len(event.ljet_m) >= 1): return True # depends on [control=['if'], data=[]] else: return False # depends on [control=['if'], data=[]]
def check_column(state, name, missing_msg=None, expand_msg=None): """Zoom in on a particular column in the query result, by name. After zooming in on a column, which is represented as a single-column query result, you can use ``has_equal_value()`` to verify whether the column in the solution query result matches the column in student query result. Args: name: name of the column to zoom in on. missing_msg: if specified, this overrides the automatically generated feedback message in case the column is missing in the student query result. expand_msg: if specified, this overrides the automatically generated feedback message that is prepended to feedback messages that are thrown further in the SCT chain. :Example: Suppose we are testing the following SELECT statements * solution: ``SELECT artist_id as id, name FROM artists`` * student : ``SELECT artist_id, name FROM artists`` We can write the following SCTs: :: # fails, since no column named id in student result Ex().check_column('id') # passes, since a column named name is in student_result Ex().check_column('name') """ if missing_msg is None: missing_msg = "We expected to find a column named `{{name}}` in the result of your query, but couldn't." if expand_msg is None: expand_msg = "Have another look at your query result. " msg_kwargs = {"name": name} # check that query returned something has_result(state) stu_res = state.student_result sol_res = state.solution_result if name not in sol_res: raise BaseException("name %s not in solution column names" % name) if name not in stu_res: _msg = state.build_message(missing_msg, fmt_kwargs=msg_kwargs) state.do_test(_msg) return state.to_child( append_message={"msg": expand_msg, "kwargs": msg_kwargs}, student_result={name: stu_res[name]}, solution_result={name: sol_res[name]}, )
def function[check_column, parameter[state, name, missing_msg, expand_msg]]: constant[Zoom in on a particular column in the query result, by name. After zooming in on a column, which is represented as a single-column query result, you can use ``has_equal_value()`` to verify whether the column in the solution query result matches the column in student query result. Args: name: name of the column to zoom in on. missing_msg: if specified, this overrides the automatically generated feedback message in case the column is missing in the student query result. expand_msg: if specified, this overrides the automatically generated feedback message that is prepended to feedback messages that are thrown further in the SCT chain. :Example: Suppose we are testing the following SELECT statements * solution: ``SELECT artist_id as id, name FROM artists`` * student : ``SELECT artist_id, name FROM artists`` We can write the following SCTs: :: # fails, since no column named id in student result Ex().check_column('id') # passes, since a column named name is in student_result Ex().check_column('name') ] if compare[name[missing_msg] is constant[None]] begin[:] variable[missing_msg] assign[=] constant[We expected to find a column named `{{name}}` in the result of your query, but couldn't.] if compare[name[expand_msg] is constant[None]] begin[:] variable[expand_msg] assign[=] constant[Have another look at your query result. 
] variable[msg_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b02be890>], [<ast.Name object at 0x7da1b02bcb20>]] call[name[has_result], parameter[name[state]]] variable[stu_res] assign[=] name[state].student_result variable[sol_res] assign[=] name[state].solution_result if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[sol_res]] begin[:] <ast.Raise object at 0x7da1b02bd4e0> if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[stu_res]] begin[:] variable[_msg] assign[=] call[name[state].build_message, parameter[name[missing_msg]]] call[name[state].do_test, parameter[name[_msg]]] return[call[name[state].to_child, parameter[]]]
keyword[def] identifier[check_column] ( identifier[state] , identifier[name] , identifier[missing_msg] = keyword[None] , identifier[expand_msg] = keyword[None] ): literal[string] keyword[if] identifier[missing_msg] keyword[is] keyword[None] : identifier[missing_msg] = literal[string] keyword[if] identifier[expand_msg] keyword[is] keyword[None] : identifier[expand_msg] = literal[string] identifier[msg_kwargs] ={ literal[string] : identifier[name] } identifier[has_result] ( identifier[state] ) identifier[stu_res] = identifier[state] . identifier[student_result] identifier[sol_res] = identifier[state] . identifier[solution_result] keyword[if] identifier[name] keyword[not] keyword[in] identifier[sol_res] : keyword[raise] identifier[BaseException] ( literal[string] % identifier[name] ) keyword[if] identifier[name] keyword[not] keyword[in] identifier[stu_res] : identifier[_msg] = identifier[state] . identifier[build_message] ( identifier[missing_msg] , identifier[fmt_kwargs] = identifier[msg_kwargs] ) identifier[state] . identifier[do_test] ( identifier[_msg] ) keyword[return] identifier[state] . identifier[to_child] ( identifier[append_message] ={ literal[string] : identifier[expand_msg] , literal[string] : identifier[msg_kwargs] }, identifier[student_result] ={ identifier[name] : identifier[stu_res] [ identifier[name] ]}, identifier[solution_result] ={ identifier[name] : identifier[sol_res] [ identifier[name] ]}, )
def check_column(state, name, missing_msg=None, expand_msg=None): """Zoom in on a particular column in the query result, by name. After zooming in on a column, which is represented as a single-column query result, you can use ``has_equal_value()`` to verify whether the column in the solution query result matches the column in student query result. Args: name: name of the column to zoom in on. missing_msg: if specified, this overrides the automatically generated feedback message in case the column is missing in the student query result. expand_msg: if specified, this overrides the automatically generated feedback message that is prepended to feedback messages that are thrown further in the SCT chain. :Example: Suppose we are testing the following SELECT statements * solution: ``SELECT artist_id as id, name FROM artists`` * student : ``SELECT artist_id, name FROM artists`` We can write the following SCTs: :: # fails, since no column named id in student result Ex().check_column('id') # passes, since a column named name is in student_result Ex().check_column('name') """ if missing_msg is None: missing_msg = "We expected to find a column named `{{name}}` in the result of your query, but couldn't." # depends on [control=['if'], data=['missing_msg']] if expand_msg is None: expand_msg = 'Have another look at your query result. ' # depends on [control=['if'], data=['expand_msg']] msg_kwargs = {'name': name} # check that query returned something has_result(state) stu_res = state.student_result sol_res = state.solution_result if name not in sol_res: raise BaseException('name %s not in solution column names' % name) # depends on [control=['if'], data=['name']] if name not in stu_res: _msg = state.build_message(missing_msg, fmt_kwargs=msg_kwargs) state.do_test(_msg) # depends on [control=['if'], data=[]] return state.to_child(append_message={'msg': expand_msg, 'kwargs': msg_kwargs}, student_result={name: stu_res[name]}, solution_result={name: sol_res[name]})
def add(self, data):
    """Append *data* (bytes) to the internal buffer, holding the lock."""
    assert isinstance(data, bytes)
    # Serialize buffer mutation so concurrent writers cannot interleave.
    with self.lock:
        self.buf = self.buf + data
def function[add, parameter[self, data]]: constant[Add data to the buffer] assert[call[name[isinstance], parameter[name[data], name[bytes]]]] with name[self].lock begin[:] <ast.AugAssign object at 0x7da20e9b2b90>
keyword[def] identifier[add] ( identifier[self] , identifier[data] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[data] , identifier[bytes] ) keyword[with] identifier[self] . identifier[lock] : identifier[self] . identifier[buf] += identifier[data]
def add(self, data): """Add data to the buffer""" assert isinstance(data, bytes) with self.lock: self.buf += data # depends on [control=['with'], data=[]]
def safe_list_set(plist, idx, fill_with, value):
    """
    Sets:
    ```
    plist[idx] = value
    ```
    If len(plist) is smaller than what idx is trying to dereferece, we first
    grow plist to get the needed capacity and fill the new elements with
    fill_with (or fill_with(), if it's a callable).
    """
    # Fast path: the index is already addressable.
    try:
        plist[idx] = value
        return
    except IndexError:
        pass

    # Number of elements required so that ``idx`` becomes addressable;
    # a negative index -k needs at least k elements.
    needed = idx + 1 if idx >= 0 else -idx
    while len(plist) < needed:
        plist.append(fill_with() if callable(fill_with) else fill_with)

    plist[idx] = value
def function[safe_list_set, parameter[plist, idx, fill_with, value]]: constant[ Sets: ``` plist[idx] = value ``` If len(plist) is smaller than what idx is trying to dereferece, we first grow plist to get the needed capacity and fill the new elements with fill_with (or fill_with(), if it's a callable). ] <ast.Try object at 0x7da18f00d6f0> variable[end] assign[=] <ast.IfExp object at 0x7da20c9934f0> for taget[name[_]] in starred[call[name[range], parameter[call[name[len], parameter[name[plist]]], name[end]]]] begin[:] if call[name[callable], parameter[name[fill_with]]] begin[:] call[name[plist].append, parameter[call[name[fill_with], parameter[]]]] call[name[plist]][name[idx]] assign[=] name[value]
keyword[def] identifier[safe_list_set] ( identifier[plist] , identifier[idx] , identifier[fill_with] , identifier[value] ): literal[string] keyword[try] : identifier[plist] [ identifier[idx] ]= identifier[value] keyword[return] keyword[except] identifier[IndexError] : keyword[pass] identifier[end] = identifier[idx] + literal[int] keyword[if] identifier[idx] >= literal[int] keyword[else] identifier[abs] ( identifier[idx] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[plist] ), identifier[end] ): keyword[if] identifier[callable] ( identifier[fill_with] ): identifier[plist] . identifier[append] ( identifier[fill_with] ()) keyword[else] : identifier[plist] . identifier[append] ( identifier[fill_with] ) identifier[plist] [ identifier[idx] ]= identifier[value]
def safe_list_set(plist, idx, fill_with, value): """ Sets: ``` plist[idx] = value ``` If len(plist) is smaller than what idx is trying to dereferece, we first grow plist to get the needed capacity and fill the new elements with fill_with (or fill_with(), if it's a callable). """ try: plist[idx] = value return # depends on [control=['try'], data=[]] except IndexError: pass # depends on [control=['except'], data=[]] # Fill in the missing positions. Handle negative indexes. end = idx + 1 if idx >= 0 else abs(idx) for _ in range(len(plist), end): if callable(fill_with): plist.append(fill_with()) # depends on [control=['if'], data=[]] else: plist.append(fill_with) # depends on [control=['for'], data=[]] plist[idx] = value
def get_shutit_pexpect_session_from_id(self, shutit_pexpect_id):
    """Return the pexpect session matching the given identifier.

    Fails hard (with a traceback) if no session carries that id.
    """
    shutit_global.shutit_global_object.yield_to_draw()
    # Linear scan over the registered sessions for a matching id.
    for session_name in self.shutit_pexpect_sessions:
        session = self.shutit_pexpect_sessions[session_name]
        if session.pexpect_session_id == shutit_pexpect_id:
            return session
    return self.fail('Should not get here in get_shutit_pexpect_session_from_id',throw_exception=True)
def function[get_shutit_pexpect_session_from_id, parameter[self, shutit_pexpect_id]]: constant[Get the pexpect session from the given identifier. ] call[name[shutit_global].shutit_global_object.yield_to_draw, parameter[]] for taget[name[key]] in starred[name[self].shutit_pexpect_sessions] begin[:] if compare[call[name[self].shutit_pexpect_sessions][name[key]].pexpect_session_id equal[==] name[shutit_pexpect_id]] begin[:] return[call[name[self].shutit_pexpect_sessions][name[key]]] return[call[name[self].fail, parameter[constant[Should not get here in get_shutit_pexpect_session_from_id]]]]
keyword[def] identifier[get_shutit_pexpect_session_from_id] ( identifier[self] , identifier[shutit_pexpect_id] ): literal[string] identifier[shutit_global] . identifier[shutit_global_object] . identifier[yield_to_draw] () keyword[for] identifier[key] keyword[in] identifier[self] . identifier[shutit_pexpect_sessions] : keyword[if] identifier[self] . identifier[shutit_pexpect_sessions] [ identifier[key] ]. identifier[pexpect_session_id] == identifier[shutit_pexpect_id] : keyword[return] identifier[self] . identifier[shutit_pexpect_sessions] [ identifier[key] ] keyword[return] identifier[self] . identifier[fail] ( literal[string] , identifier[throw_exception] = keyword[True] )
def get_shutit_pexpect_session_from_id(self, shutit_pexpect_id): """Get the pexpect session from the given identifier. """ shutit_global.shutit_global_object.yield_to_draw() for key in self.shutit_pexpect_sessions: if self.shutit_pexpect_sessions[key].pexpect_session_id == shutit_pexpect_id: return self.shutit_pexpect_sessions[key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] return self.fail('Should not get here in get_shutit_pexpect_session_from_id', throw_exception=True)
def plot_d_delta_m(fignum, Bdm, DdeltaM, s):
    """
    function to plot d (Delta M)/dB curves

    Parameters
    __________
    fignum : matplotlib figure number
    Bdm : change in field
    Ddelta M : change in delta M
    s : specimen name
    """
    plt.figure(num=fignum)
    plt.clf()
    # Stamp the program version on stand-alone (non-server) figures.
    if not isServer:
        plt.figtext(0.02, 0.01, version_num)
    # DdeltaM may hold fewer points than Bdm; align both series at the tail.
    offset = len(Bdm) - len(DdeltaM)
    plt.plot(Bdm[offset:], DdeltaM, 'b')
    plt.xlabel('B (T)')
    plt.ylabel('d (Delta M)/dB')
    plt.title(s)
def function[plot_d_delta_m, parameter[fignum, Bdm, DdeltaM, s]]: constant[ function to plot d (Delta M)/dB curves Parameters __________ fignum : matplotlib figure number Bdm : change in field Ddelta M : change in delta M s : specimen name ] call[name[plt].figure, parameter[]] call[name[plt].clf, parameter[]] if <ast.UnaryOp object at 0x7da1b056c190> begin[:] call[name[plt].figtext, parameter[constant[0.02], constant[0.01], name[version_num]]] variable[start] assign[=] binary_operation[call[name[len], parameter[name[Bdm]]] - call[name[len], parameter[name[DdeltaM]]]] call[name[plt].plot, parameter[call[name[Bdm]][<ast.Slice object at 0x7da1b056c310>], name[DdeltaM], constant[b]]] call[name[plt].xlabel, parameter[constant[B (T)]]] call[name[plt].ylabel, parameter[constant[d (Delta M)/dB]]] call[name[plt].title, parameter[name[s]]]
keyword[def] identifier[plot_d_delta_m] ( identifier[fignum] , identifier[Bdm] , identifier[DdeltaM] , identifier[s] ): literal[string] identifier[plt] . identifier[figure] ( identifier[num] = identifier[fignum] ) identifier[plt] . identifier[clf] () keyword[if] keyword[not] identifier[isServer] : identifier[plt] . identifier[figtext] ( literal[int] , literal[int] , identifier[version_num] ) identifier[start] = identifier[len] ( identifier[Bdm] )- identifier[len] ( identifier[DdeltaM] ) identifier[plt] . identifier[plot] ( identifier[Bdm] [ identifier[start] :], identifier[DdeltaM] , literal[string] ) identifier[plt] . identifier[xlabel] ( literal[string] ) identifier[plt] . identifier[ylabel] ( literal[string] ) identifier[plt] . identifier[title] ( identifier[s] )
def plot_d_delta_m(fignum, Bdm, DdeltaM, s): """ function to plot d (Delta M)/dB curves Parameters __________ fignum : matplotlib figure number Bdm : change in field Ddelta M : change in delta M s : specimen name """ plt.figure(num=fignum) plt.clf() if not isServer: plt.figtext(0.02, 0.01, version_num) # depends on [control=['if'], data=[]] start = len(Bdm) - len(DdeltaM) plt.plot(Bdm[start:], DdeltaM, 'b') plt.xlabel('B (T)') plt.ylabel('d (Delta M)/dB') plt.title(s)
def _at_content(self, calculator, rule, scope, block):
    """
    Implements @content
    """
    # Warn (but do not abort) when the enclosing mixin provided no block.
    if '@content' not in rule.options:
        log.error("Content string not found for @content (%s)", rule.file_and_line)
    # Consume the stored content (empty string when absent) and re-process
    # the rule's children with it as the unparsed body.
    rule.unparsed_contents = rule.options.pop('@content', '')
    self.manage_children(rule, scope)
def function[_at_content, parameter[self, calculator, rule, scope, block]]: constant[ Implements @content ] if compare[constant[@content] <ast.NotIn object at 0x7da2590d7190> name[rule].options] begin[:] call[name[log].error, parameter[constant[Content string not found for @content (%s)], name[rule].file_and_line]] name[rule].unparsed_contents assign[=] call[name[rule].options.pop, parameter[constant[@content], constant[]]] call[name[self].manage_children, parameter[name[rule], name[scope]]]
keyword[def] identifier[_at_content] ( identifier[self] , identifier[calculator] , identifier[rule] , identifier[scope] , identifier[block] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[rule] . identifier[options] : identifier[log] . identifier[error] ( literal[string] , identifier[rule] . identifier[file_and_line] ) identifier[rule] . identifier[unparsed_contents] = identifier[rule] . identifier[options] . identifier[pop] ( literal[string] , literal[string] ) identifier[self] . identifier[manage_children] ( identifier[rule] , identifier[scope] )
def _at_content(self, calculator, rule, scope, block): """ Implements @content """ if '@content' not in rule.options: log.error('Content string not found for @content (%s)', rule.file_and_line) # depends on [control=['if'], data=[]] rule.unparsed_contents = rule.options.pop('@content', '') self.manage_children(rule, scope)
def unused_keys(self):
    """Lists all keys which are present in the ConfigTree but which have not been accessed."""
    unaccessed = set()
    for key, child in self._children.items():
        if not isinstance(child, ConfigNode):
            # Subtree: qualify each of its unused keys with this key as prefix.
            unaccessed.update(key + '.' + sub_key for sub_key in child.unused_keys())
        elif not child.has_been_accessed():
            unaccessed.add(key)
    return unaccessed
def function[unused_keys, parameter[self]]: constant[Lists all keys which are present in the ConfigTree but which have not been accessed.] variable[unused] assign[=] call[name[set], parameter[]] for taget[tuple[[<ast.Name object at 0x7da18dc99690>, <ast.Name object at 0x7da18dc98f70>]]] in starred[call[name[self]._children.items, parameter[]]] begin[:] if call[name[isinstance], parameter[name[c], name[ConfigNode]]] begin[:] if <ast.UnaryOp object at 0x7da18c4ceb90> begin[:] call[name[unused].add, parameter[name[k]]] return[name[unused]]
keyword[def] identifier[unused_keys] ( identifier[self] ): literal[string] identifier[unused] = identifier[set] () keyword[for] identifier[k] , identifier[c] keyword[in] identifier[self] . identifier[_children] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[c] , identifier[ConfigNode] ): keyword[if] keyword[not] identifier[c] . identifier[has_been_accessed] (): identifier[unused] . identifier[add] ( identifier[k] ) keyword[else] : keyword[for] identifier[ck] keyword[in] identifier[c] . identifier[unused_keys] (): identifier[unused] . identifier[add] ( identifier[k] + literal[string] + identifier[ck] ) keyword[return] identifier[unused]
def unused_keys(self): """Lists all keys which are present in the ConfigTree but which have not been accessed.""" unused = set() for (k, c) in self._children.items(): if isinstance(c, ConfigNode): if not c.has_been_accessed(): unused.add(k) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: for ck in c.unused_keys(): unused.add(k + '.' + ck) # depends on [control=['for'], data=['ck']] # depends on [control=['for'], data=[]] return unused
def max_likelihood(self, data, weights=None, stats=None):
    """
    Fit the regression weights ``self.A`` by maximum likelihood.

    Fits one independent logistic regression (without intercept) per output
    dimension using scikit-learn, and stores each coefficient vector as a
    row of ``self.A``.

    :param data: one of
        - a list of ``(x, y)`` pairs, stacked row-wise,
        - a tuple ``(x, y)`` of input/output arrays,
        - a single array whose first ``self.D_in`` columns are inputs and
          whose remaining columns are outputs.
    :param weights: unused; kept for interface compatibility.
    :param stats: unused; kept for interface compatibility.
    :return: None; ``self.A`` is updated in place.
    """
    if isinstance(data, list):
        x = np.vstack([d[0] for d in data])
        y = np.vstack([d[1] for d in data])
    elif isinstance(data, tuple):
        assert len(data) == 2
        # BUG FIX: this branch previously validated the pair but never
        # unpacked it, leaving x and y undefined below (NameError).
        x, y = data
    elif isinstance(data, np.ndarray):
        x, y = data[:, :self.D_in], data[:, self.D_in:]
    else:
        raise Exception("Invalid data type")

    from sklearn.linear_model import LogisticRegression
    # One binary logistic regression per output dimension; no intercept so
    # the learned coefficients map directly onto a row of self.A.
    for n in progprint_xrange(self.D_out):
        lr = LogisticRegression(fit_intercept=False)
        lr.fit(x, y[:, n])
        self.A[n] = lr.coef_
def function[max_likelihood, parameter[self, data, weights, stats]]: constant[ Maximize the likelihood for given data :param data: :param weights: :param stats: :return: ] if call[name[isinstance], parameter[name[data], name[list]]] begin[:] variable[x] assign[=] call[name[np].vstack, parameter[<ast.ListComp object at 0x7da2054a4160>]] variable[y] assign[=] call[name[np].vstack, parameter[<ast.ListComp object at 0x7da2054a5f60>]] from relative_module[sklearn.linear_model] import module[LogisticRegression] for taget[name[n]] in starred[call[name[progprint_xrange], parameter[name[self].D_out]]] begin[:] variable[lr] assign[=] call[name[LogisticRegression], parameter[]] call[name[lr].fit, parameter[name[x], call[name[y]][tuple[[<ast.Slice object at 0x7da2054a5c00>, <ast.Name object at 0x7da2054a5120>]]]]] call[name[self].A][name[n]] assign[=] name[lr].coef_
keyword[def] identifier[max_likelihood] ( identifier[self] , identifier[data] , identifier[weights] = keyword[None] , identifier[stats] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[data] , identifier[list] ): identifier[x] = identifier[np] . identifier[vstack] ([ identifier[d] [ literal[int] ] keyword[for] identifier[d] keyword[in] identifier[data] ]) identifier[y] = identifier[np] . identifier[vstack] ([ identifier[d] [ literal[int] ] keyword[for] identifier[d] keyword[in] identifier[data] ]) keyword[elif] identifier[isinstance] ( identifier[data] , identifier[tuple] ): keyword[assert] identifier[len] ( identifier[data] )== literal[int] keyword[elif] identifier[isinstance] ( identifier[data] , identifier[np] . identifier[ndarray] ): identifier[x] , identifier[y] = identifier[data] [:,: identifier[self] . identifier[D_in] ], identifier[data] [:, identifier[self] . identifier[D_in] :] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[from] identifier[sklearn] . identifier[linear_model] keyword[import] identifier[LogisticRegression] keyword[for] identifier[n] keyword[in] identifier[progprint_xrange] ( identifier[self] . identifier[D_out] ): identifier[lr] = identifier[LogisticRegression] ( identifier[fit_intercept] = keyword[False] ) identifier[lr] . identifier[fit] ( identifier[x] , identifier[y] [:, identifier[n] ]) identifier[self] . identifier[A] [ identifier[n] ]= identifier[lr] . identifier[coef_]
def max_likelihood(self, data, weights=None, stats=None): """ Maximize the likelihood for given data :param data: :param weights: :param stats: :return: """ if isinstance(data, list): x = np.vstack([d[0] for d in data]) y = np.vstack([d[1] for d in data]) # depends on [control=['if'], data=[]] elif isinstance(data, tuple): assert len(data) == 2 # depends on [control=['if'], data=[]] elif isinstance(data, np.ndarray): (x, y) = (data[:, :self.D_in], data[:, self.D_in:]) # depends on [control=['if'], data=[]] else: raise Exception('Invalid data type') from sklearn.linear_model import LogisticRegression for n in progprint_xrange(self.D_out): lr = LogisticRegression(fit_intercept=False) lr.fit(x, y[:, n]) self.A[n] = lr.coef_ # depends on [control=['for'], data=['n']]
def _decode_codepage(codepage, data):
    """
    Args:
        codepage (int)
        data (bytes)
    Returns:
        `text`

    Decodes data using the given codepage. If some data can't be decoded
    using the codepage it will not fail.
    """
    assert isinstance(data, bytes)

    if not data:
        return u""

    # First call: pass no buffer to ask only for the required output length.
    needed = winapi.MultiByteToWideChar(codepage, 0, data, len(data), None, 0)
    if needed == 0:
        raise ctypes.WinError()

    # Second call: perform the conversion into a buffer of exactly that size.
    out = ctypes.create_unicode_buffer(needed)
    written = winapi.MultiByteToWideChar(
        codepage, 0, data, len(data), out, needed)
    if written == 0:
        raise ctypes.WinError()

    return out[:]
def function[_decode_codepage, parameter[codepage, data]]: constant[ Args: codepage (int) data (bytes) Returns: `text` Decodes data using the given codepage. If some data can't be decoded using the codepage it will not fail. ] assert[call[name[isinstance], parameter[name[data], name[bytes]]]] if <ast.UnaryOp object at 0x7da1b1e94370> begin[:] return[constant[]] variable[length] assign[=] call[name[winapi].MultiByteToWideChar, parameter[name[codepage], constant[0], name[data], call[name[len], parameter[name[data]]], constant[None], constant[0]]] if compare[name[length] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b20f9960> variable[buf] assign[=] call[name[ctypes].create_unicode_buffer, parameter[name[length]]] variable[length] assign[=] call[name[winapi].MultiByteToWideChar, parameter[name[codepage], constant[0], name[data], call[name[len], parameter[name[data]]], name[buf], name[length]]] if compare[name[length] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b20f9360> return[call[name[buf]][<ast.Slice object at 0x7da1b20fb880>]]
keyword[def] identifier[_decode_codepage] ( identifier[codepage] , identifier[data] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[data] , identifier[bytes] ) keyword[if] keyword[not] identifier[data] : keyword[return] literal[string] identifier[length] = identifier[winapi] . identifier[MultiByteToWideChar] ( identifier[codepage] , literal[int] , identifier[data] , identifier[len] ( identifier[data] ), keyword[None] , literal[int] ) keyword[if] identifier[length] == literal[int] : keyword[raise] identifier[ctypes] . identifier[WinError] () identifier[buf] = identifier[ctypes] . identifier[create_unicode_buffer] ( identifier[length] ) identifier[length] = identifier[winapi] . identifier[MultiByteToWideChar] ( identifier[codepage] , literal[int] , identifier[data] , identifier[len] ( identifier[data] ), identifier[buf] , identifier[length] ) keyword[if] identifier[length] == literal[int] : keyword[raise] identifier[ctypes] . identifier[WinError] () keyword[return] identifier[buf] [:]
def _decode_codepage(codepage, data): """ Args: codepage (int) data (bytes) Returns: `text` Decodes data using the given codepage. If some data can't be decoded using the codepage it will not fail. """ assert isinstance(data, bytes) if not data: return u'' # depends on [control=['if'], data=[]] # get the required buffer length first length = winapi.MultiByteToWideChar(codepage, 0, data, len(data), None, 0) if length == 0: raise ctypes.WinError() # depends on [control=['if'], data=[]] # now decode buf = ctypes.create_unicode_buffer(length) length = winapi.MultiByteToWideChar(codepage, 0, data, len(data), buf, length) if length == 0: raise ctypes.WinError() # depends on [control=['if'], data=[]] return buf[:]
def get_transactions_csv(self, include_investment=False, acct=0):
    """Returns the raw CSV transaction data as downloaded from Mint.

    If include_investment == True, also includes transactions that Mint
    classifies as investment-related. You may find that the investment
    transaction data is not sufficiently detailed to actually be useful,
    however.
    """
    # Specifying accountId=0 causes Mint to return investment transactions
    # as well; otherwise they are skipped by default, so only send the
    # parameter when it is actually needed.
    if include_investment or acct > 0:
        params = {'accountId': acct}
    else:
        params = None
    response = self.request_and_check(
        '{}/transactionDownload.event'.format(MINT_ROOT_URL),
        params=params,
        expected_content_type='text/csv')
    return response.content
def function[get_transactions_csv, parameter[self, include_investment, acct]]: constant[Returns the raw CSV transaction data as downloaded from Mint. If include_investment == True, also includes transactions that Mint classifies as investment-related. You may find that the investment transaction data is not sufficiently detailed to actually be useful, however. ] variable[params] assign[=] constant[None] if <ast.BoolOp object at 0x7da2054a7400> begin[:] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da2054a6110>], [<ast.Name object at 0x7da2054a6860>]] variable[result] assign[=] call[name[self].request_and_check, parameter[call[constant[{}/transactionDownload.event].format, parameter[name[MINT_ROOT_URL]]]]] return[name[result].content]
keyword[def] identifier[get_transactions_csv] ( identifier[self] , identifier[include_investment] = keyword[False] , identifier[acct] = literal[int] ): literal[string] identifier[params] = keyword[None] keyword[if] identifier[include_investment] keyword[or] identifier[acct] > literal[int] : identifier[params] ={ literal[string] : identifier[acct] } identifier[result] = identifier[self] . identifier[request_and_check] ( literal[string] . identifier[format] ( identifier[MINT_ROOT_URL] ), identifier[params] = identifier[params] , identifier[expected_content_type] = literal[string] ) keyword[return] identifier[result] . identifier[content]
def get_transactions_csv(self, include_investment=False, acct=0): """Returns the raw CSV transaction data as downloaded from Mint. If include_investment == True, also includes transactions that Mint classifies as investment-related. You may find that the investment transaction data is not sufficiently detailed to actually be useful, however. """ # Specifying accountId=0 causes Mint to return investment # transactions as well. Otherwise they are skipped by # default. params = None if include_investment or acct > 0: params = {'accountId': acct} # depends on [control=['if'], data=[]] result = self.request_and_check('{}/transactionDownload.event'.format(MINT_ROOT_URL), params=params, expected_content_type='text/csv') return result.content
def sanity_check(
        state: MediatorTransferState,
        channelidentifiers_to_channels: ChannelMap,
) -> None:
    """ Check invariants that must hold. """

    # A paid transfer implies the secret has been learned.
    payee_and_payer_states = itertools.chain(
        (pair.payee_state for pair in state.transfers_pair),
        (pair.payer_state for pair in state.transfers_pair),
    )
    if any(s in STATE_TRANSFER_PAID for s in payee_and_payer_states):
        assert state.secret is not None

    # The "transitivity" of these values along the chain is covered by the
    # per-pair almost_equal checks below.
    if state.transfers_pair:
        first_pair = state.transfers_pair[0]
        assert state.secrethash == first_pair.payer_transfer.lock.secrethash

    for pair in state.transfers_pair:
        payee_channel = get_payee_channel(
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            transfer_pair=pair,
        )
        if not payee_channel:
            # Channel could have been removed
            continue

        assert is_send_transfer_almost_equal(
            send_channel=payee_channel,
            send=pair.payee_transfer,
            received=pair.payer_transfer,
        )
        assert pair.payer_state in pair.valid_payer_states
        assert pair.payee_state in pair.valid_payee_states

    # Every refund pair must mirror the transfer it refunds.
    for sent_pair, refund_pair in zip(state.transfers_pair[:-1], state.transfers_pair[1:]):
        assert sent_pair.payee_address == refund_pair.payer_address

        payer_channel = get_payer_channel(
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            transfer_pair=refund_pair,
        )
        if not payer_channel:
            # Channel could have been removed
            continue

        assert is_send_transfer_almost_equal(
            send_channel=payer_channel,
            send=sent_pair.payee_transfer,
            received=refund_pair.payer_transfer,
        )

    # A transfer still waiting for routing must mirror the last sent one.
    if state.waiting_transfer and state.transfers_pair:
        final_pair = state.transfers_pair[-1]
        payee_channel = get_payee_channel(
            channelidentifiers_to_channels=channelidentifiers_to_channels,
            transfer_pair=final_pair,
        )
        # Channel could have been removed
        if payee_channel:
            assert is_send_transfer_almost_equal(
                send_channel=payee_channel,
                send=final_pair.payee_transfer,
                received=state.waiting_transfer.transfer,
            )
def function[sanity_check, parameter[state, channelidentifiers_to_channels]]: constant[ Check invariants that must hold. ] variable[all_transfers_states] assign[=] call[name[itertools].chain, parameter[<ast.GeneratorExp object at 0x7da1b19511e0>, <ast.GeneratorExp object at 0x7da1b1951090>]] if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b1950400>]] begin[:] assert[compare[name[state].secret is_not constant[None]]] if name[state].transfers_pair begin[:] variable[first_pair] assign[=] call[name[state].transfers_pair][constant[0]] assert[compare[name[state].secrethash equal[==] name[first_pair].payer_transfer.lock.secrethash]] for taget[name[pair]] in starred[name[state].transfers_pair] begin[:] variable[payee_channel] assign[=] call[name[get_payee_channel], parameter[]] if <ast.UnaryOp object at 0x7da1b196e8c0> begin[:] continue assert[call[name[is_send_transfer_almost_equal], parameter[]]] assert[compare[name[pair].payer_state in name[pair].valid_payer_states]] assert[compare[name[pair].payee_state in name[pair].valid_payee_states]] for taget[tuple[[<ast.Name object at 0x7da1b196f040>, <ast.Name object at 0x7da1b196d5a0>]]] in starred[call[name[zip], parameter[call[name[state].transfers_pair][<ast.Slice object at 0x7da1b196d870>], call[name[state].transfers_pair][<ast.Slice object at 0x7da1b196f1f0>]]]] begin[:] assert[compare[name[original].payee_address equal[==] name[refund].payer_address]] variable[payer_channel] assign[=] call[name[get_payer_channel], parameter[]] if <ast.UnaryOp object at 0x7da1b18bded0> begin[:] continue variable[transfer_sent] assign[=] name[original].payee_transfer variable[transfer_received] assign[=] name[refund].payer_transfer assert[call[name[is_send_transfer_almost_equal], parameter[]]] if <ast.BoolOp object at 0x7da1b184c460> begin[:] variable[last_transfer_pair] assign[=] call[name[state].transfers_pair][<ast.UnaryOp object at 0x7da1b19f0ee0>] variable[payee_channel] assign[=] call[name[get_payee_channel], 
parameter[]] if name[payee_channel] begin[:] variable[transfer_sent] assign[=] name[last_transfer_pair].payee_transfer variable[transfer_received] assign[=] name[state].waiting_transfer.transfer assert[call[name[is_send_transfer_almost_equal], parameter[]]]
keyword[def] identifier[sanity_check] ( identifier[state] : identifier[MediatorTransferState] , identifier[channelidentifiers_to_channels] : identifier[ChannelMap] , )-> keyword[None] : literal[string] identifier[all_transfers_states] = identifier[itertools] . identifier[chain] ( ( identifier[pair] . identifier[payee_state] keyword[for] identifier[pair] keyword[in] identifier[state] . identifier[transfers_pair] ), ( identifier[pair] . identifier[payer_state] keyword[for] identifier[pair] keyword[in] identifier[state] . identifier[transfers_pair] ), ) keyword[if] identifier[any] ( identifier[state] keyword[in] identifier[STATE_TRANSFER_PAID] keyword[for] identifier[state] keyword[in] identifier[all_transfers_states] ): keyword[assert] identifier[state] . identifier[secret] keyword[is] keyword[not] keyword[None] keyword[if] identifier[state] . identifier[transfers_pair] : identifier[first_pair] = identifier[state] . identifier[transfers_pair] [ literal[int] ] keyword[assert] identifier[state] . identifier[secrethash] == identifier[first_pair] . identifier[payer_transfer] . identifier[lock] . identifier[secrethash] keyword[for] identifier[pair] keyword[in] identifier[state] . identifier[transfers_pair] : identifier[payee_channel] = identifier[get_payee_channel] ( identifier[channelidentifiers_to_channels] = identifier[channelidentifiers_to_channels] , identifier[transfer_pair] = identifier[pair] , ) keyword[if] keyword[not] identifier[payee_channel] : keyword[continue] keyword[assert] identifier[is_send_transfer_almost_equal] ( identifier[send_channel] = identifier[payee_channel] , identifier[send] = identifier[pair] . identifier[payee_transfer] , identifier[received] = identifier[pair] . identifier[payer_transfer] , ) keyword[assert] identifier[pair] . identifier[payer_state] keyword[in] identifier[pair] . identifier[valid_payer_states] keyword[assert] identifier[pair] . identifier[payee_state] keyword[in] identifier[pair] . 
identifier[valid_payee_states] keyword[for] identifier[original] , identifier[refund] keyword[in] identifier[zip] ( identifier[state] . identifier[transfers_pair] [:- literal[int] ], identifier[state] . identifier[transfers_pair] [ literal[int] :]): keyword[assert] identifier[original] . identifier[payee_address] == identifier[refund] . identifier[payer_address] identifier[payer_channel] = identifier[get_payer_channel] ( identifier[channelidentifiers_to_channels] = identifier[channelidentifiers_to_channels] , identifier[transfer_pair] = identifier[refund] , ) keyword[if] keyword[not] identifier[payer_channel] : keyword[continue] identifier[transfer_sent] = identifier[original] . identifier[payee_transfer] identifier[transfer_received] = identifier[refund] . identifier[payer_transfer] keyword[assert] identifier[is_send_transfer_almost_equal] ( identifier[send_channel] = identifier[payer_channel] , identifier[send] = identifier[transfer_sent] , identifier[received] = identifier[transfer_received] , ) keyword[if] identifier[state] . identifier[waiting_transfer] keyword[and] identifier[state] . identifier[transfers_pair] : identifier[last_transfer_pair] = identifier[state] . identifier[transfers_pair] [- literal[int] ] identifier[payee_channel] = identifier[get_payee_channel] ( identifier[channelidentifiers_to_channels] = identifier[channelidentifiers_to_channels] , identifier[transfer_pair] = identifier[last_transfer_pair] , ) keyword[if] identifier[payee_channel] : identifier[transfer_sent] = identifier[last_transfer_pair] . identifier[payee_transfer] identifier[transfer_received] = identifier[state] . identifier[waiting_transfer] . identifier[transfer] keyword[assert] identifier[is_send_transfer_almost_equal] ( identifier[send_channel] = identifier[payee_channel] , identifier[send] = identifier[transfer_sent] , identifier[received] = identifier[transfer_received] , )
def sanity_check(state: MediatorTransferState, channelidentifiers_to_channels: ChannelMap) -> None: """ Check invariants that must hold. """ # if a transfer is paid we must know the secret all_transfers_states = itertools.chain((pair.payee_state for pair in state.transfers_pair), (pair.payer_state for pair in state.transfers_pair)) if any((state in STATE_TRANSFER_PAID for state in all_transfers_states)): assert state.secret is not None # depends on [control=['if'], data=[]] # the "transitivity" for these values is checked below as part of # almost_equal check if state.transfers_pair: first_pair = state.transfers_pair[0] assert state.secrethash == first_pair.payer_transfer.lock.secrethash # depends on [control=['if'], data=[]] for pair in state.transfers_pair: payee_channel = get_payee_channel(channelidentifiers_to_channels=channelidentifiers_to_channels, transfer_pair=pair) # Channel could have been removed if not payee_channel: continue # depends on [control=['if'], data=[]] assert is_send_transfer_almost_equal(send_channel=payee_channel, send=pair.payee_transfer, received=pair.payer_transfer) assert pair.payer_state in pair.valid_payer_states assert pair.payee_state in pair.valid_payee_states # depends on [control=['for'], data=['pair']] for (original, refund) in zip(state.transfers_pair[:-1], state.transfers_pair[1:]): assert original.payee_address == refund.payer_address payer_channel = get_payer_channel(channelidentifiers_to_channels=channelidentifiers_to_channels, transfer_pair=refund) # Channel could have been removed if not payer_channel: continue # depends on [control=['if'], data=[]] transfer_sent = original.payee_transfer transfer_received = refund.payer_transfer assert is_send_transfer_almost_equal(send_channel=payer_channel, send=transfer_sent, received=transfer_received) # depends on [control=['for'], data=[]] if state.waiting_transfer and state.transfers_pair: last_transfer_pair = state.transfers_pair[-1] payee_channel = 
get_payee_channel(channelidentifiers_to_channels=channelidentifiers_to_channels, transfer_pair=last_transfer_pair) # Channel could have been removed if payee_channel: transfer_sent = last_transfer_pair.payee_transfer transfer_received = state.waiting_transfer.transfer assert is_send_transfer_almost_equal(send_channel=payee_channel, send=transfer_sent, received=transfer_received) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def popitem (self): """Remove oldest key from dict and return item.""" if self._keys: k = self._keys[0] v = self[k] del self[k] return (k, v) raise KeyError("popitem() on empty dictionary")
def function[popitem, parameter[self]]: constant[Remove oldest key from dict and return item.] if name[self]._keys begin[:] variable[k] assign[=] call[name[self]._keys][constant[0]] variable[v] assign[=] call[name[self]][name[k]] <ast.Delete object at 0x7da1b0aba3b0> return[tuple[[<ast.Name object at 0x7da1b0ab8610>, <ast.Name object at 0x7da1b0ab9120>]]] <ast.Raise object at 0x7da1b0ab9570>
keyword[def] identifier[popitem] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_keys] : identifier[k] = identifier[self] . identifier[_keys] [ literal[int] ] identifier[v] = identifier[self] [ identifier[k] ] keyword[del] identifier[self] [ identifier[k] ] keyword[return] ( identifier[k] , identifier[v] ) keyword[raise] identifier[KeyError] ( literal[string] )
def popitem(self): """Remove oldest key from dict and return item.""" if self._keys: k = self._keys[0] v = self[k] del self[k] return (k, v) # depends on [control=['if'], data=[]] raise KeyError('popitem() on empty dictionary')
def _update_solution_data(self, s, HH, CC, C0): """ Returns the voltage angle and generator set-point vectors. """ x = s["x"] Va_v = self.om.get_var("Va") Pg_v = self.om.get_var("Pg") Va = x[Va_v.i1:Va_v.iN + 1] Pg = x[Pg_v.i1:Pg_v.iN + 1] # f = 0.5 * dot(x.T * HH, x) + dot(CC.T, x) s["f"] = s["f"] + C0 # Put the objective function value in the solution. # solution["f"] = f return Va, Pg
def function[_update_solution_data, parameter[self, s, HH, CC, C0]]: constant[ Returns the voltage angle and generator set-point vectors. ] variable[x] assign[=] call[name[s]][constant[x]] variable[Va_v] assign[=] call[name[self].om.get_var, parameter[constant[Va]]] variable[Pg_v] assign[=] call[name[self].om.get_var, parameter[constant[Pg]]] variable[Va] assign[=] call[name[x]][<ast.Slice object at 0x7da1b2492410>] variable[Pg] assign[=] call[name[x]][<ast.Slice object at 0x7da1b2490b50>] call[name[s]][constant[f]] assign[=] binary_operation[call[name[s]][constant[f]] + name[C0]] return[tuple[[<ast.Name object at 0x7da1b2491420>, <ast.Name object at 0x7da1b2490c10>]]]
keyword[def] identifier[_update_solution_data] ( identifier[self] , identifier[s] , identifier[HH] , identifier[CC] , identifier[C0] ): literal[string] identifier[x] = identifier[s] [ literal[string] ] identifier[Va_v] = identifier[self] . identifier[om] . identifier[get_var] ( literal[string] ) identifier[Pg_v] = identifier[self] . identifier[om] . identifier[get_var] ( literal[string] ) identifier[Va] = identifier[x] [ identifier[Va_v] . identifier[i1] : identifier[Va_v] . identifier[iN] + literal[int] ] identifier[Pg] = identifier[x] [ identifier[Pg_v] . identifier[i1] : identifier[Pg_v] . identifier[iN] + literal[int] ] identifier[s] [ literal[string] ]= identifier[s] [ literal[string] ]+ identifier[C0] keyword[return] identifier[Va] , identifier[Pg]
def _update_solution_data(self, s, HH, CC, C0): """ Returns the voltage angle and generator set-point vectors. """ x = s['x'] Va_v = self.om.get_var('Va') Pg_v = self.om.get_var('Pg') Va = x[Va_v.i1:Va_v.iN + 1] Pg = x[Pg_v.i1:Pg_v.iN + 1] # f = 0.5 * dot(x.T * HH, x) + dot(CC.T, x) s['f'] = s['f'] + C0 # Put the objective function value in the solution. # solution["f"] = f return (Va, Pg)
def kube_node_status_condition(self, metric, scraper_config): """ The ready status of a cluster node. v1.0+""" base_check_name = scraper_config['namespace'] + '.node' metric_name = scraper_config['namespace'] + '.nodes.by_condition' by_condition_counter = Counter() for sample in metric.samples: node_tag = self._label_to_tag("node", sample[self.SAMPLE_LABELS], scraper_config) self._condition_to_tag_check( sample, base_check_name, self.condition_to_status_positive, scraper_config, tags=[node_tag] + scraper_config['custom_tags'], ) # Counts aggregated cluster-wide to avoid no-data issues on node churn, # node granularity available in the service checks tags = [ self._label_to_tag("condition", sample[self.SAMPLE_LABELS], scraper_config), self._label_to_tag("status", sample[self.SAMPLE_LABELS], scraper_config), ] + scraper_config['custom_tags'] by_condition_counter[tuple(sorted(tags))] += sample[self.SAMPLE_VALUE] for tags, count in iteritems(by_condition_counter): self.gauge(metric_name, count, tags=list(tags))
def function[kube_node_status_condition, parameter[self, metric, scraper_config]]: constant[ The ready status of a cluster node. v1.0+] variable[base_check_name] assign[=] binary_operation[call[name[scraper_config]][constant[namespace]] + constant[.node]] variable[metric_name] assign[=] binary_operation[call[name[scraper_config]][constant[namespace]] + constant[.nodes.by_condition]] variable[by_condition_counter] assign[=] call[name[Counter], parameter[]] for taget[name[sample]] in starred[name[metric].samples] begin[:] variable[node_tag] assign[=] call[name[self]._label_to_tag, parameter[constant[node], call[name[sample]][name[self].SAMPLE_LABELS], name[scraper_config]]] call[name[self]._condition_to_tag_check, parameter[name[sample], name[base_check_name], name[self].condition_to_status_positive, name[scraper_config]]] variable[tags] assign[=] binary_operation[list[[<ast.Call object at 0x7da18f00c1c0>, <ast.Call object at 0x7da18f00dbd0>]] + call[name[scraper_config]][constant[custom_tags]]] <ast.AugAssign object at 0x7da18f00c790> for taget[tuple[[<ast.Name object at 0x7da18f00fc40>, <ast.Name object at 0x7da18f00d7e0>]]] in starred[call[name[iteritems], parameter[name[by_condition_counter]]]] begin[:] call[name[self].gauge, parameter[name[metric_name], name[count]]]
keyword[def] identifier[kube_node_status_condition] ( identifier[self] , identifier[metric] , identifier[scraper_config] ): literal[string] identifier[base_check_name] = identifier[scraper_config] [ literal[string] ]+ literal[string] identifier[metric_name] = identifier[scraper_config] [ literal[string] ]+ literal[string] identifier[by_condition_counter] = identifier[Counter] () keyword[for] identifier[sample] keyword[in] identifier[metric] . identifier[samples] : identifier[node_tag] = identifier[self] . identifier[_label_to_tag] ( literal[string] , identifier[sample] [ identifier[self] . identifier[SAMPLE_LABELS] ], identifier[scraper_config] ) identifier[self] . identifier[_condition_to_tag_check] ( identifier[sample] , identifier[base_check_name] , identifier[self] . identifier[condition_to_status_positive] , identifier[scraper_config] , identifier[tags] =[ identifier[node_tag] ]+ identifier[scraper_config] [ literal[string] ], ) identifier[tags] =[ identifier[self] . identifier[_label_to_tag] ( literal[string] , identifier[sample] [ identifier[self] . identifier[SAMPLE_LABELS] ], identifier[scraper_config] ), identifier[self] . identifier[_label_to_tag] ( literal[string] , identifier[sample] [ identifier[self] . identifier[SAMPLE_LABELS] ], identifier[scraper_config] ), ]+ identifier[scraper_config] [ literal[string] ] identifier[by_condition_counter] [ identifier[tuple] ( identifier[sorted] ( identifier[tags] ))]+= identifier[sample] [ identifier[self] . identifier[SAMPLE_VALUE] ] keyword[for] identifier[tags] , identifier[count] keyword[in] identifier[iteritems] ( identifier[by_condition_counter] ): identifier[self] . identifier[gauge] ( identifier[metric_name] , identifier[count] , identifier[tags] = identifier[list] ( identifier[tags] ))
def kube_node_status_condition(self, metric, scraper_config): """ The ready status of a cluster node. v1.0+""" base_check_name = scraper_config['namespace'] + '.node' metric_name = scraper_config['namespace'] + '.nodes.by_condition' by_condition_counter = Counter() for sample in metric.samples: node_tag = self._label_to_tag('node', sample[self.SAMPLE_LABELS], scraper_config) self._condition_to_tag_check(sample, base_check_name, self.condition_to_status_positive, scraper_config, tags=[node_tag] + scraper_config['custom_tags']) # Counts aggregated cluster-wide to avoid no-data issues on node churn, # node granularity available in the service checks tags = [self._label_to_tag('condition', sample[self.SAMPLE_LABELS], scraper_config), self._label_to_tag('status', sample[self.SAMPLE_LABELS], scraper_config)] + scraper_config['custom_tags'] by_condition_counter[tuple(sorted(tags))] += sample[self.SAMPLE_VALUE] # depends on [control=['for'], data=['sample']] for (tags, count) in iteritems(by_condition_counter): self.gauge(metric_name, count, tags=list(tags)) # depends on [control=['for'], data=[]]
def readTableFromDelimited(f, separator="\t"): """ Reads a table object from given plain delimited file. """ rowNames = [] columnNames = [] matrix = [] first = True for line in f.readlines(): line = line.rstrip() if len(line) == 0: continue row = line.split(separator) if first: columnNames = row[1:] first = False else: rowNames.append(row[0]) matrix.append([float(c) for c in row[1:]]) return Table(rowNames, columnNames, matrix)
def function[readTableFromDelimited, parameter[f, separator]]: constant[ Reads a table object from given plain delimited file. ] variable[rowNames] assign[=] list[[]] variable[columnNames] assign[=] list[[]] variable[matrix] assign[=] list[[]] variable[first] assign[=] constant[True] for taget[name[line]] in starred[call[name[f].readlines, parameter[]]] begin[:] variable[line] assign[=] call[name[line].rstrip, parameter[]] if compare[call[name[len], parameter[name[line]]] equal[==] constant[0]] begin[:] continue variable[row] assign[=] call[name[line].split, parameter[name[separator]]] if name[first] begin[:] variable[columnNames] assign[=] call[name[row]][<ast.Slice object at 0x7da18f7202e0>] variable[first] assign[=] constant[False] return[call[name[Table], parameter[name[rowNames], name[columnNames], name[matrix]]]]
keyword[def] identifier[readTableFromDelimited] ( identifier[f] , identifier[separator] = literal[string] ): literal[string] identifier[rowNames] =[] identifier[columnNames] =[] identifier[matrix] =[] identifier[first] = keyword[True] keyword[for] identifier[line] keyword[in] identifier[f] . identifier[readlines] (): identifier[line] = identifier[line] . identifier[rstrip] () keyword[if] identifier[len] ( identifier[line] )== literal[int] : keyword[continue] identifier[row] = identifier[line] . identifier[split] ( identifier[separator] ) keyword[if] identifier[first] : identifier[columnNames] = identifier[row] [ literal[int] :] identifier[first] = keyword[False] keyword[else] : identifier[rowNames] . identifier[append] ( identifier[row] [ literal[int] ]) identifier[matrix] . identifier[append] ([ identifier[float] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[row] [ literal[int] :]]) keyword[return] identifier[Table] ( identifier[rowNames] , identifier[columnNames] , identifier[matrix] )
def readTableFromDelimited(f, separator='\t'): """ Reads a table object from given plain delimited file. """ rowNames = [] columnNames = [] matrix = [] first = True for line in f.readlines(): line = line.rstrip() if len(line) == 0: continue # depends on [control=['if'], data=[]] row = line.split(separator) if first: columnNames = row[1:] first = False # depends on [control=['if'], data=[]] else: rowNames.append(row[0]) matrix.append([float(c) for c in row[1:]]) # depends on [control=['for'], data=['line']] return Table(rowNames, columnNames, matrix)
def _transform(self, df): """Private transform method of a Transformer. This serves as batch-prediction method for our purposes. """ output_col = self.getOutputCol() label_col = self.getLabelCol() new_schema = copy.deepcopy(df.schema) new_schema.add(StructField(output_col, StringType(), True)) rdd = df.rdd.coalesce(1) features = np.asarray( rdd.map(lambda x: from_vector(x.features)).collect()) # Note that we collect, since executing this on the rdd would require model serialization once again model = model_from_yaml(self.get_keras_model_config()) model.set_weights(self.weights.value) predictions = rdd.ctx.parallelize( model.predict_classes(features)).coalesce(1) predictions = predictions.map(lambda x: tuple(str(x))) results_rdd = rdd.zip(predictions).map(lambda x: x[0] + x[1]) results_df = df.sql_ctx.createDataFrame(results_rdd, new_schema) results_df = results_df.withColumn( output_col, results_df[output_col].cast(DoubleType())) results_df = results_df.withColumn( label_col, results_df[label_col].cast(DoubleType())) return results_df
def function[_transform, parameter[self, df]]: constant[Private transform method of a Transformer. This serves as batch-prediction method for our purposes. ] variable[output_col] assign[=] call[name[self].getOutputCol, parameter[]] variable[label_col] assign[=] call[name[self].getLabelCol, parameter[]] variable[new_schema] assign[=] call[name[copy].deepcopy, parameter[name[df].schema]] call[name[new_schema].add, parameter[call[name[StructField], parameter[name[output_col], call[name[StringType], parameter[]], constant[True]]]]] variable[rdd] assign[=] call[name[df].rdd.coalesce, parameter[constant[1]]] variable[features] assign[=] call[name[np].asarray, parameter[call[call[name[rdd].map, parameter[<ast.Lambda object at 0x7da18ede4ca0>]].collect, parameter[]]]] variable[model] assign[=] call[name[model_from_yaml], parameter[call[name[self].get_keras_model_config, parameter[]]]] call[name[model].set_weights, parameter[name[self].weights.value]] variable[predictions] assign[=] call[call[name[rdd].ctx.parallelize, parameter[call[name[model].predict_classes, parameter[name[features]]]]].coalesce, parameter[constant[1]]] variable[predictions] assign[=] call[name[predictions].map, parameter[<ast.Lambda object at 0x7da18ede53c0>]] variable[results_rdd] assign[=] call[call[name[rdd].zip, parameter[name[predictions]]].map, parameter[<ast.Lambda object at 0x7da18ede5210>]] variable[results_df] assign[=] call[name[df].sql_ctx.createDataFrame, parameter[name[results_rdd], name[new_schema]]] variable[results_df] assign[=] call[name[results_df].withColumn, parameter[name[output_col], call[call[name[results_df]][name[output_col]].cast, parameter[call[name[DoubleType], parameter[]]]]]] variable[results_df] assign[=] call[name[results_df].withColumn, parameter[name[label_col], call[call[name[results_df]][name[label_col]].cast, parameter[call[name[DoubleType], parameter[]]]]]] return[name[results_df]]
keyword[def] identifier[_transform] ( identifier[self] , identifier[df] ): literal[string] identifier[output_col] = identifier[self] . identifier[getOutputCol] () identifier[label_col] = identifier[self] . identifier[getLabelCol] () identifier[new_schema] = identifier[copy] . identifier[deepcopy] ( identifier[df] . identifier[schema] ) identifier[new_schema] . identifier[add] ( identifier[StructField] ( identifier[output_col] , identifier[StringType] (), keyword[True] )) identifier[rdd] = identifier[df] . identifier[rdd] . identifier[coalesce] ( literal[int] ) identifier[features] = identifier[np] . identifier[asarray] ( identifier[rdd] . identifier[map] ( keyword[lambda] identifier[x] : identifier[from_vector] ( identifier[x] . identifier[features] )). identifier[collect] ()) identifier[model] = identifier[model_from_yaml] ( identifier[self] . identifier[get_keras_model_config] ()) identifier[model] . identifier[set_weights] ( identifier[self] . identifier[weights] . identifier[value] ) identifier[predictions] = identifier[rdd] . identifier[ctx] . identifier[parallelize] ( identifier[model] . identifier[predict_classes] ( identifier[features] )). identifier[coalesce] ( literal[int] ) identifier[predictions] = identifier[predictions] . identifier[map] ( keyword[lambda] identifier[x] : identifier[tuple] ( identifier[str] ( identifier[x] ))) identifier[results_rdd] = identifier[rdd] . identifier[zip] ( identifier[predictions] ). identifier[map] ( keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]+ identifier[x] [ literal[int] ]) identifier[results_df] = identifier[df] . identifier[sql_ctx] . identifier[createDataFrame] ( identifier[results_rdd] , identifier[new_schema] ) identifier[results_df] = identifier[results_df] . identifier[withColumn] ( identifier[output_col] , identifier[results_df] [ identifier[output_col] ]. identifier[cast] ( identifier[DoubleType] ())) identifier[results_df] = identifier[results_df] . 
identifier[withColumn] ( identifier[label_col] , identifier[results_df] [ identifier[label_col] ]. identifier[cast] ( identifier[DoubleType] ())) keyword[return] identifier[results_df]
def _transform(self, df): """Private transform method of a Transformer. This serves as batch-prediction method for our purposes. """ output_col = self.getOutputCol() label_col = self.getLabelCol() new_schema = copy.deepcopy(df.schema) new_schema.add(StructField(output_col, StringType(), True)) rdd = df.rdd.coalesce(1) features = np.asarray(rdd.map(lambda x: from_vector(x.features)).collect()) # Note that we collect, since executing this on the rdd would require model serialization once again model = model_from_yaml(self.get_keras_model_config()) model.set_weights(self.weights.value) predictions = rdd.ctx.parallelize(model.predict_classes(features)).coalesce(1) predictions = predictions.map(lambda x: tuple(str(x))) results_rdd = rdd.zip(predictions).map(lambda x: x[0] + x[1]) results_df = df.sql_ctx.createDataFrame(results_rdd, new_schema) results_df = results_df.withColumn(output_col, results_df[output_col].cast(DoubleType())) results_df = results_df.withColumn(label_col, results_df[label_col].cast(DoubleType())) return results_df
def correct_dactyl_chain(self, scansion: str) -> str: """ Three or more unstressed accents in a row is a broken dactyl chain, best detected and processed backwards. Since this method takes a Procrustean approach to modifying the scansion pattern, it is not used by default in the scan method; however, it is available as an optional keyword parameter, and users looking to further automate the generation of scansion candidates should consider using this as a fall back. :param scansion: scansion with broken dactyl chain; inverted amphibrachs not allowed :return: corrected line of scansion >>> print(HexameterScanner().correct_dactyl_chain( ... "- U U - - U U - - - U U - x")) - - - - - U U - - - U U - x >>> print(HexameterScanner().correct_dactyl_chain( ... "- U U U U - - - - - U U - U")) # doctest: +NORMALIZE_WHITESPACE - - - U U - - - - - U U - U """ mark_list = string_utils.mark_list(scansion) vals = list(scansion.replace(" ", "")) # ignore last two positions, save them feet = [vals.pop(), vals.pop()] length = len(vals) idx = length - 1 while idx > 0: one = vals[idx] two = vals[idx - 1] if idx > 1: three = vals[idx - 2] else: three = "" # Dactyl foot is okay, no corrections if one == self.constants.UNSTRESSED and \ two == self.constants.UNSTRESSED and \ three == self.constants.STRESSED: feet += [one] feet += [two] feet += [three] idx -= 3 continue # Spondee foot is okay, no corrections if one == self.constants.STRESSED and \ two == self.constants.STRESSED: feet += [one] feet += [two] idx -= 2 continue # handle "U U U" foot as "- U U" if one == self.constants.UNSTRESSED and \ two == self.constants.UNSTRESSED and \ three == self.constants.UNSTRESSED: feet += [one] feet += [two] feet += [self.constants.STRESSED] idx -= 3 continue # handle "U U -" foot as "- -" if one == self.constants.STRESSED and \ two == self.constants.UNSTRESSED and \ three == self.constants.UNSTRESSED: feet += [self.constants.STRESSED] feet += [self.constants.STRESSED] idx -= 2 continue # handle "- 
U" foot as "- -" if one == self.constants.UNSTRESSED and \ two == self.constants.STRESSED: feet += [self.constants.STRESSED] feet += [two] idx -= 2 continue corrected = "".join(feet[::-1]) new_line = list(" " * len(scansion)) for idx, car in enumerate(corrected): new_line[mark_list[idx]] = car return "".join(new_line)
def function[correct_dactyl_chain, parameter[self, scansion]]: constant[ Three or more unstressed accents in a row is a broken dactyl chain, best detected and processed backwards. Since this method takes a Procrustean approach to modifying the scansion pattern, it is not used by default in the scan method; however, it is available as an optional keyword parameter, and users looking to further automate the generation of scansion candidates should consider using this as a fall back. :param scansion: scansion with broken dactyl chain; inverted amphibrachs not allowed :return: corrected line of scansion >>> print(HexameterScanner().correct_dactyl_chain( ... "- U U - - U U - - - U U - x")) - - - - - U U - - - U U - x >>> print(HexameterScanner().correct_dactyl_chain( ... "- U U U U - - - - - U U - U")) # doctest: +NORMALIZE_WHITESPACE - - - U U - - - - - U U - U ] variable[mark_list] assign[=] call[name[string_utils].mark_list, parameter[name[scansion]]] variable[vals] assign[=] call[name[list], parameter[call[name[scansion].replace, parameter[constant[ ], constant[]]]]] variable[feet] assign[=] list[[<ast.Call object at 0x7da1b26acd00>, <ast.Call object at 0x7da1b26ae140>]] variable[length] assign[=] call[name[len], parameter[name[vals]]] variable[idx] assign[=] binary_operation[name[length] - constant[1]] while compare[name[idx] greater[>] constant[0]] begin[:] variable[one] assign[=] call[name[vals]][name[idx]] variable[two] assign[=] call[name[vals]][binary_operation[name[idx] - constant[1]]] if compare[name[idx] greater[>] constant[1]] begin[:] variable[three] assign[=] call[name[vals]][binary_operation[name[idx] - constant[2]]] if <ast.BoolOp object at 0x7da1b26aeef0> begin[:] <ast.AugAssign object at 0x7da1b26add50> <ast.AugAssign object at 0x7da1b26af520> <ast.AugAssign object at 0x7da1b26ae470> <ast.AugAssign object at 0x7da1b26af9d0> continue if <ast.BoolOp object at 0x7da1b26ad0c0> begin[:] <ast.AugAssign object at 0x7da1b26acf40> <ast.AugAssign object at 
0x7da1b26adba0> <ast.AugAssign object at 0x7da1b26ac9d0> continue if <ast.BoolOp object at 0x7da1b26acdc0> begin[:] <ast.AugAssign object at 0x7da207f99de0> <ast.AugAssign object at 0x7da207f99ed0> <ast.AugAssign object at 0x7da207f98f70> <ast.AugAssign object at 0x7da207f99120> continue if <ast.BoolOp object at 0x7da207f999c0> begin[:] <ast.AugAssign object at 0x7da207f99b10> <ast.AugAssign object at 0x7da207f9bfa0> <ast.AugAssign object at 0x7da207f9b8b0> continue if <ast.BoolOp object at 0x7da207f98c40> begin[:] <ast.AugAssign object at 0x7da207f9a710> <ast.AugAssign object at 0x7da207f9a9e0> <ast.AugAssign object at 0x7da207f98bb0> continue variable[corrected] assign[=] call[constant[].join, parameter[call[name[feet]][<ast.Slice object at 0x7da207f9a140>]]] variable[new_line] assign[=] call[name[list], parameter[binary_operation[constant[ ] * call[name[len], parameter[name[scansion]]]]]] for taget[tuple[[<ast.Name object at 0x7da207f98040>, <ast.Name object at 0x7da207f99600>]]] in starred[call[name[enumerate], parameter[name[corrected]]]] begin[:] call[name[new_line]][call[name[mark_list]][name[idx]]] assign[=] name[car] return[call[constant[].join, parameter[name[new_line]]]]
keyword[def] identifier[correct_dactyl_chain] ( identifier[self] , identifier[scansion] : identifier[str] )-> identifier[str] : literal[string] identifier[mark_list] = identifier[string_utils] . identifier[mark_list] ( identifier[scansion] ) identifier[vals] = identifier[list] ( identifier[scansion] . identifier[replace] ( literal[string] , literal[string] )) identifier[feet] =[ identifier[vals] . identifier[pop] (), identifier[vals] . identifier[pop] ()] identifier[length] = identifier[len] ( identifier[vals] ) identifier[idx] = identifier[length] - literal[int] keyword[while] identifier[idx] > literal[int] : identifier[one] = identifier[vals] [ identifier[idx] ] identifier[two] = identifier[vals] [ identifier[idx] - literal[int] ] keyword[if] identifier[idx] > literal[int] : identifier[three] = identifier[vals] [ identifier[idx] - literal[int] ] keyword[else] : identifier[three] = literal[string] keyword[if] identifier[one] == identifier[self] . identifier[constants] . identifier[UNSTRESSED] keyword[and] identifier[two] == identifier[self] . identifier[constants] . identifier[UNSTRESSED] keyword[and] identifier[three] == identifier[self] . identifier[constants] . identifier[STRESSED] : identifier[feet] +=[ identifier[one] ] identifier[feet] +=[ identifier[two] ] identifier[feet] +=[ identifier[three] ] identifier[idx] -= literal[int] keyword[continue] keyword[if] identifier[one] == identifier[self] . identifier[constants] . identifier[STRESSED] keyword[and] identifier[two] == identifier[self] . identifier[constants] . identifier[STRESSED] : identifier[feet] +=[ identifier[one] ] identifier[feet] +=[ identifier[two] ] identifier[idx] -= literal[int] keyword[continue] keyword[if] identifier[one] == identifier[self] . identifier[constants] . identifier[UNSTRESSED] keyword[and] identifier[two] == identifier[self] . identifier[constants] . identifier[UNSTRESSED] keyword[and] identifier[three] == identifier[self] . identifier[constants] . 
identifier[UNSTRESSED] : identifier[feet] +=[ identifier[one] ] identifier[feet] +=[ identifier[two] ] identifier[feet] +=[ identifier[self] . identifier[constants] . identifier[STRESSED] ] identifier[idx] -= literal[int] keyword[continue] keyword[if] identifier[one] == identifier[self] . identifier[constants] . identifier[STRESSED] keyword[and] identifier[two] == identifier[self] . identifier[constants] . identifier[UNSTRESSED] keyword[and] identifier[three] == identifier[self] . identifier[constants] . identifier[UNSTRESSED] : identifier[feet] +=[ identifier[self] . identifier[constants] . identifier[STRESSED] ] identifier[feet] +=[ identifier[self] . identifier[constants] . identifier[STRESSED] ] identifier[idx] -= literal[int] keyword[continue] keyword[if] identifier[one] == identifier[self] . identifier[constants] . identifier[UNSTRESSED] keyword[and] identifier[two] == identifier[self] . identifier[constants] . identifier[STRESSED] : identifier[feet] +=[ identifier[self] . identifier[constants] . identifier[STRESSED] ] identifier[feet] +=[ identifier[two] ] identifier[idx] -= literal[int] keyword[continue] identifier[corrected] = literal[string] . identifier[join] ( identifier[feet] [::- literal[int] ]) identifier[new_line] = identifier[list] ( literal[string] * identifier[len] ( identifier[scansion] )) keyword[for] identifier[idx] , identifier[car] keyword[in] identifier[enumerate] ( identifier[corrected] ): identifier[new_line] [ identifier[mark_list] [ identifier[idx] ]]= identifier[car] keyword[return] literal[string] . identifier[join] ( identifier[new_line] )
def correct_dactyl_chain(self, scansion: str) -> str: """ Three or more unstressed accents in a row is a broken dactyl chain, best detected and processed backwards. Since this method takes a Procrustean approach to modifying the scansion pattern, it is not used by default in the scan method; however, it is available as an optional keyword parameter, and users looking to further automate the generation of scansion candidates should consider using this as a fall back. :param scansion: scansion with broken dactyl chain; inverted amphibrachs not allowed :return: corrected line of scansion >>> print(HexameterScanner().correct_dactyl_chain( ... "- U U - - U U - - - U U - x")) - - - - - U U - - - U U - x >>> print(HexameterScanner().correct_dactyl_chain( ... "- U U U U - - - - - U U - U")) # doctest: +NORMALIZE_WHITESPACE - - - U U - - - - - U U - U """ mark_list = string_utils.mark_list(scansion) vals = list(scansion.replace(' ', '')) # ignore last two positions, save them feet = [vals.pop(), vals.pop()] length = len(vals) idx = length - 1 while idx > 0: one = vals[idx] two = vals[idx - 1] if idx > 1: three = vals[idx - 2] # depends on [control=['if'], data=['idx']] else: three = '' # Dactyl foot is okay, no corrections if one == self.constants.UNSTRESSED and two == self.constants.UNSTRESSED and (three == self.constants.STRESSED): feet += [one] feet += [two] feet += [three] idx -= 3 continue # depends on [control=['if'], data=[]] # Spondee foot is okay, no corrections if one == self.constants.STRESSED and two == self.constants.STRESSED: feet += [one] feet += [two] idx -= 2 continue # depends on [control=['if'], data=[]] # handle "U U U" foot as "- U U" if one == self.constants.UNSTRESSED and two == self.constants.UNSTRESSED and (three == self.constants.UNSTRESSED): feet += [one] feet += [two] feet += [self.constants.STRESSED] idx -= 3 continue # depends on [control=['if'], data=[]] # handle "U U -" foot as "- -" if one == self.constants.STRESSED and two == 
self.constants.UNSTRESSED and (three == self.constants.UNSTRESSED): feet += [self.constants.STRESSED] feet += [self.constants.STRESSED] idx -= 2 continue # depends on [control=['if'], data=[]] # handle "- U" foot as "- -" if one == self.constants.UNSTRESSED and two == self.constants.STRESSED: feet += [self.constants.STRESSED] feet += [two] idx -= 2 continue # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['idx']] corrected = ''.join(feet[::-1]) new_line = list(' ' * len(scansion)) for (idx, car) in enumerate(corrected): new_line[mark_list[idx]] = car # depends on [control=['for'], data=[]] return ''.join(new_line)
def get_cfg(ast_func): """ Traverses the AST and returns the corresponding CFG :param ast_func: The AST representation of function :type ast_func: ast.Function :returns: The CFG representation of the function :rtype: cfg.Function """ cfg_func = cfg.Function() for ast_var in ast_func.input_variable_list: cfg_var = cfg_func.get_variable(ast_var.name) cfg_func.add_input_variable(cfg_var) for ast_var in ast_func.output_variable_list: cfg_var = cfg_func.get_variable(ast_var.name) cfg_func.add_output_variable(cfg_var) bb_start = cfg.BasicBlock() cfg_func.add_basic_block(bb_start) for stmt in ast_func.body: bb_temp = bb_start bb_temp = process_cfg(stmt, bb_temp, cfg_func) cfg_func.clean_up() cfg_func.add_summary(ast_func.summary) return cfg_func
def function[get_cfg, parameter[ast_func]]: constant[ Traverses the AST and returns the corresponding CFG :param ast_func: The AST representation of function :type ast_func: ast.Function :returns: The CFG representation of the function :rtype: cfg.Function ] variable[cfg_func] assign[=] call[name[cfg].Function, parameter[]] for taget[name[ast_var]] in starred[name[ast_func].input_variable_list] begin[:] variable[cfg_var] assign[=] call[name[cfg_func].get_variable, parameter[name[ast_var].name]] call[name[cfg_func].add_input_variable, parameter[name[cfg_var]]] for taget[name[ast_var]] in starred[name[ast_func].output_variable_list] begin[:] variable[cfg_var] assign[=] call[name[cfg_func].get_variable, parameter[name[ast_var].name]] call[name[cfg_func].add_output_variable, parameter[name[cfg_var]]] variable[bb_start] assign[=] call[name[cfg].BasicBlock, parameter[]] call[name[cfg_func].add_basic_block, parameter[name[bb_start]]] for taget[name[stmt]] in starred[name[ast_func].body] begin[:] variable[bb_temp] assign[=] name[bb_start] variable[bb_temp] assign[=] call[name[process_cfg], parameter[name[stmt], name[bb_temp], name[cfg_func]]] call[name[cfg_func].clean_up, parameter[]] call[name[cfg_func].add_summary, parameter[name[ast_func].summary]] return[name[cfg_func]]
keyword[def] identifier[get_cfg] ( identifier[ast_func] ): literal[string] identifier[cfg_func] = identifier[cfg] . identifier[Function] () keyword[for] identifier[ast_var] keyword[in] identifier[ast_func] . identifier[input_variable_list] : identifier[cfg_var] = identifier[cfg_func] . identifier[get_variable] ( identifier[ast_var] . identifier[name] ) identifier[cfg_func] . identifier[add_input_variable] ( identifier[cfg_var] ) keyword[for] identifier[ast_var] keyword[in] identifier[ast_func] . identifier[output_variable_list] : identifier[cfg_var] = identifier[cfg_func] . identifier[get_variable] ( identifier[ast_var] . identifier[name] ) identifier[cfg_func] . identifier[add_output_variable] ( identifier[cfg_var] ) identifier[bb_start] = identifier[cfg] . identifier[BasicBlock] () identifier[cfg_func] . identifier[add_basic_block] ( identifier[bb_start] ) keyword[for] identifier[stmt] keyword[in] identifier[ast_func] . identifier[body] : identifier[bb_temp] = identifier[bb_start] identifier[bb_temp] = identifier[process_cfg] ( identifier[stmt] , identifier[bb_temp] , identifier[cfg_func] ) identifier[cfg_func] . identifier[clean_up] () identifier[cfg_func] . identifier[add_summary] ( identifier[ast_func] . identifier[summary] ) keyword[return] identifier[cfg_func]
def get_cfg(ast_func): """ Traverses the AST and returns the corresponding CFG :param ast_func: The AST representation of function :type ast_func: ast.Function :returns: The CFG representation of the function :rtype: cfg.Function """ cfg_func = cfg.Function() for ast_var in ast_func.input_variable_list: cfg_var = cfg_func.get_variable(ast_var.name) cfg_func.add_input_variable(cfg_var) # depends on [control=['for'], data=['ast_var']] for ast_var in ast_func.output_variable_list: cfg_var = cfg_func.get_variable(ast_var.name) cfg_func.add_output_variable(cfg_var) # depends on [control=['for'], data=['ast_var']] bb_start = cfg.BasicBlock() cfg_func.add_basic_block(bb_start) for stmt in ast_func.body: bb_temp = bb_start bb_temp = process_cfg(stmt, bb_temp, cfg_func) # depends on [control=['for'], data=['stmt']] cfg_func.clean_up() cfg_func.add_summary(ast_func.summary) return cfg_func
def escape_LDAP(ldap_string): # type: (str) -> str # pylint: disable=C0103 """ Escape a string to let it go in an LDAP filter :param ldap_string: The string to escape :return: The protected string """ if not ldap_string: # No content return ldap_string # Protect escape character previously in the string assert is_string(ldap_string) ldap_string = ldap_string.replace( ESCAPE_CHARACTER, ESCAPE_CHARACTER + ESCAPE_CHARACTER ) # Leading space if ldap_string.startswith(" "): ldap_string = "\\ {0}".format(ldap_string[1:]) # Trailing space if ldap_string.endswith(" "): ldap_string = "{0}\\ ".format(ldap_string[:-1]) # Escape other characters for escaped in ESCAPED_CHARACTERS: ldap_string = ldap_string.replace(escaped, ESCAPE_CHARACTER + escaped) return ldap_string
def function[escape_LDAP, parameter[ldap_string]]: constant[ Escape a string to let it go in an LDAP filter :param ldap_string: The string to escape :return: The protected string ] if <ast.UnaryOp object at 0x7da20c6e4be0> begin[:] return[name[ldap_string]] assert[call[name[is_string], parameter[name[ldap_string]]]] variable[ldap_string] assign[=] call[name[ldap_string].replace, parameter[name[ESCAPE_CHARACTER], binary_operation[name[ESCAPE_CHARACTER] + name[ESCAPE_CHARACTER]]]] if call[name[ldap_string].startswith, parameter[constant[ ]]] begin[:] variable[ldap_string] assign[=] call[constant[\ {0}].format, parameter[call[name[ldap_string]][<ast.Slice object at 0x7da20c6e5030>]]] if call[name[ldap_string].endswith, parameter[constant[ ]]] begin[:] variable[ldap_string] assign[=] call[constant[{0}\ ].format, parameter[call[name[ldap_string]][<ast.Slice object at 0x7da20c6e70a0>]]] for taget[name[escaped]] in starred[name[ESCAPED_CHARACTERS]] begin[:] variable[ldap_string] assign[=] call[name[ldap_string].replace, parameter[name[escaped], binary_operation[name[ESCAPE_CHARACTER] + name[escaped]]]] return[name[ldap_string]]
keyword[def] identifier[escape_LDAP] ( identifier[ldap_string] ): literal[string] keyword[if] keyword[not] identifier[ldap_string] : keyword[return] identifier[ldap_string] keyword[assert] identifier[is_string] ( identifier[ldap_string] ) identifier[ldap_string] = identifier[ldap_string] . identifier[replace] ( identifier[ESCAPE_CHARACTER] , identifier[ESCAPE_CHARACTER] + identifier[ESCAPE_CHARACTER] ) keyword[if] identifier[ldap_string] . identifier[startswith] ( literal[string] ): identifier[ldap_string] = literal[string] . identifier[format] ( identifier[ldap_string] [ literal[int] :]) keyword[if] identifier[ldap_string] . identifier[endswith] ( literal[string] ): identifier[ldap_string] = literal[string] . identifier[format] ( identifier[ldap_string] [:- literal[int] ]) keyword[for] identifier[escaped] keyword[in] identifier[ESCAPED_CHARACTERS] : identifier[ldap_string] = identifier[ldap_string] . identifier[replace] ( identifier[escaped] , identifier[ESCAPE_CHARACTER] + identifier[escaped] ) keyword[return] identifier[ldap_string]
def escape_LDAP(ldap_string): # type: (str) -> str # pylint: disable=C0103 '\n Escape a string to let it go in an LDAP filter\n\n :param ldap_string: The string to escape\n :return: The protected string\n ' if not ldap_string: # No content return ldap_string # depends on [control=['if'], data=[]] # Protect escape character previously in the string assert is_string(ldap_string) ldap_string = ldap_string.replace(ESCAPE_CHARACTER, ESCAPE_CHARACTER + ESCAPE_CHARACTER) # Leading space if ldap_string.startswith(' '): ldap_string = '\\ {0}'.format(ldap_string[1:]) # depends on [control=['if'], data=[]] # Trailing space if ldap_string.endswith(' '): ldap_string = '{0}\\ '.format(ldap_string[:-1]) # depends on [control=['if'], data=[]] # Escape other characters for escaped in ESCAPED_CHARACTERS: ldap_string = ldap_string.replace(escaped, ESCAPE_CHARACTER + escaped) # depends on [control=['for'], data=['escaped']] return ldap_string
def get_config_from_file(conf_properties_files): """Reads properties files and saves them to a config object :param conf_properties_files: comma-separated list of properties files :returns: config object """ # Initialize the config object config = ExtendedConfigParser() logger = logging.getLogger(__name__) # Configure properties (last files could override properties) found = False files_list = conf_properties_files.split(';') for conf_properties_file in files_list: result = config.read(conf_properties_file) if len(result) == 0: message = 'Properties config file not found: %s' if len(files_list) == 1: logger.error(message, conf_properties_file) raise Exception(message % conf_properties_file) else: logger.debug(message, conf_properties_file) else: logger.debug('Reading properties from file: %s', conf_properties_file) found = True if not found: message = 'Any of the properties config files has been found' logger.error(message) raise Exception(message) return config
def function[get_config_from_file, parameter[conf_properties_files]]: constant[Reads properties files and saves them to a config object :param conf_properties_files: comma-separated list of properties files :returns: config object ] variable[config] assign[=] call[name[ExtendedConfigParser], parameter[]] variable[logger] assign[=] call[name[logging].getLogger, parameter[name[__name__]]] variable[found] assign[=] constant[False] variable[files_list] assign[=] call[name[conf_properties_files].split, parameter[constant[;]]] for taget[name[conf_properties_file]] in starred[name[files_list]] begin[:] variable[result] assign[=] call[name[config].read, parameter[name[conf_properties_file]]] if compare[call[name[len], parameter[name[result]]] equal[==] constant[0]] begin[:] variable[message] assign[=] constant[Properties config file not found: %s] if compare[call[name[len], parameter[name[files_list]]] equal[==] constant[1]] begin[:] call[name[logger].error, parameter[name[message], name[conf_properties_file]]] <ast.Raise object at 0x7da207f03040> if <ast.UnaryOp object at 0x7da207f008b0> begin[:] variable[message] assign[=] constant[Any of the properties config files has been found] call[name[logger].error, parameter[name[message]]] <ast.Raise object at 0x7da207f02860> return[name[config]]
keyword[def] identifier[get_config_from_file] ( identifier[conf_properties_files] ): literal[string] identifier[config] = identifier[ExtendedConfigParser] () identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] ) identifier[found] = keyword[False] identifier[files_list] = identifier[conf_properties_files] . identifier[split] ( literal[string] ) keyword[for] identifier[conf_properties_file] keyword[in] identifier[files_list] : identifier[result] = identifier[config] . identifier[read] ( identifier[conf_properties_file] ) keyword[if] identifier[len] ( identifier[result] )== literal[int] : identifier[message] = literal[string] keyword[if] identifier[len] ( identifier[files_list] )== literal[int] : identifier[logger] . identifier[error] ( identifier[message] , identifier[conf_properties_file] ) keyword[raise] identifier[Exception] ( identifier[message] % identifier[conf_properties_file] ) keyword[else] : identifier[logger] . identifier[debug] ( identifier[message] , identifier[conf_properties_file] ) keyword[else] : identifier[logger] . identifier[debug] ( literal[string] , identifier[conf_properties_file] ) identifier[found] = keyword[True] keyword[if] keyword[not] identifier[found] : identifier[message] = literal[string] identifier[logger] . identifier[error] ( identifier[message] ) keyword[raise] identifier[Exception] ( identifier[message] ) keyword[return] identifier[config]
def get_config_from_file(conf_properties_files): """Reads properties files and saves them to a config object :param conf_properties_files: comma-separated list of properties files :returns: config object """ # Initialize the config object config = ExtendedConfigParser() logger = logging.getLogger(__name__) # Configure properties (last files could override properties) found = False files_list = conf_properties_files.split(';') for conf_properties_file in files_list: result = config.read(conf_properties_file) if len(result) == 0: message = 'Properties config file not found: %s' if len(files_list) == 1: logger.error(message, conf_properties_file) raise Exception(message % conf_properties_file) # depends on [control=['if'], data=[]] else: logger.debug(message, conf_properties_file) # depends on [control=['if'], data=[]] else: logger.debug('Reading properties from file: %s', conf_properties_file) found = True # depends on [control=['for'], data=['conf_properties_file']] if not found: message = 'Any of the properties config files has been found' logger.error(message) raise Exception(message) # depends on [control=['if'], data=[]] return config
def mean_sq_jump_dist(self, discard_frac=0.1): """Mean squared jumping distance estimated from chain. Parameters ---------- discard_frac: float fraction of iterations to discard at the beginning (as a burn-in) Returns ------- float """ discard = int(self.niter * discard_frac) return msjd(self.chain.theta[discard:])
def function[mean_sq_jump_dist, parameter[self, discard_frac]]: constant[Mean squared jumping distance estimated from chain. Parameters ---------- discard_frac: float fraction of iterations to discard at the beginning (as a burn-in) Returns ------- float ] variable[discard] assign[=] call[name[int], parameter[binary_operation[name[self].niter * name[discard_frac]]]] return[call[name[msjd], parameter[call[name[self].chain.theta][<ast.Slice object at 0x7da18fe93220>]]]]
keyword[def] identifier[mean_sq_jump_dist] ( identifier[self] , identifier[discard_frac] = literal[int] ): literal[string] identifier[discard] = identifier[int] ( identifier[self] . identifier[niter] * identifier[discard_frac] ) keyword[return] identifier[msjd] ( identifier[self] . identifier[chain] . identifier[theta] [ identifier[discard] :])
def mean_sq_jump_dist(self, discard_frac=0.1): """Mean squared jumping distance estimated from chain. Parameters ---------- discard_frac: float fraction of iterations to discard at the beginning (as a burn-in) Returns ------- float """ discard = int(self.niter * discard_frac) return msjd(self.chain.theta[discard:])
def dbmax10years(self, value=None): """ Corresponds to IDD Field `dbmax10years` 10-year return period values for maximum extreme dry-bulb temperature Args: value (float): value for IDD Field `dbmax10years` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) except ValueError: raise ValueError('value {} need to be of type float ' 'for field `dbmax10years`'.format(value)) self._dbmax10years = value
def function[dbmax10years, parameter[self, value]]: constant[ Corresponds to IDD Field `dbmax10years` 10-year return period values for maximum extreme dry-bulb temperature Args: value (float): value for IDD Field `dbmax10years` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value ] if compare[name[value] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b0feffd0> name[self]._dbmax10years assign[=] name[value]
keyword[def] identifier[dbmax10years] ( identifier[self] , identifier[value] = keyword[None] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[value] = identifier[float] ( identifier[value] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[value] )) identifier[self] . identifier[_dbmax10years] = identifier[value]
def dbmax10years(self, value=None): """ Corresponds to IDD Field `dbmax10years` 10-year return period values for maximum extreme dry-bulb temperature Args: value (float): value for IDD Field `dbmax10years` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) # depends on [control=['try'], data=[]] except ValueError: raise ValueError('value {} need to be of type float for field `dbmax10years`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']] self._dbmax10years = value
def update(self, friendly_name): """ Update the FunctionInstance :param unicode friendly_name: The friendly_name :returns: Updated FunctionInstance :rtype: twilio.rest.serverless.v1.service.function.FunctionInstance """ data = values.of({'FriendlyName': friendly_name, }) payload = self._version.update( 'POST', self._uri, data=data, ) return FunctionInstance( self._version, payload, service_sid=self._solution['service_sid'], sid=self._solution['sid'], )
def function[update, parameter[self, friendly_name]]: constant[ Update the FunctionInstance :param unicode friendly_name: The friendly_name :returns: Updated FunctionInstance :rtype: twilio.rest.serverless.v1.service.function.FunctionInstance ] variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da1b2344b80>], [<ast.Name object at 0x7da1b2347af0>]]]] variable[payload] assign[=] call[name[self]._version.update, parameter[constant[POST], name[self]._uri]] return[call[name[FunctionInstance], parameter[name[self]._version, name[payload]]]]
keyword[def] identifier[update] ( identifier[self] , identifier[friendly_name] ): literal[string] identifier[data] = identifier[values] . identifier[of] ({ literal[string] : identifier[friendly_name] ,}) identifier[payload] = identifier[self] . identifier[_version] . identifier[update] ( literal[string] , identifier[self] . identifier[_uri] , identifier[data] = identifier[data] , ) keyword[return] identifier[FunctionInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ], )
def update(self, friendly_name): """ Update the FunctionInstance :param unicode friendly_name: The friendly_name :returns: Updated FunctionInstance :rtype: twilio.rest.serverless.v1.service.function.FunctionInstance """ data = values.of({'FriendlyName': friendly_name}) payload = self._version.update('POST', self._uri, data=data) return FunctionInstance(self._version, payload, service_sid=self._solution['service_sid'], sid=self._solution['sid'])
def get_relationships(schema, model_field=False): """Return relationship fields of a schema :param Schema schema: a marshmallow schema :param list: list of relationship fields of a schema """ relationships = [key for (key, value) in schema._declared_fields.items() if isinstance(value, Relationship)] if model_field is True: relationships = [get_model_field(schema, key) for key in relationships] return relationships
def function[get_relationships, parameter[schema, model_field]]: constant[Return relationship fields of a schema :param Schema schema: a marshmallow schema :param list: list of relationship fields of a schema ] variable[relationships] assign[=] <ast.ListComp object at 0x7da1b17fae00> if compare[name[model_field] is constant[True]] begin[:] variable[relationships] assign[=] <ast.ListComp object at 0x7da1b1642470> return[name[relationships]]
keyword[def] identifier[get_relationships] ( identifier[schema] , identifier[model_field] = keyword[False] ): literal[string] identifier[relationships] =[ identifier[key] keyword[for] ( identifier[key] , identifier[value] ) keyword[in] identifier[schema] . identifier[_declared_fields] . identifier[items] () keyword[if] identifier[isinstance] ( identifier[value] , identifier[Relationship] )] keyword[if] identifier[model_field] keyword[is] keyword[True] : identifier[relationships] =[ identifier[get_model_field] ( identifier[schema] , identifier[key] ) keyword[for] identifier[key] keyword[in] identifier[relationships] ] keyword[return] identifier[relationships]
def get_relationships(schema, model_field=False): """Return relationship fields of a schema :param Schema schema: a marshmallow schema :param list: list of relationship fields of a schema """ relationships = [key for (key, value) in schema._declared_fields.items() if isinstance(value, Relationship)] if model_field is True: relationships = [get_model_field(schema, key) for key in relationships] # depends on [control=['if'], data=[]] return relationships
def get(self, cycle_list, dataitem=None, isotope=None, sparse=1): """ Simple function that simply calls h5T.py get method. There are three ways to call this function. Parameters ---------- cycle_list : string, list If cycle_list is a string, then get interpates the argument cycle_list as a dataitem and fetches the dataitem for all cycles. If cycle_list is a list, then get fetches the dataitem for the cycles in the list. dataitem : string, optional fetches the dataitem from the list of cycles. If dataitem is None, then cycle_list must be a string and will be used as dataitem. If dataitem is an isotope in the form 'H-2', it then returns the result of, >>> self.get(cycle_list,'iso_massf',dataitem) The default is None. isotope : string, optional The name of the isotope to fetch, it must be in the form 'H-2'. If isotope is None, then cycle_list or dataitem must be a string. The default is None. sparse : integer, optional Implements a sparsity factor on the fetched data. The default is 1. Notes ----- Calling the get method directly in the form, >>> self.get(cycle_list,'iso_massf',dataitem) is depricated, and only included for compatibility. """ return self.se.get(cycle_list,dataitem,isotope,sparse)
def function[get, parameter[self, cycle_list, dataitem, isotope, sparse]]: constant[ Simple function that simply calls h5T.py get method. There are three ways to call this function. Parameters ---------- cycle_list : string, list If cycle_list is a string, then get interpates the argument cycle_list as a dataitem and fetches the dataitem for all cycles. If cycle_list is a list, then get fetches the dataitem for the cycles in the list. dataitem : string, optional fetches the dataitem from the list of cycles. If dataitem is None, then cycle_list must be a string and will be used as dataitem. If dataitem is an isotope in the form 'H-2', it then returns the result of, >>> self.get(cycle_list,'iso_massf',dataitem) The default is None. isotope : string, optional The name of the isotope to fetch, it must be in the form 'H-2'. If isotope is None, then cycle_list or dataitem must be a string. The default is None. sparse : integer, optional Implements a sparsity factor on the fetched data. The default is 1. Notes ----- Calling the get method directly in the form, >>> self.get(cycle_list,'iso_massf',dataitem) is depricated, and only included for compatibility. ] return[call[name[self].se.get, parameter[name[cycle_list], name[dataitem], name[isotope], name[sparse]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[cycle_list] , identifier[dataitem] = keyword[None] , identifier[isotope] = keyword[None] , identifier[sparse] = literal[int] ): literal[string] keyword[return] identifier[self] . identifier[se] . identifier[get] ( identifier[cycle_list] , identifier[dataitem] , identifier[isotope] , identifier[sparse] )
def get(self, cycle_list, dataitem=None, isotope=None, sparse=1): """ Simple function that simply calls h5T.py get method. There are three ways to call this function. Parameters ---------- cycle_list : string, list If cycle_list is a string, then get interpates the argument cycle_list as a dataitem and fetches the dataitem for all cycles. If cycle_list is a list, then get fetches the dataitem for the cycles in the list. dataitem : string, optional fetches the dataitem from the list of cycles. If dataitem is None, then cycle_list must be a string and will be used as dataitem. If dataitem is an isotope in the form 'H-2', it then returns the result of, >>> self.get(cycle_list,'iso_massf',dataitem) The default is None. isotope : string, optional The name of the isotope to fetch, it must be in the form 'H-2'. If isotope is None, then cycle_list or dataitem must be a string. The default is None. sparse : integer, optional Implements a sparsity factor on the fetched data. The default is 1. Notes ----- Calling the get method directly in the form, >>> self.get(cycle_list,'iso_massf',dataitem) is depricated, and only included for compatibility. """ return self.se.get(cycle_list, dataitem, isotope, sparse)
def _string_literal_to_string(string_literal_token): """Converts the StringLiteral token to a plain string: get text content, removes quote characters, and unescapes it. :param string_literal_token: The string literal :return: """ token_text = string_literal_token.getText() return token_text[1:-1].replace(u"\\'", u"'"). \ replace(u"\\\\", u"\\")
def function[_string_literal_to_string, parameter[string_literal_token]]: constant[Converts the StringLiteral token to a plain string: get text content, removes quote characters, and unescapes it. :param string_literal_token: The string literal :return: ] variable[token_text] assign[=] call[name[string_literal_token].getText, parameter[]] return[call[call[call[name[token_text]][<ast.Slice object at 0x7da1b04da2c0>].replace, parameter[constant[\'], constant[']]].replace, parameter[constant[\\], constant[\]]]]
keyword[def] identifier[_string_literal_to_string] ( identifier[string_literal_token] ): literal[string] identifier[token_text] = identifier[string_literal_token] . identifier[getText] () keyword[return] identifier[token_text] [ literal[int] :- literal[int] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] )
def _string_literal_to_string(string_literal_token): """Converts the StringLiteral token to a plain string: get text content, removes quote characters, and unescapes it. :param string_literal_token: The string literal :return: """ token_text = string_literal_token.getText() return token_text[1:-1].replace(u"\\'", u"'").replace(u'\\\\', u'\\')
def add_handlers(web_app, config): """Add the appropriate handlers to the web app. """ base_url = web_app.settings['base_url'] url = ujoin(base_url, config.page_url) assets_dir = config.assets_dir package_file = os.path.join(assets_dir, 'package.json') with open(package_file) as fid: data = json.load(fid) config.version = config.version or data['version'] config.name = config.name or data['name'] handlers = [ # TODO Redirect to /tree (url + r'/?', NAppHandler, {'config': config, 'page': 'tree'}), (url + r"/tree%s" % path_regex, NAppHandler, {'config': config, 'page': 'tree'}), (url + r"/edit%s" % path_regex, NAppHandler, {'config': config, 'page': 'edit'}), (url + r"/view%s" % path_regex, NAppHandler, {'config': config, 'page': 'view'}), (url + r"/static/(.*)", FileFindHandler, {'path': assets_dir}), ] web_app.add_handlers(".*$", handlers)
def function[add_handlers, parameter[web_app, config]]: constant[Add the appropriate handlers to the web app. ] variable[base_url] assign[=] call[name[web_app].settings][constant[base_url]] variable[url] assign[=] call[name[ujoin], parameter[name[base_url], name[config].page_url]] variable[assets_dir] assign[=] name[config].assets_dir variable[package_file] assign[=] call[name[os].path.join, parameter[name[assets_dir], constant[package.json]]] with call[name[open], parameter[name[package_file]]] begin[:] variable[data] assign[=] call[name[json].load, parameter[name[fid]]] name[config].version assign[=] <ast.BoolOp object at 0x7da2041d8af0> name[config].name assign[=] <ast.BoolOp object at 0x7da2041da110> variable[handlers] assign[=] list[[<ast.Tuple object at 0x7da18eb54b80>, <ast.Tuple object at 0x7da18eb54df0>, <ast.Tuple object at 0x7da18c4ccd90>, <ast.Tuple object at 0x7da18c4ce890>, <ast.Tuple object at 0x7da18c4cf580>]] call[name[web_app].add_handlers, parameter[constant[.*$], name[handlers]]]
keyword[def] identifier[add_handlers] ( identifier[web_app] , identifier[config] ): literal[string] identifier[base_url] = identifier[web_app] . identifier[settings] [ literal[string] ] identifier[url] = identifier[ujoin] ( identifier[base_url] , identifier[config] . identifier[page_url] ) identifier[assets_dir] = identifier[config] . identifier[assets_dir] identifier[package_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[assets_dir] , literal[string] ) keyword[with] identifier[open] ( identifier[package_file] ) keyword[as] identifier[fid] : identifier[data] = identifier[json] . identifier[load] ( identifier[fid] ) identifier[config] . identifier[version] = identifier[config] . identifier[version] keyword[or] identifier[data] [ literal[string] ] identifier[config] . identifier[name] = identifier[config] . identifier[name] keyword[or] identifier[data] [ literal[string] ] identifier[handlers] =[ ( identifier[url] + literal[string] , identifier[NAppHandler] ,{ literal[string] : identifier[config] , literal[string] : literal[string] }), ( identifier[url] + literal[string] % identifier[path_regex] , identifier[NAppHandler] ,{ literal[string] : identifier[config] , literal[string] : literal[string] }), ( identifier[url] + literal[string] % identifier[path_regex] , identifier[NAppHandler] ,{ literal[string] : identifier[config] , literal[string] : literal[string] }), ( identifier[url] + literal[string] % identifier[path_regex] , identifier[NAppHandler] ,{ literal[string] : identifier[config] , literal[string] : literal[string] }), ( identifier[url] + literal[string] , identifier[FileFindHandler] ,{ literal[string] : identifier[assets_dir] }), ] identifier[web_app] . identifier[add_handlers] ( literal[string] , identifier[handlers] )
def add_handlers(web_app, config): """Add the appropriate handlers to the web app. """ base_url = web_app.settings['base_url'] url = ujoin(base_url, config.page_url) assets_dir = config.assets_dir package_file = os.path.join(assets_dir, 'package.json') with open(package_file) as fid: data = json.load(fid) # depends on [control=['with'], data=['fid']] config.version = config.version or data['version'] config.name = config.name or data['name'] # TODO Redirect to /tree handlers = [(url + '/?', NAppHandler, {'config': config, 'page': 'tree'}), (url + '/tree%s' % path_regex, NAppHandler, {'config': config, 'page': 'tree'}), (url + '/edit%s' % path_regex, NAppHandler, {'config': config, 'page': 'edit'}), (url + '/view%s' % path_regex, NAppHandler, {'config': config, 'page': 'view'}), (url + '/static/(.*)', FileFindHandler, {'path': assets_dir})] web_app.add_handlers('.*$', handlers)
def fields(self): """ Return the fields specified in the pattern using Python's formatting mini-language. """ parse = list(string.Formatter().parse(self.pattern)) return [f for f in zip(*parse)[1] if f is not None]
def function[fields, parameter[self]]: constant[ Return the fields specified in the pattern using Python's formatting mini-language. ] variable[parse] assign[=] call[name[list], parameter[call[call[name[string].Formatter, parameter[]].parse, parameter[name[self].pattern]]]] return[<ast.ListComp object at 0x7da1afef1750>]
keyword[def] identifier[fields] ( identifier[self] ): literal[string] identifier[parse] = identifier[list] ( identifier[string] . identifier[Formatter] (). identifier[parse] ( identifier[self] . identifier[pattern] )) keyword[return] [ identifier[f] keyword[for] identifier[f] keyword[in] identifier[zip] (* identifier[parse] )[ literal[int] ] keyword[if] identifier[f] keyword[is] keyword[not] keyword[None] ]
def fields(self): """ Return the fields specified in the pattern using Python's formatting mini-language. """ parse = list(string.Formatter().parse(self.pattern)) return [f for f in zip(*parse)[1] if f is not None]
def getServiceMessages(self, remote): """Get service messages from CCU / Homegear""" try: return self.proxies["%s-%s" % (self._interface_id, remote)].getServiceMessages() except Exception as err: LOG.debug("ServerThread.getServiceMessages: Exception: %s" % str(err))
def function[getServiceMessages, parameter[self, remote]]: constant[Get service messages from CCU / Homegear] <ast.Try object at 0x7da1b06558a0>
keyword[def] identifier[getServiceMessages] ( identifier[self] , identifier[remote] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[proxies] [ literal[string] %( identifier[self] . identifier[_interface_id] , identifier[remote] )]. identifier[getServiceMessages] () keyword[except] identifier[Exception] keyword[as] identifier[err] : identifier[LOG] . identifier[debug] ( literal[string] % identifier[str] ( identifier[err] ))
def getServiceMessages(self, remote): """Get service messages from CCU / Homegear""" try: return self.proxies['%s-%s' % (self._interface_id, remote)].getServiceMessages() # depends on [control=['try'], data=[]] except Exception as err: LOG.debug('ServerThread.getServiceMessages: Exception: %s' % str(err)) # depends on [control=['except'], data=['err']]
def check(source, filename='<string>', report_level=docutils.utils.Reporter.INFO_LEVEL, ignore=None, debug=False): """Yield errors. Use lower report_level for noisier error output. Each yielded error is a tuple of the form: (line_number, message) Line numbers are indexed at 1 and are with respect to the full RST file. Each code block is checked asynchronously in a subprocess. Note that this function mutates state by calling the ``docutils`` ``register_*()`` functions. """ # Do this at call time rather than import time to avoid unnecessarily # mutating state. register_code_directive() ignore_sphinx() ignore = ignore or {} try: ignore.setdefault('languages', []).extend( find_ignored_languages(source) ) except Error as error: yield (error.line_number, '{}'.format(error)) writer = CheckWriter(source, filename, ignore=ignore) string_io = io.StringIO() # This is a hack to avoid false positive from docutils (#23). docutils # mistakes BOMs for actual visible letters. This results in the "underline # too short" warning firing. source = strip_byte_order_mark(source) try: docutils.core.publish_string( source, writer=writer, source_path=filename, settings_overrides={'halt_level': report_level, 'report_level': report_level, 'warning_stream': string_io}) except docutils.utils.SystemMessage: pass except AttributeError: # Sphinx will sometimes throw an exception trying to access # "self.state.document.settings.env". Ignore this for now until we # figure out a better approach. if debug: raise for checker in writer.checkers: for error in checker(): yield error rst_errors = string_io.getvalue().strip() if rst_errors: for message in rst_errors.splitlines(): try: ignore_regex = ignore.get('messages', '') if ignore_regex and re.search(ignore_regex, message): continue yield parse_gcc_style_error_message(message, filename=filename, has_column=False) except ValueError: continue
def function[check, parameter[source, filename, report_level, ignore, debug]]: constant[Yield errors. Use lower report_level for noisier error output. Each yielded error is a tuple of the form: (line_number, message) Line numbers are indexed at 1 and are with respect to the full RST file. Each code block is checked asynchronously in a subprocess. Note that this function mutates state by calling the ``docutils`` ``register_*()`` functions. ] call[name[register_code_directive], parameter[]] call[name[ignore_sphinx], parameter[]] variable[ignore] assign[=] <ast.BoolOp object at 0x7da1b08d6770> <ast.Try object at 0x7da1b08d4700> variable[writer] assign[=] call[name[CheckWriter], parameter[name[source], name[filename]]] variable[string_io] assign[=] call[name[io].StringIO, parameter[]] variable[source] assign[=] call[name[strip_byte_order_mark], parameter[name[source]]] <ast.Try object at 0x7da1b08d77f0> for taget[name[checker]] in starred[name[writer].checkers] begin[:] for taget[name[error]] in starred[call[name[checker], parameter[]]] begin[:] <ast.Yield object at 0x7da1b08b0400> variable[rst_errors] assign[=] call[call[name[string_io].getvalue, parameter[]].strip, parameter[]] if name[rst_errors] begin[:] for taget[name[message]] in starred[call[name[rst_errors].splitlines, parameter[]]] begin[:] <ast.Try object at 0x7da1b08b0c70>
keyword[def] identifier[check] ( identifier[source] , identifier[filename] = literal[string] , identifier[report_level] = identifier[docutils] . identifier[utils] . identifier[Reporter] . identifier[INFO_LEVEL] , identifier[ignore] = keyword[None] , identifier[debug] = keyword[False] ): literal[string] identifier[register_code_directive] () identifier[ignore_sphinx] () identifier[ignore] = identifier[ignore] keyword[or] {} keyword[try] : identifier[ignore] . identifier[setdefault] ( literal[string] ,[]). identifier[extend] ( identifier[find_ignored_languages] ( identifier[source] ) ) keyword[except] identifier[Error] keyword[as] identifier[error] : keyword[yield] ( identifier[error] . identifier[line_number] , literal[string] . identifier[format] ( identifier[error] )) identifier[writer] = identifier[CheckWriter] ( identifier[source] , identifier[filename] , identifier[ignore] = identifier[ignore] ) identifier[string_io] = identifier[io] . identifier[StringIO] () identifier[source] = identifier[strip_byte_order_mark] ( identifier[source] ) keyword[try] : identifier[docutils] . identifier[core] . identifier[publish_string] ( identifier[source] , identifier[writer] = identifier[writer] , identifier[source_path] = identifier[filename] , identifier[settings_overrides] ={ literal[string] : identifier[report_level] , literal[string] : identifier[report_level] , literal[string] : identifier[string_io] }) keyword[except] identifier[docutils] . identifier[utils] . identifier[SystemMessage] : keyword[pass] keyword[except] identifier[AttributeError] : keyword[if] identifier[debug] : keyword[raise] keyword[for] identifier[checker] keyword[in] identifier[writer] . identifier[checkers] : keyword[for] identifier[error] keyword[in] identifier[checker] (): keyword[yield] identifier[error] identifier[rst_errors] = identifier[string_io] . identifier[getvalue] (). 
identifier[strip] () keyword[if] identifier[rst_errors] : keyword[for] identifier[message] keyword[in] identifier[rst_errors] . identifier[splitlines] (): keyword[try] : identifier[ignore_regex] = identifier[ignore] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[ignore_regex] keyword[and] identifier[re] . identifier[search] ( identifier[ignore_regex] , identifier[message] ): keyword[continue] keyword[yield] identifier[parse_gcc_style_error_message] ( identifier[message] , identifier[filename] = identifier[filename] , identifier[has_column] = keyword[False] ) keyword[except] identifier[ValueError] : keyword[continue]
def check(source, filename='<string>', report_level=docutils.utils.Reporter.INFO_LEVEL, ignore=None, debug=False): """Yield errors. Use lower report_level for noisier error output. Each yielded error is a tuple of the form: (line_number, message) Line numbers are indexed at 1 and are with respect to the full RST file. Each code block is checked asynchronously in a subprocess. Note that this function mutates state by calling the ``docutils`` ``register_*()`` functions. """ # Do this at call time rather than import time to avoid unnecessarily # mutating state. register_code_directive() ignore_sphinx() ignore = ignore or {} try: ignore.setdefault('languages', []).extend(find_ignored_languages(source)) # depends on [control=['try'], data=[]] except Error as error: yield (error.line_number, '{}'.format(error)) # depends on [control=['except'], data=['error']] writer = CheckWriter(source, filename, ignore=ignore) string_io = io.StringIO() # This is a hack to avoid false positive from docutils (#23). docutils # mistakes BOMs for actual visible letters. This results in the "underline # too short" warning firing. source = strip_byte_order_mark(source) try: docutils.core.publish_string(source, writer=writer, source_path=filename, settings_overrides={'halt_level': report_level, 'report_level': report_level, 'warning_stream': string_io}) # depends on [control=['try'], data=[]] except docutils.utils.SystemMessage: pass # depends on [control=['except'], data=[]] except AttributeError: # Sphinx will sometimes throw an exception trying to access # "self.state.document.settings.env". Ignore this for now until we # figure out a better approach. 
if debug: raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] for checker in writer.checkers: for error in checker(): yield error # depends on [control=['for'], data=['error']] # depends on [control=['for'], data=['checker']] rst_errors = string_io.getvalue().strip() if rst_errors: for message in rst_errors.splitlines(): try: ignore_regex = ignore.get('messages', '') if ignore_regex and re.search(ignore_regex, message): continue # depends on [control=['if'], data=[]] yield parse_gcc_style_error_message(message, filename=filename, has_column=False) # depends on [control=['try'], data=[]] except ValueError: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['message']] # depends on [control=['if'], data=[]]
def hav_dist(locs1, locs2): """ Return a distance matrix between two set of coordinates. Use geometric distance (default) or haversine distance (if longlat=True). Parameters ---------- locs1 : numpy.array The first set of coordinates as [(long, lat), (long, lat)]. locs2 : numpy.array The second set of coordinates as [(long, lat), (long, lat)]. Returns ------- mat_dist : numpy.array The distance matrix between locs1 and locs2 """ # locs1 = np.radians(locs1) # locs2 = np.radians(locs2) cos_lat1 = np.cos(locs1[..., 0]) cos_lat2 = np.cos(locs2[..., 0]) cos_lat_d = np.cos(locs1[..., 0] - locs2[..., 0]) cos_lon_d = np.cos(locs1[..., 1] - locs2[..., 1]) return 6367000 * np.arccos( cos_lat_d - cos_lat1 * cos_lat2 * (1 - cos_lon_d))
def function[hav_dist, parameter[locs1, locs2]]: constant[ Return a distance matrix between two set of coordinates. Use geometric distance (default) or haversine distance (if longlat=True). Parameters ---------- locs1 : numpy.array The first set of coordinates as [(long, lat), (long, lat)]. locs2 : numpy.array The second set of coordinates as [(long, lat), (long, lat)]. Returns ------- mat_dist : numpy.array The distance matrix between locs1 and locs2 ] variable[cos_lat1] assign[=] call[name[np].cos, parameter[call[name[locs1]][tuple[[<ast.Constant object at 0x7da1b0b831c0>, <ast.Constant object at 0x7da1b0b831f0>]]]]] variable[cos_lat2] assign[=] call[name[np].cos, parameter[call[name[locs2]][tuple[[<ast.Constant object at 0x7da1b0b83280>, <ast.Constant object at 0x7da1b0b832e0>]]]]] variable[cos_lat_d] assign[=] call[name[np].cos, parameter[binary_operation[call[name[locs1]][tuple[[<ast.Constant object at 0x7da1b0b83400>, <ast.Constant object at 0x7da1b0b83430>]]] - call[name[locs2]][tuple[[<ast.Constant object at 0x7da1b0b834f0>, <ast.Constant object at 0x7da1b0b83580>]]]]]] variable[cos_lon_d] assign[=] call[name[np].cos, parameter[binary_operation[call[name[locs1]][tuple[[<ast.Constant object at 0x7da1b0b82080>, <ast.Constant object at 0x7da1b0b820b0>]]] - call[name[locs2]][tuple[[<ast.Constant object at 0x7da1b0b81ed0>, <ast.Constant object at 0x7da1b0b81ea0>]]]]]] return[binary_operation[constant[6367000] * call[name[np].arccos, parameter[binary_operation[name[cos_lat_d] - binary_operation[binary_operation[name[cos_lat1] * name[cos_lat2]] * binary_operation[constant[1] - name[cos_lon_d]]]]]]]]
keyword[def] identifier[hav_dist] ( identifier[locs1] , identifier[locs2] ): literal[string] identifier[cos_lat1] = identifier[np] . identifier[cos] ( identifier[locs1] [..., literal[int] ]) identifier[cos_lat2] = identifier[np] . identifier[cos] ( identifier[locs2] [..., literal[int] ]) identifier[cos_lat_d] = identifier[np] . identifier[cos] ( identifier[locs1] [..., literal[int] ]- identifier[locs2] [..., literal[int] ]) identifier[cos_lon_d] = identifier[np] . identifier[cos] ( identifier[locs1] [..., literal[int] ]- identifier[locs2] [..., literal[int] ]) keyword[return] literal[int] * identifier[np] . identifier[arccos] ( identifier[cos_lat_d] - identifier[cos_lat1] * identifier[cos_lat2] *( literal[int] - identifier[cos_lon_d] ))
def hav_dist(locs1, locs2): """ Return a distance matrix between two set of coordinates. Use geometric distance (default) or haversine distance (if longlat=True). Parameters ---------- locs1 : numpy.array The first set of coordinates as [(long, lat), (long, lat)]. locs2 : numpy.array The second set of coordinates as [(long, lat), (long, lat)]. Returns ------- mat_dist : numpy.array The distance matrix between locs1 and locs2 """ # locs1 = np.radians(locs1) # locs2 = np.radians(locs2) cos_lat1 = np.cos(locs1[..., 0]) cos_lat2 = np.cos(locs2[..., 0]) cos_lat_d = np.cos(locs1[..., 0] - locs2[..., 0]) cos_lon_d = np.cos(locs1[..., 1] - locs2[..., 1]) return 6367000 * np.arccos(cos_lat_d - cos_lat1 * cos_lat2 * (1 - cos_lon_d))
def _set_show_clock(self, v, load=False): """ Setter method for show_clock, mapped from YANG variable /brocade_clock_rpc/show_clock (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_show_clock is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_show_clock() directly. YANG Description: display current time for the cluster or specified switch """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=show_clock.show_clock, is_leaf=True, yang_name="show-clock", rest_name="show-clock", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'clock-get'}}, namespace='urn:brocade.com:mgmt:brocade-clock', defining_module='brocade-clock', yang_type='rpc', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """show_clock must be of a type compatible with rpc""", 'defined-type': "rpc", 'generated-type': """YANGDynClass(base=show_clock.show_clock, is_leaf=True, yang_name="show-clock", rest_name="show-clock", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'clock-get'}}, namespace='urn:brocade.com:mgmt:brocade-clock', defining_module='brocade-clock', yang_type='rpc', is_config=True)""", }) self.__show_clock = t if hasattr(self, '_set'): self._set()
def function[_set_show_clock, parameter[self, v, load]]: constant[ Setter method for show_clock, mapped from YANG variable /brocade_clock_rpc/show_clock (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_show_clock is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_show_clock() directly. YANG Description: display current time for the cluster or specified switch ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da1b25d8a60> name[self].__show_clock assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_show_clock] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[show_clock] . identifier[show_clock] , identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__show_clock] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_show_clock(self, v, load=False): """ Setter method for show_clock, mapped from YANG variable /brocade_clock_rpc/show_clock (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_show_clock is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_show_clock() directly. YANG Description: display current time for the cluster or specified switch """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=show_clock.show_clock, is_leaf=True, yang_name='show-clock', rest_name='show-clock', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'clock-get'}}, namespace='urn:brocade.com:mgmt:brocade-clock', defining_module='brocade-clock', yang_type='rpc', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'show_clock must be of a type compatible with rpc', 'defined-type': 'rpc', 'generated-type': 'YANGDynClass(base=show_clock.show_clock, is_leaf=True, yang_name="show-clock", rest_name="show-clock", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u\'tailf-common\': {u\'hidden\': u\'rpccmd\', u\'actionpoint\': u\'clock-get\'}}, namespace=\'urn:brocade.com:mgmt:brocade-clock\', defining_module=\'brocade-clock\', yang_type=\'rpc\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__show_clock = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def suffix(filename,suffix): ''' returns a filenames with ``suffix`` inserted before the dataset suffix ''' return os.path.split(re.sub(_afni_suffix_regex,"%s\g<1>" % suffix,str(filename)))[1]
def function[suffix, parameter[filename, suffix]]: constant[ returns a filenames with ``suffix`` inserted before the dataset suffix ] return[call[call[name[os].path.split, parameter[call[name[re].sub, parameter[name[_afni_suffix_regex], binary_operation[constant[%s\g<1>] <ast.Mod object at 0x7da2590d6920> name[suffix]], call[name[str], parameter[name[filename]]]]]]]][constant[1]]]
keyword[def] identifier[suffix] ( identifier[filename] , identifier[suffix] ): literal[string] keyword[return] identifier[os] . identifier[path] . identifier[split] ( identifier[re] . identifier[sub] ( identifier[_afni_suffix_regex] , literal[string] % identifier[suffix] , identifier[str] ( identifier[filename] )))[ literal[int] ]
def suffix(filename, suffix): """ returns a filenames with ``suffix`` inserted before the dataset suffix """ return os.path.split(re.sub(_afni_suffix_regex, '%s\\g<1>' % suffix, str(filename)))[1]
def deriv(self, x: str, ctype: ContentType) -> SchemaPattern: """Return derivative of the receiver.""" return Alternative.combine(self.left.deriv(x, ctype), self.right.deriv(x, ctype))
def function[deriv, parameter[self, x, ctype]]: constant[Return derivative of the receiver.] return[call[name[Alternative].combine, parameter[call[name[self].left.deriv, parameter[name[x], name[ctype]]], call[name[self].right.deriv, parameter[name[x], name[ctype]]]]]]
keyword[def] identifier[deriv] ( identifier[self] , identifier[x] : identifier[str] , identifier[ctype] : identifier[ContentType] )-> identifier[SchemaPattern] : literal[string] keyword[return] identifier[Alternative] . identifier[combine] ( identifier[self] . identifier[left] . identifier[deriv] ( identifier[x] , identifier[ctype] ), identifier[self] . identifier[right] . identifier[deriv] ( identifier[x] , identifier[ctype] ))
def deriv(self, x: str, ctype: ContentType) -> SchemaPattern: """Return derivative of the receiver.""" return Alternative.combine(self.left.deriv(x, ctype), self.right.deriv(x, ctype))
def _get_dstk_intersections(self, address, dstk_address): """ Find the unique tokens in the original address and the returned address. """ # Normalize both addresses normalized_address = self._normalize(address) normalized_dstk_address = self._normalize(dstk_address) address_uniques = set(normalized_address) - set(normalized_dstk_address) dstk_address_uniques = set(normalized_dstk_address) - set(normalized_address) if self.logger: self.logger.debug("Address Uniques {0}".format(address_uniques)) if self.logger: self.logger.debug("DSTK Address Uniques {0}".format(dstk_address_uniques)) return (len(address_uniques), len(dstk_address_uniques))
def function[_get_dstk_intersections, parameter[self, address, dstk_address]]: constant[ Find the unique tokens in the original address and the returned address. ] variable[normalized_address] assign[=] call[name[self]._normalize, parameter[name[address]]] variable[normalized_dstk_address] assign[=] call[name[self]._normalize, parameter[name[dstk_address]]] variable[address_uniques] assign[=] binary_operation[call[name[set], parameter[name[normalized_address]]] - call[name[set], parameter[name[normalized_dstk_address]]]] variable[dstk_address_uniques] assign[=] binary_operation[call[name[set], parameter[name[normalized_dstk_address]]] - call[name[set], parameter[name[normalized_address]]]] if name[self].logger begin[:] call[name[self].logger.debug, parameter[call[constant[Address Uniques {0}].format, parameter[name[address_uniques]]]]] if name[self].logger begin[:] call[name[self].logger.debug, parameter[call[constant[DSTK Address Uniques {0}].format, parameter[name[dstk_address_uniques]]]]] return[tuple[[<ast.Call object at 0x7da1b1139ae0>, <ast.Call object at 0x7da1b11392a0>]]]
keyword[def] identifier[_get_dstk_intersections] ( identifier[self] , identifier[address] , identifier[dstk_address] ): literal[string] identifier[normalized_address] = identifier[self] . identifier[_normalize] ( identifier[address] ) identifier[normalized_dstk_address] = identifier[self] . identifier[_normalize] ( identifier[dstk_address] ) identifier[address_uniques] = identifier[set] ( identifier[normalized_address] )- identifier[set] ( identifier[normalized_dstk_address] ) identifier[dstk_address_uniques] = identifier[set] ( identifier[normalized_dstk_address] )- identifier[set] ( identifier[normalized_address] ) keyword[if] identifier[self] . identifier[logger] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[address_uniques] )) keyword[if] identifier[self] . identifier[logger] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[dstk_address_uniques] )) keyword[return] ( identifier[len] ( identifier[address_uniques] ), identifier[len] ( identifier[dstk_address_uniques] ))
def _get_dstk_intersections(self, address, dstk_address): """ Find the unique tokens in the original address and the returned address. """ # Normalize both addresses normalized_address = self._normalize(address) normalized_dstk_address = self._normalize(dstk_address) address_uniques = set(normalized_address) - set(normalized_dstk_address) dstk_address_uniques = set(normalized_dstk_address) - set(normalized_address) if self.logger: self.logger.debug('Address Uniques {0}'.format(address_uniques)) # depends on [control=['if'], data=[]] if self.logger: self.logger.debug('DSTK Address Uniques {0}'.format(dstk_address_uniques)) # depends on [control=['if'], data=[]] return (len(address_uniques), len(dstk_address_uniques))
def get_default_config(self): """ Returns default collector settings. """ config = super(FilesCollector, self).get_default_config() config.update({ 'path': '.', 'dir': '/tmp/diamond', 'delete': False, }) return config
def function[get_default_config, parameter[self]]: constant[ Returns default collector settings. ] variable[config] assign[=] call[call[name[super], parameter[name[FilesCollector], name[self]]].get_default_config, parameter[]] call[name[config].update, parameter[dictionary[[<ast.Constant object at 0x7da20c6c67a0>, <ast.Constant object at 0x7da20c6c6b90>, <ast.Constant object at 0x7da20c6c7910>], [<ast.Constant object at 0x7da20c6c6dd0>, <ast.Constant object at 0x7da20c6c4c40>, <ast.Constant object at 0x7da20c6c62c0>]]]] return[name[config]]
keyword[def] identifier[get_default_config] ( identifier[self] ): literal[string] identifier[config] = identifier[super] ( identifier[FilesCollector] , identifier[self] ). identifier[get_default_config] () identifier[config] . identifier[update] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : keyword[False] , }) keyword[return] identifier[config]
def get_default_config(self): """ Returns default collector settings. """ config = super(FilesCollector, self).get_default_config() config.update({'path': '.', 'dir': '/tmp/diamond', 'delete': False}) return config
def dump_inline_table(self, section): """Preserve inline table in its compact syntax instead of expanding into subsection. https://github.com/toml-lang/toml#user-content-inline-table """ retval = "" if isinstance(section, dict): val_list = [] for k, v in section.items(): val = self.dump_inline_table(v) val_list.append(k + " = " + val) retval += "{ " + ", ".join(val_list) + " }\n" return retval else: return unicode(self.dump_value(section))
def function[dump_inline_table, parameter[self, section]]: constant[Preserve inline table in its compact syntax instead of expanding into subsection. https://github.com/toml-lang/toml#user-content-inline-table ] variable[retval] assign[=] constant[] if call[name[isinstance], parameter[name[section], name[dict]]] begin[:] variable[val_list] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18ede7c70>, <ast.Name object at 0x7da18ede4970>]]] in starred[call[name[section].items, parameter[]]] begin[:] variable[val] assign[=] call[name[self].dump_inline_table, parameter[name[v]]] call[name[val_list].append, parameter[binary_operation[binary_operation[name[k] + constant[ = ]] + name[val]]]] <ast.AugAssign object at 0x7da18ede64d0> return[name[retval]]
keyword[def] identifier[dump_inline_table] ( identifier[self] , identifier[section] ): literal[string] identifier[retval] = literal[string] keyword[if] identifier[isinstance] ( identifier[section] , identifier[dict] ): identifier[val_list] =[] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[section] . identifier[items] (): identifier[val] = identifier[self] . identifier[dump_inline_table] ( identifier[v] ) identifier[val_list] . identifier[append] ( identifier[k] + literal[string] + identifier[val] ) identifier[retval] += literal[string] + literal[string] . identifier[join] ( identifier[val_list] )+ literal[string] keyword[return] identifier[retval] keyword[else] : keyword[return] identifier[unicode] ( identifier[self] . identifier[dump_value] ( identifier[section] ))
def dump_inline_table(self, section): """Preserve inline table in its compact syntax instead of expanding into subsection. https://github.com/toml-lang/toml#user-content-inline-table """ retval = '' if isinstance(section, dict): val_list = [] for (k, v) in section.items(): val = self.dump_inline_table(v) val_list.append(k + ' = ' + val) # depends on [control=['for'], data=[]] retval += '{ ' + ', '.join(val_list) + ' }\n' return retval # depends on [control=['if'], data=[]] else: return unicode(self.dump_value(section))
def __extend_uri(prefixes, short): """ Extend a prefixed uri with the help of a specific dictionary of prefixes :param prefixes: Dictionary of prefixes :param short: Prefixed uri to be extended :return: """ for prefix in prefixes: if short.startswith(prefix): return short.replace(prefix + ':', prefixes[prefix]) return short
def function[__extend_uri, parameter[prefixes, short]]: constant[ Extend a prefixed uri with the help of a specific dictionary of prefixes :param prefixes: Dictionary of prefixes :param short: Prefixed uri to be extended :return: ] for taget[name[prefix]] in starred[name[prefixes]] begin[:] if call[name[short].startswith, parameter[name[prefix]]] begin[:] return[call[name[short].replace, parameter[binary_operation[name[prefix] + constant[:]], call[name[prefixes]][name[prefix]]]]] return[name[short]]
keyword[def] identifier[__extend_uri] ( identifier[prefixes] , identifier[short] ): literal[string] keyword[for] identifier[prefix] keyword[in] identifier[prefixes] : keyword[if] identifier[short] . identifier[startswith] ( identifier[prefix] ): keyword[return] identifier[short] . identifier[replace] ( identifier[prefix] + literal[string] , identifier[prefixes] [ identifier[prefix] ]) keyword[return] identifier[short]
def __extend_uri(prefixes, short): """ Extend a prefixed uri with the help of a specific dictionary of prefixes :param prefixes: Dictionary of prefixes :param short: Prefixed uri to be extended :return: """ for prefix in prefixes: if short.startswith(prefix): return short.replace(prefix + ':', prefixes[prefix]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['prefix']] return short
def dict_strict_update(base_dict, update_dict): """ This function updates base_dict with update_dict if and only if update_dict does not contain keys that are not already in base_dict. It is essentially a more strict interpretation of the term "updating" the dict. If update_dict contains keys that are not in base_dict, a RuntimeError is raised. :param base_dict: The dict that is to be updated. This dict is modified. :param update_dict: The dict containing the new values. """ additional_keys = set(update_dict.keys()) - set(base_dict.keys()) if len(additional_keys) > 0: raise RuntimeError( 'The update dictionary contains keys that are not part of ' 'the base dictionary: {}'.format(str(additional_keys)), additional_keys) base_dict.update(update_dict)
def function[dict_strict_update, parameter[base_dict, update_dict]]: constant[ This function updates base_dict with update_dict if and only if update_dict does not contain keys that are not already in base_dict. It is essentially a more strict interpretation of the term "updating" the dict. If update_dict contains keys that are not in base_dict, a RuntimeError is raised. :param base_dict: The dict that is to be updated. This dict is modified. :param update_dict: The dict containing the new values. ] variable[additional_keys] assign[=] binary_operation[call[name[set], parameter[call[name[update_dict].keys, parameter[]]]] - call[name[set], parameter[call[name[base_dict].keys, parameter[]]]]] if compare[call[name[len], parameter[name[additional_keys]]] greater[>] constant[0]] begin[:] <ast.Raise object at 0x7da1b13913f0> call[name[base_dict].update, parameter[name[update_dict]]]
keyword[def] identifier[dict_strict_update] ( identifier[base_dict] , identifier[update_dict] ): literal[string] identifier[additional_keys] = identifier[set] ( identifier[update_dict] . identifier[keys] ())- identifier[set] ( identifier[base_dict] . identifier[keys] ()) keyword[if] identifier[len] ( identifier[additional_keys] )> literal[int] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] . identifier[format] ( identifier[str] ( identifier[additional_keys] )), identifier[additional_keys] ) identifier[base_dict] . identifier[update] ( identifier[update_dict] )
def dict_strict_update(base_dict, update_dict): """ This function updates base_dict with update_dict if and only if update_dict does not contain keys that are not already in base_dict. It is essentially a more strict interpretation of the term "updating" the dict. If update_dict contains keys that are not in base_dict, a RuntimeError is raised. :param base_dict: The dict that is to be updated. This dict is modified. :param update_dict: The dict containing the new values. """ additional_keys = set(update_dict.keys()) - set(base_dict.keys()) if len(additional_keys) > 0: raise RuntimeError('The update dictionary contains keys that are not part of the base dictionary: {}'.format(str(additional_keys)), additional_keys) # depends on [control=['if'], data=[]] base_dict.update(update_dict)
def get_feature(self, ds, feat): """Return filtered feature data The features are filtered according to the user-defined filters, using the information in `ds._filter`. In addition, all `nan` and `inf` values are purged. Parameters ---------- ds: dclab.rtdc_dataset.RTDCBase The dataset containing the feature feat: str The name of the feature; must be a scalar feature """ if ds.config["filtering"]["enable filters"]: x = ds[feat][ds._filter] else: x = ds[feat] bad = np.isnan(x) | np.isinf(x) xout = x[~bad] return xout
def function[get_feature, parameter[self, ds, feat]]: constant[Return filtered feature data The features are filtered according to the user-defined filters, using the information in `ds._filter`. In addition, all `nan` and `inf` values are purged. Parameters ---------- ds: dclab.rtdc_dataset.RTDCBase The dataset containing the feature feat: str The name of the feature; must be a scalar feature ] if call[call[name[ds].config][constant[filtering]]][constant[enable filters]] begin[:] variable[x] assign[=] call[call[name[ds]][name[feat]]][name[ds]._filter] variable[bad] assign[=] binary_operation[call[name[np].isnan, parameter[name[x]]] <ast.BitOr object at 0x7da2590d6aa0> call[name[np].isinf, parameter[name[x]]]] variable[xout] assign[=] call[name[x]][<ast.UnaryOp object at 0x7da1b1865cc0>] return[name[xout]]
keyword[def] identifier[get_feature] ( identifier[self] , identifier[ds] , identifier[feat] ): literal[string] keyword[if] identifier[ds] . identifier[config] [ literal[string] ][ literal[string] ]: identifier[x] = identifier[ds] [ identifier[feat] ][ identifier[ds] . identifier[_filter] ] keyword[else] : identifier[x] = identifier[ds] [ identifier[feat] ] identifier[bad] = identifier[np] . identifier[isnan] ( identifier[x] )| identifier[np] . identifier[isinf] ( identifier[x] ) identifier[xout] = identifier[x] [~ identifier[bad] ] keyword[return] identifier[xout]
def get_feature(self, ds, feat): """Return filtered feature data The features are filtered according to the user-defined filters, using the information in `ds._filter`. In addition, all `nan` and `inf` values are purged. Parameters ---------- ds: dclab.rtdc_dataset.RTDCBase The dataset containing the feature feat: str The name of the feature; must be a scalar feature """ if ds.config['filtering']['enable filters']: x = ds[feat][ds._filter] # depends on [control=['if'], data=[]] else: x = ds[feat] bad = np.isnan(x) | np.isinf(x) xout = x[~bad] return xout
def QueryAttachments(self, document_link, query, options=None): """Queries attachments in a document. :param str document_link: The link to the document. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Attachments. :rtype: query_iterable.QueryIterable """ if options is None: options = {} path = base.GetPathFromLink(document_link, 'attachments') document_id = base.GetResourceIdOrFullNameFromLink(document_link) def fetch_fn(options): return self.__QueryFeed(path, 'attachments', document_id, lambda r: r['Attachments'], lambda _, b: b, query, options), self.last_response_headers return query_iterable.QueryIterable(self, query, options, fetch_fn)
def function[QueryAttachments, parameter[self, document_link, query, options]]: constant[Queries attachments in a document. :param str document_link: The link to the document. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Attachments. :rtype: query_iterable.QueryIterable ] if compare[name[options] is constant[None]] begin[:] variable[options] assign[=] dictionary[[], []] variable[path] assign[=] call[name[base].GetPathFromLink, parameter[name[document_link], constant[attachments]]] variable[document_id] assign[=] call[name[base].GetResourceIdOrFullNameFromLink, parameter[name[document_link]]] def function[fetch_fn, parameter[options]]: return[tuple[[<ast.Call object at 0x7da1b18e6020>, <ast.Attribute object at 0x7da1b18e60b0>]]] return[call[name[query_iterable].QueryIterable, parameter[name[self], name[query], name[options], name[fetch_fn]]]]
keyword[def] identifier[QueryAttachments] ( identifier[self] , identifier[document_link] , identifier[query] , identifier[options] = keyword[None] ): literal[string] keyword[if] identifier[options] keyword[is] keyword[None] : identifier[options] ={} identifier[path] = identifier[base] . identifier[GetPathFromLink] ( identifier[document_link] , literal[string] ) identifier[document_id] = identifier[base] . identifier[GetResourceIdOrFullNameFromLink] ( identifier[document_link] ) keyword[def] identifier[fetch_fn] ( identifier[options] ): keyword[return] identifier[self] . identifier[__QueryFeed] ( identifier[path] , literal[string] , identifier[document_id] , keyword[lambda] identifier[r] : identifier[r] [ literal[string] ], keyword[lambda] identifier[_] , identifier[b] : identifier[b] , identifier[query] , identifier[options] ), identifier[self] . identifier[last_response_headers] keyword[return] identifier[query_iterable] . identifier[QueryIterable] ( identifier[self] , identifier[query] , identifier[options] , identifier[fetch_fn] )
def QueryAttachments(self, document_link, query, options=None): """Queries attachments in a document. :param str document_link: The link to the document. :param (str or dict) query: :param dict options: The request options for the request. :return: Query Iterable of Attachments. :rtype: query_iterable.QueryIterable """ if options is None: options = {} # depends on [control=['if'], data=['options']] path = base.GetPathFromLink(document_link, 'attachments') document_id = base.GetResourceIdOrFullNameFromLink(document_link) def fetch_fn(options): return (self.__QueryFeed(path, 'attachments', document_id, lambda r: r['Attachments'], lambda _, b: b, query, options), self.last_response_headers) return query_iterable.QueryIterable(self, query, options, fetch_fn)
def cortex_rgba_plot_2D(the_map, rgba, axes=None, triangulation=None): ''' cortex_rgba_plot_2D(map, rgba, axes) plots the given cortical map on the given axes using the given (n x 4) matrix of vertex colors and yields the resulting polygon collection object. cortex_rgba_plot_2D(map, rgba) uses matplotlib.pyplot.gca() for the axes. The option triangulation may also be passed if the triangularion object has already been created; otherwise it is generated fresh. ''' cmap = colors_to_cmap(rgba) zs = np.linspace(0.0, 1.0, the_map.vertex_count) return cortex_cmap_plot_2D(the_map, zs, cmap, axes=axes, triangulation=triangulation)
def function[cortex_rgba_plot_2D, parameter[the_map, rgba, axes, triangulation]]: constant[ cortex_rgba_plot_2D(map, rgba, axes) plots the given cortical map on the given axes using the given (n x 4) matrix of vertex colors and yields the resulting polygon collection object. cortex_rgba_plot_2D(map, rgba) uses matplotlib.pyplot.gca() for the axes. The option triangulation may also be passed if the triangularion object has already been created; otherwise it is generated fresh. ] variable[cmap] assign[=] call[name[colors_to_cmap], parameter[name[rgba]]] variable[zs] assign[=] call[name[np].linspace, parameter[constant[0.0], constant[1.0], name[the_map].vertex_count]] return[call[name[cortex_cmap_plot_2D], parameter[name[the_map], name[zs], name[cmap]]]]
keyword[def] identifier[cortex_rgba_plot_2D] ( identifier[the_map] , identifier[rgba] , identifier[axes] = keyword[None] , identifier[triangulation] = keyword[None] ): literal[string] identifier[cmap] = identifier[colors_to_cmap] ( identifier[rgba] ) identifier[zs] = identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[the_map] . identifier[vertex_count] ) keyword[return] identifier[cortex_cmap_plot_2D] ( identifier[the_map] , identifier[zs] , identifier[cmap] , identifier[axes] = identifier[axes] , identifier[triangulation] = identifier[triangulation] )
def cortex_rgba_plot_2D(the_map, rgba, axes=None, triangulation=None): """ cortex_rgba_plot_2D(map, rgba, axes) plots the given cortical map on the given axes using the given (n x 4) matrix of vertex colors and yields the resulting polygon collection object. cortex_rgba_plot_2D(map, rgba) uses matplotlib.pyplot.gca() for the axes. The option triangulation may also be passed if the triangularion object has already been created; otherwise it is generated fresh. """ cmap = colors_to_cmap(rgba) zs = np.linspace(0.0, 1.0, the_map.vertex_count) return cortex_cmap_plot_2D(the_map, zs, cmap, axes=axes, triangulation=triangulation)
def setup(self, app: web.Application): """ Installation routes to app.router :param app: instance of aiohttp.web.Application """ if self.app is app: raise ValueError('The router is already configured ' 'for this application') self.app = app routes = sorted( ((r.name, (r, r.url_for().human_repr())) for r in self.routes()), key=utils.sort_key) exists = set() # type: Set[str] for name, (route, path) in routes: if name and name not in exists: exists.add(name) else: name = None app.router.add_route( route.method, path, route.handler, name=name)
def function[setup, parameter[self, app]]: constant[ Installation routes to app.router :param app: instance of aiohttp.web.Application ] if compare[name[self].app is name[app]] begin[:] <ast.Raise object at 0x7da1b0c53bb0> name[self].app assign[=] name[app] variable[routes] assign[=] call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da1b0c53c10>]] variable[exists] assign[=] call[name[set], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b0c53520>, <ast.Tuple object at 0x7da1b0c53eb0>]]] in starred[name[routes]] begin[:] if <ast.BoolOp object at 0x7da1b0c51d20> begin[:] call[name[exists].add, parameter[name[name]]] call[name[app].router.add_route, parameter[name[route].method, name[path], name[route].handler]]
keyword[def] identifier[setup] ( identifier[self] , identifier[app] : identifier[web] . identifier[Application] ): literal[string] keyword[if] identifier[self] . identifier[app] keyword[is] identifier[app] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[self] . identifier[app] = identifier[app] identifier[routes] = identifier[sorted] ( (( identifier[r] . identifier[name] ,( identifier[r] , identifier[r] . identifier[url_for] (). identifier[human_repr] ())) keyword[for] identifier[r] keyword[in] identifier[self] . identifier[routes] ()), identifier[key] = identifier[utils] . identifier[sort_key] ) identifier[exists] = identifier[set] () keyword[for] identifier[name] ,( identifier[route] , identifier[path] ) keyword[in] identifier[routes] : keyword[if] identifier[name] keyword[and] identifier[name] keyword[not] keyword[in] identifier[exists] : identifier[exists] . identifier[add] ( identifier[name] ) keyword[else] : identifier[name] = keyword[None] identifier[app] . identifier[router] . identifier[add_route] ( identifier[route] . identifier[method] , identifier[path] , identifier[route] . identifier[handler] , identifier[name] = identifier[name] )
def setup(self, app: web.Application): """ Installation routes to app.router :param app: instance of aiohttp.web.Application """ if self.app is app: raise ValueError('The router is already configured for this application') # depends on [control=['if'], data=[]] self.app = app routes = sorted(((r.name, (r, r.url_for().human_repr())) for r in self.routes()), key=utils.sort_key) exists = set() # type: Set[str] for (name, (route, path)) in routes: if name and name not in exists: exists.add(name) # depends on [control=['if'], data=[]] else: name = None app.router.add_route(route.method, path, route.handler, name=name) # depends on [control=['for'], data=[]]
def generate_workflow_description(self): ''' Generate workflow json for launching the workflow against the gbdx api Args: None Returns: json string ''' if not self.tasks: raise WorkflowError('Workflow contains no tasks, and cannot be executed.') self.definition = self.workflow_skeleton() if self.batch_values: self.definition["batch_values"] = self.batch_values all_input_port_values = [t.inputs.__getattribute__(input_port_name).value for t in self.tasks for input_port_name in t.inputs._portnames] for task in self.tasks: # only include multiplex output ports in this task if other tasks refer to them in their inputs. # 1. find the multplex output port_names in this task # 2. see if they are referred to in any other tasks inputs # 3. If not, exclude them from the workflow_def output_multiplex_ports_to_exclude = [] multiplex_output_port_names = [portname for portname in task.outputs._portnames if task.outputs.__getattribute__(portname).is_multiplex] for p in multiplex_output_port_names: output_port_reference = 'source:' + task.name + ':' + p if output_port_reference not in all_input_port_values: output_multiplex_ports_to_exclude.append(p) task_def = task.generate_task_workflow_json( output_multiplex_ports_to_exclude=output_multiplex_ports_to_exclude) self.definition['tasks'].append(task_def) if self.callback: self.definition['callback'] = self.callback return self.definition
def function[generate_workflow_description, parameter[self]]: constant[ Generate workflow json for launching the workflow against the gbdx api Args: None Returns: json string ] if <ast.UnaryOp object at 0x7da1aff36350> begin[:] <ast.Raise object at 0x7da1aff35300> name[self].definition assign[=] call[name[self].workflow_skeleton, parameter[]] if name[self].batch_values begin[:] call[name[self].definition][constant[batch_values]] assign[=] name[self].batch_values variable[all_input_port_values] assign[=] <ast.ListComp object at 0x7da1aff367d0> for taget[name[task]] in starred[name[self].tasks] begin[:] variable[output_multiplex_ports_to_exclude] assign[=] list[[]] variable[multiplex_output_port_names] assign[=] <ast.ListComp object at 0x7da1b0142380> for taget[name[p]] in starred[name[multiplex_output_port_names]] begin[:] variable[output_port_reference] assign[=] binary_operation[binary_operation[binary_operation[constant[source:] + name[task].name] + constant[:]] + name[p]] if compare[name[output_port_reference] <ast.NotIn object at 0x7da2590d7190> name[all_input_port_values]] begin[:] call[name[output_multiplex_ports_to_exclude].append, parameter[name[p]]] variable[task_def] assign[=] call[name[task].generate_task_workflow_json, parameter[]] call[call[name[self].definition][constant[tasks]].append, parameter[name[task_def]]] if name[self].callback begin[:] call[name[self].definition][constant[callback]] assign[=] name[self].callback return[name[self].definition]
keyword[def] identifier[generate_workflow_description] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[tasks] : keyword[raise] identifier[WorkflowError] ( literal[string] ) identifier[self] . identifier[definition] = identifier[self] . identifier[workflow_skeleton] () keyword[if] identifier[self] . identifier[batch_values] : identifier[self] . identifier[definition] [ literal[string] ]= identifier[self] . identifier[batch_values] identifier[all_input_port_values] =[ identifier[t] . identifier[inputs] . identifier[__getattribute__] ( identifier[input_port_name] ). identifier[value] keyword[for] identifier[t] keyword[in] identifier[self] . identifier[tasks] keyword[for] identifier[input_port_name] keyword[in] identifier[t] . identifier[inputs] . identifier[_portnames] ] keyword[for] identifier[task] keyword[in] identifier[self] . identifier[tasks] : identifier[output_multiplex_ports_to_exclude] =[] identifier[multiplex_output_port_names] =[ identifier[portname] keyword[for] identifier[portname] keyword[in] identifier[task] . identifier[outputs] . identifier[_portnames] keyword[if] identifier[task] . identifier[outputs] . identifier[__getattribute__] ( identifier[portname] ). identifier[is_multiplex] ] keyword[for] identifier[p] keyword[in] identifier[multiplex_output_port_names] : identifier[output_port_reference] = literal[string] + identifier[task] . identifier[name] + literal[string] + identifier[p] keyword[if] identifier[output_port_reference] keyword[not] keyword[in] identifier[all_input_port_values] : identifier[output_multiplex_ports_to_exclude] . identifier[append] ( identifier[p] ) identifier[task_def] = identifier[task] . identifier[generate_task_workflow_json] ( identifier[output_multiplex_ports_to_exclude] = identifier[output_multiplex_ports_to_exclude] ) identifier[self] . identifier[definition] [ literal[string] ]. identifier[append] ( identifier[task_def] ) keyword[if] identifier[self] . 
identifier[callback] : identifier[self] . identifier[definition] [ literal[string] ]= identifier[self] . identifier[callback] keyword[return] identifier[self] . identifier[definition]
def generate_workflow_description(self): """ Generate workflow json for launching the workflow against the gbdx api Args: None Returns: json string """ if not self.tasks: raise WorkflowError('Workflow contains no tasks, and cannot be executed.') # depends on [control=['if'], data=[]] self.definition = self.workflow_skeleton() if self.batch_values: self.definition['batch_values'] = self.batch_values # depends on [control=['if'], data=[]] all_input_port_values = [t.inputs.__getattribute__(input_port_name).value for t in self.tasks for input_port_name in t.inputs._portnames] for task in self.tasks: # only include multiplex output ports in this task if other tasks refer to them in their inputs. # 1. find the multplex output port_names in this task # 2. see if they are referred to in any other tasks inputs # 3. If not, exclude them from the workflow_def output_multiplex_ports_to_exclude = [] multiplex_output_port_names = [portname for portname in task.outputs._portnames if task.outputs.__getattribute__(portname).is_multiplex] for p in multiplex_output_port_names: output_port_reference = 'source:' + task.name + ':' + p if output_port_reference not in all_input_port_values: output_multiplex_ports_to_exclude.append(p) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']] task_def = task.generate_task_workflow_json(output_multiplex_ports_to_exclude=output_multiplex_ports_to_exclude) self.definition['tasks'].append(task_def) # depends on [control=['for'], data=['task']] if self.callback: self.definition['callback'] = self.callback # depends on [control=['if'], data=[]] return self.definition
def n_exec_stmt(self, node): """ exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT exec_stmt ::= expr exprlist EXEC_STMT """ start = len(self.f.getvalue()) + len(self.indent) try: super(FragmentsWalker, self).n_exec_stmt(node) except GenericASTTraversalPruningException: pass self.set_pos_info(node, start, len(self.f.getvalue())) self.set_pos_info(node[-1], start, len(self.f.getvalue())) self.prune()
def function[n_exec_stmt, parameter[self, node]]: constant[ exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT exec_stmt ::= expr exprlist EXEC_STMT ] variable[start] assign[=] binary_operation[call[name[len], parameter[call[name[self].f.getvalue, parameter[]]]] + call[name[len], parameter[name[self].indent]]] <ast.Try object at 0x7da18dc9b0d0> call[name[self].set_pos_info, parameter[name[node], name[start], call[name[len], parameter[call[name[self].f.getvalue, parameter[]]]]]] call[name[self].set_pos_info, parameter[call[name[node]][<ast.UnaryOp object at 0x7da18dc9b220>], name[start], call[name[len], parameter[call[name[self].f.getvalue, parameter[]]]]]] call[name[self].prune, parameter[]]
keyword[def] identifier[n_exec_stmt] ( identifier[self] , identifier[node] ): literal[string] identifier[start] = identifier[len] ( identifier[self] . identifier[f] . identifier[getvalue] ())+ identifier[len] ( identifier[self] . identifier[indent] ) keyword[try] : identifier[super] ( identifier[FragmentsWalker] , identifier[self] ). identifier[n_exec_stmt] ( identifier[node] ) keyword[except] identifier[GenericASTTraversalPruningException] : keyword[pass] identifier[self] . identifier[set_pos_info] ( identifier[node] , identifier[start] , identifier[len] ( identifier[self] . identifier[f] . identifier[getvalue] ())) identifier[self] . identifier[set_pos_info] ( identifier[node] [- literal[int] ], identifier[start] , identifier[len] ( identifier[self] . identifier[f] . identifier[getvalue] ())) identifier[self] . identifier[prune] ()
def n_exec_stmt(self, node): """ exec_stmt ::= expr exprlist DUP_TOP EXEC_STMT exec_stmt ::= expr exprlist EXEC_STMT """ start = len(self.f.getvalue()) + len(self.indent) try: super(FragmentsWalker, self).n_exec_stmt(node) # depends on [control=['try'], data=[]] except GenericASTTraversalPruningException: pass # depends on [control=['except'], data=[]] self.set_pos_info(node, start, len(self.f.getvalue())) self.set_pos_info(node[-1], start, len(self.f.getvalue())) self.prune()
def prompt_user_to_select_link(self, links): """ Prompt the user to select a link from a list to open. Return the link that was selected, or ``None`` if no link was selected. """ link_pages = self.get_link_pages(links) n = 0 while n in range(len(link_pages)): link_page = link_pages[n] text = 'Select a link to open (page {} of {}):\n\n' text = text.format(n+1, len(link_pages)) text += self.get_link_page_text(link_page) if link_page is not link_pages[-1]: text += '[j] next page...' if link_page is not link_pages[0]: if link_page is not link_pages[-1]: text += '\n' text += '[k] ...previous page' try: choice = chr(self.show_notification(text)) try: choice = int(choice) except ValueError: pass except ValueError: return None if choice == 'j': if link_page is not link_pages[-1]: n += 1 continue elif choice == 'k': if link_page is not link_pages[0]: n -= 1 continue elif choice not in range(len(link_page)): return None return link_page[choice]['href']
def function[prompt_user_to_select_link, parameter[self, links]]: constant[ Prompt the user to select a link from a list to open. Return the link that was selected, or ``None`` if no link was selected. ] variable[link_pages] assign[=] call[name[self].get_link_pages, parameter[name[links]]] variable[n] assign[=] constant[0] while compare[name[n] in call[name[range], parameter[call[name[len], parameter[name[link_pages]]]]]] begin[:] variable[link_page] assign[=] call[name[link_pages]][name[n]] variable[text] assign[=] constant[Select a link to open (page {} of {}): ] variable[text] assign[=] call[name[text].format, parameter[binary_operation[name[n] + constant[1]], call[name[len], parameter[name[link_pages]]]]] <ast.AugAssign object at 0x7da18dc049a0> if compare[name[link_page] is_not call[name[link_pages]][<ast.UnaryOp object at 0x7da18dc07d30>]] begin[:] <ast.AugAssign object at 0x7da18dc05000> if compare[name[link_page] is_not call[name[link_pages]][constant[0]]] begin[:] if compare[name[link_page] is_not call[name[link_pages]][<ast.UnaryOp object at 0x7da18dc06530>]] begin[:] <ast.AugAssign object at 0x7da18dc06560> <ast.AugAssign object at 0x7da18dc06dd0> <ast.Try object at 0x7da18dc04640> if compare[name[choice] equal[==] constant[j]] begin[:] if compare[name[link_page] is_not call[name[link_pages]][<ast.UnaryOp object at 0x7da18dc07820>]] begin[:] <ast.AugAssign object at 0x7da18dc06680> continue return[call[call[name[link_page]][name[choice]]][constant[href]]]
keyword[def] identifier[prompt_user_to_select_link] ( identifier[self] , identifier[links] ): literal[string] identifier[link_pages] = identifier[self] . identifier[get_link_pages] ( identifier[links] ) identifier[n] = literal[int] keyword[while] identifier[n] keyword[in] identifier[range] ( identifier[len] ( identifier[link_pages] )): identifier[link_page] = identifier[link_pages] [ identifier[n] ] identifier[text] = literal[string] identifier[text] = identifier[text] . identifier[format] ( identifier[n] + literal[int] , identifier[len] ( identifier[link_pages] )) identifier[text] += identifier[self] . identifier[get_link_page_text] ( identifier[link_page] ) keyword[if] identifier[link_page] keyword[is] keyword[not] identifier[link_pages] [- literal[int] ]: identifier[text] += literal[string] keyword[if] identifier[link_page] keyword[is] keyword[not] identifier[link_pages] [ literal[int] ]: keyword[if] identifier[link_page] keyword[is] keyword[not] identifier[link_pages] [- literal[int] ]: identifier[text] += literal[string] identifier[text] += literal[string] keyword[try] : identifier[choice] = identifier[chr] ( identifier[self] . 
identifier[show_notification] ( identifier[text] )) keyword[try] : identifier[choice] = identifier[int] ( identifier[choice] ) keyword[except] identifier[ValueError] : keyword[pass] keyword[except] identifier[ValueError] : keyword[return] keyword[None] keyword[if] identifier[choice] == literal[string] : keyword[if] identifier[link_page] keyword[is] keyword[not] identifier[link_pages] [- literal[int] ]: identifier[n] += literal[int] keyword[continue] keyword[elif] identifier[choice] == literal[string] : keyword[if] identifier[link_page] keyword[is] keyword[not] identifier[link_pages] [ literal[int] ]: identifier[n] -= literal[int] keyword[continue] keyword[elif] identifier[choice] keyword[not] keyword[in] identifier[range] ( identifier[len] ( identifier[link_page] )): keyword[return] keyword[None] keyword[return] identifier[link_page] [ identifier[choice] ][ literal[string] ]
def prompt_user_to_select_link(self, links): """ Prompt the user to select a link from a list to open. Return the link that was selected, or ``None`` if no link was selected. """ link_pages = self.get_link_pages(links) n = 0 while n in range(len(link_pages)): link_page = link_pages[n] text = 'Select a link to open (page {} of {}):\n\n' text = text.format(n + 1, len(link_pages)) text += self.get_link_page_text(link_page) if link_page is not link_pages[-1]: text += '[j] next page...' # depends on [control=['if'], data=[]] if link_page is not link_pages[0]: if link_page is not link_pages[-1]: text += '\n' # depends on [control=['if'], data=[]] text += '[k] ...previous page' # depends on [control=['if'], data=['link_page']] try: choice = chr(self.show_notification(text)) try: choice = int(choice) # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]] except ValueError: return None # depends on [control=['except'], data=[]] if choice == 'j': if link_page is not link_pages[-1]: n += 1 # depends on [control=['if'], data=[]] continue # depends on [control=['if'], data=[]] elif choice == 'k': if link_page is not link_pages[0]: n -= 1 # depends on [control=['if'], data=[]] continue # depends on [control=['if'], data=[]] elif choice not in range(len(link_page)): return None # depends on [control=['if'], data=[]] return link_page[choice]['href'] # depends on [control=['while'], data=['n']]
def get_lan_ip(interface="default"): if sys.version_info < (3, 0, 0): if type(interface) == str: interface = unicode(interface) else: if type(interface) == bytes: interface = interface.decode("utf-8") # Get ID of interface that handles WAN stuff. default_gateway = get_default_gateway(interface) gateways = netifaces.gateways() wan_id = None if netifaces.AF_INET in gateways: gw_list = gateways[netifaces.AF_INET] for gw_info in gw_list: if gw_info[0] == default_gateway: wan_id = gw_info[1] break # Find LAN IP of interface for WAN stuff. interfaces = netifaces.interfaces() if wan_id in interfaces: families = netifaces.ifaddresses(wan_id) if netifaces.AF_INET in families: if_info_list = families[netifaces.AF_INET] for if_info in if_info_list: if "addr" in if_info: return if_info["addr"] """ Execution may reach here if the host is using virtual interfaces on Linux and there are no gateways which suggests the host is a VPS or server. In this case """ if platform.system() == "Linux": if ip is not None: return ip.routes["8.8.8.8"]["prefsrc"] return None
def function[get_lan_ip, parameter[interface]]: if compare[name[sys].version_info less[<] tuple[[<ast.Constant object at 0x7da1b065b7c0>, <ast.Constant object at 0x7da1b065a590>, <ast.Constant object at 0x7da18f720820>]]] begin[:] if compare[call[name[type], parameter[name[interface]]] equal[==] name[str]] begin[:] variable[interface] assign[=] call[name[unicode], parameter[name[interface]]] variable[default_gateway] assign[=] call[name[get_default_gateway], parameter[name[interface]]] variable[gateways] assign[=] call[name[netifaces].gateways, parameter[]] variable[wan_id] assign[=] constant[None] if compare[name[netifaces].AF_INET in name[gateways]] begin[:] variable[gw_list] assign[=] call[name[gateways]][name[netifaces].AF_INET] for taget[name[gw_info]] in starred[name[gw_list]] begin[:] if compare[call[name[gw_info]][constant[0]] equal[==] name[default_gateway]] begin[:] variable[wan_id] assign[=] call[name[gw_info]][constant[1]] break variable[interfaces] assign[=] call[name[netifaces].interfaces, parameter[]] if compare[name[wan_id] in name[interfaces]] begin[:] variable[families] assign[=] call[name[netifaces].ifaddresses, parameter[name[wan_id]]] if compare[name[netifaces].AF_INET in name[families]] begin[:] variable[if_info_list] assign[=] call[name[families]][name[netifaces].AF_INET] for taget[name[if_info]] in starred[name[if_info_list]] begin[:] if compare[constant[addr] in name[if_info]] begin[:] return[call[name[if_info]][constant[addr]]] constant[ Execution may reach here if the host is using virtual interfaces on Linux and there are no gateways which suggests the host is a VPS or server. In this case ] if compare[call[name[platform].system, parameter[]] equal[==] constant[Linux]] begin[:] if compare[name[ip] is_not constant[None]] begin[:] return[call[call[name[ip].routes][constant[8.8.8.8]]][constant[prefsrc]]] return[constant[None]]
keyword[def] identifier[get_lan_ip] ( identifier[interface] = literal[string] ): keyword[if] identifier[sys] . identifier[version_info] <( literal[int] , literal[int] , literal[int] ): keyword[if] identifier[type] ( identifier[interface] )== identifier[str] : identifier[interface] = identifier[unicode] ( identifier[interface] ) keyword[else] : keyword[if] identifier[type] ( identifier[interface] )== identifier[bytes] : identifier[interface] = identifier[interface] . identifier[decode] ( literal[string] ) identifier[default_gateway] = identifier[get_default_gateway] ( identifier[interface] ) identifier[gateways] = identifier[netifaces] . identifier[gateways] () identifier[wan_id] = keyword[None] keyword[if] identifier[netifaces] . identifier[AF_INET] keyword[in] identifier[gateways] : identifier[gw_list] = identifier[gateways] [ identifier[netifaces] . identifier[AF_INET] ] keyword[for] identifier[gw_info] keyword[in] identifier[gw_list] : keyword[if] identifier[gw_info] [ literal[int] ]== identifier[default_gateway] : identifier[wan_id] = identifier[gw_info] [ literal[int] ] keyword[break] identifier[interfaces] = identifier[netifaces] . identifier[interfaces] () keyword[if] identifier[wan_id] keyword[in] identifier[interfaces] : identifier[families] = identifier[netifaces] . identifier[ifaddresses] ( identifier[wan_id] ) keyword[if] identifier[netifaces] . identifier[AF_INET] keyword[in] identifier[families] : identifier[if_info_list] = identifier[families] [ identifier[netifaces] . identifier[AF_INET] ] keyword[for] identifier[if_info] keyword[in] identifier[if_info_list] : keyword[if] literal[string] keyword[in] identifier[if_info] : keyword[return] identifier[if_info] [ literal[string] ] literal[string] keyword[if] identifier[platform] . identifier[system] ()== literal[string] : keyword[if] identifier[ip] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[ip] . 
identifier[routes] [ literal[string] ][ literal[string] ] keyword[return] keyword[None]
def get_lan_ip(interface='default'): if sys.version_info < (3, 0, 0): if type(interface) == str: interface = unicode(interface) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif type(interface) == bytes: interface = interface.decode('utf-8') # depends on [control=['if'], data=[]] # Get ID of interface that handles WAN stuff. default_gateway = get_default_gateway(interface) gateways = netifaces.gateways() wan_id = None if netifaces.AF_INET in gateways: gw_list = gateways[netifaces.AF_INET] for gw_info in gw_list: if gw_info[0] == default_gateway: wan_id = gw_info[1] break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['gw_info']] # depends on [control=['if'], data=['gateways']] # Find LAN IP of interface for WAN stuff. interfaces = netifaces.interfaces() if wan_id in interfaces: families = netifaces.ifaddresses(wan_id) if netifaces.AF_INET in families: if_info_list = families[netifaces.AF_INET] for if_info in if_info_list: if 'addr' in if_info: return if_info['addr'] # depends on [control=['if'], data=['if_info']] # depends on [control=['for'], data=['if_info']] # depends on [control=['if'], data=['families']] # depends on [control=['if'], data=['wan_id']] '\n Execution may reach here if the host is using\n virtual interfaces on Linux and there are no gateways\n which suggests the host is a VPS or server. In this\n case\n ' if platform.system() == 'Linux': if ip is not None: return ip.routes['8.8.8.8']['prefsrc'] # depends on [control=['if'], data=['ip']] # depends on [control=['if'], data=[]] return None
def add_pipe( self, component, name=None, before=None, after=None, first=None, last=None ): """Add a component to the processing pipeline. Valid components are callables that take a `Doc` object, modify it and return it. Only one of before/after/first/last can be set. Default behaviour is "last". component (callable): The pipeline component. name (unicode): Name of pipeline component. Overwrites existing component.name attribute if available. If no name is set and the component exposes no name attribute, component.__name__ is used. An error is raised if a name already exists in the pipeline. before (unicode): Component name to insert component directly before. after (unicode): Component name to insert component directly after. first (bool): Insert component first / not first in the pipeline. last (bool): Insert component last / not last in the pipeline. DOCS: https://spacy.io/api/language#add_pipe """ if not hasattr(component, "__call__"): msg = Errors.E003.format(component=repr(component), name=name) if isinstance(component, basestring_) and component in self.factories: msg += Errors.E004.format(component=component) raise ValueError(msg) if name is None: if hasattr(component, "name"): name = component.name elif hasattr(component, "__name__"): name = component.__name__ elif hasattr(component, "__class__") and hasattr( component.__class__, "__name__" ): name = component.__class__.__name__ else: name = repr(component) if name in self.pipe_names: raise ValueError(Errors.E007.format(name=name, opts=self.pipe_names)) if sum([bool(before), bool(after), bool(first), bool(last)]) >= 2: raise ValueError(Errors.E006) pipe = (name, component) if last or not any([first, before, after]): self.pipeline.append(pipe) elif first: self.pipeline.insert(0, pipe) elif before and before in self.pipe_names: self.pipeline.insert(self.pipe_names.index(before), pipe) elif after and after in self.pipe_names: self.pipeline.insert(self.pipe_names.index(after) + 1, pipe) else: raise ValueError( 
Errors.E001.format(name=before or after, opts=self.pipe_names) )
def function[add_pipe, parameter[self, component, name, before, after, first, last]]: constant[Add a component to the processing pipeline. Valid components are callables that take a `Doc` object, modify it and return it. Only one of before/after/first/last can be set. Default behaviour is "last". component (callable): The pipeline component. name (unicode): Name of pipeline component. Overwrites existing component.name attribute if available. If no name is set and the component exposes no name attribute, component.__name__ is used. An error is raised if a name already exists in the pipeline. before (unicode): Component name to insert component directly before. after (unicode): Component name to insert component directly after. first (bool): Insert component first / not first in the pipeline. last (bool): Insert component last / not last in the pipeline. DOCS: https://spacy.io/api/language#add_pipe ] if <ast.UnaryOp object at 0x7da1b1ef82e0> begin[:] variable[msg] assign[=] call[name[Errors].E003.format, parameter[]] if <ast.BoolOp object at 0x7da1b1ef84f0> begin[:] <ast.AugAssign object at 0x7da1b1ef8af0> <ast.Raise object at 0x7da1b1ef8a00> if compare[name[name] is constant[None]] begin[:] if call[name[hasattr], parameter[name[component], constant[name]]] begin[:] variable[name] assign[=] name[component].name if compare[name[name] in name[self].pipe_names] begin[:] <ast.Raise object at 0x7da1b202bf70> if compare[call[name[sum], parameter[list[[<ast.Call object at 0x7da1b1e13400>, <ast.Call object at 0x7da1b1e11150>, <ast.Call object at 0x7da1b1e13b80>, <ast.Call object at 0x7da1b1e11c60>]]]] greater_or_equal[>=] constant[2]] begin[:] <ast.Raise object at 0x7da1b1e12cb0> variable[pipe] assign[=] tuple[[<ast.Name object at 0x7da1b1e13ac0>, <ast.Name object at 0x7da1b1e13430>]] if <ast.BoolOp object at 0x7da1b1e13a30> begin[:] call[name[self].pipeline.append, parameter[name[pipe]]]
keyword[def] identifier[add_pipe] ( identifier[self] , identifier[component] , identifier[name] = keyword[None] , identifier[before] = keyword[None] , identifier[after] = keyword[None] , identifier[first] = keyword[None] , identifier[last] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[component] , literal[string] ): identifier[msg] = identifier[Errors] . identifier[E003] . identifier[format] ( identifier[component] = identifier[repr] ( identifier[component] ), identifier[name] = identifier[name] ) keyword[if] identifier[isinstance] ( identifier[component] , identifier[basestring_] ) keyword[and] identifier[component] keyword[in] identifier[self] . identifier[factories] : identifier[msg] += identifier[Errors] . identifier[E004] . identifier[format] ( identifier[component] = identifier[component] ) keyword[raise] identifier[ValueError] ( identifier[msg] ) keyword[if] identifier[name] keyword[is] keyword[None] : keyword[if] identifier[hasattr] ( identifier[component] , literal[string] ): identifier[name] = identifier[component] . identifier[name] keyword[elif] identifier[hasattr] ( identifier[component] , literal[string] ): identifier[name] = identifier[component] . identifier[__name__] keyword[elif] identifier[hasattr] ( identifier[component] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[component] . identifier[__class__] , literal[string] ): identifier[name] = identifier[component] . identifier[__class__] . identifier[__name__] keyword[else] : identifier[name] = identifier[repr] ( identifier[component] ) keyword[if] identifier[name] keyword[in] identifier[self] . identifier[pipe_names] : keyword[raise] identifier[ValueError] ( identifier[Errors] . identifier[E007] . identifier[format] ( identifier[name] = identifier[name] , identifier[opts] = identifier[self] . 
identifier[pipe_names] )) keyword[if] identifier[sum] ([ identifier[bool] ( identifier[before] ), identifier[bool] ( identifier[after] ), identifier[bool] ( identifier[first] ), identifier[bool] ( identifier[last] )])>= literal[int] : keyword[raise] identifier[ValueError] ( identifier[Errors] . identifier[E006] ) identifier[pipe] =( identifier[name] , identifier[component] ) keyword[if] identifier[last] keyword[or] keyword[not] identifier[any] ([ identifier[first] , identifier[before] , identifier[after] ]): identifier[self] . identifier[pipeline] . identifier[append] ( identifier[pipe] ) keyword[elif] identifier[first] : identifier[self] . identifier[pipeline] . identifier[insert] ( literal[int] , identifier[pipe] ) keyword[elif] identifier[before] keyword[and] identifier[before] keyword[in] identifier[self] . identifier[pipe_names] : identifier[self] . identifier[pipeline] . identifier[insert] ( identifier[self] . identifier[pipe_names] . identifier[index] ( identifier[before] ), identifier[pipe] ) keyword[elif] identifier[after] keyword[and] identifier[after] keyword[in] identifier[self] . identifier[pipe_names] : identifier[self] . identifier[pipeline] . identifier[insert] ( identifier[self] . identifier[pipe_names] . identifier[index] ( identifier[after] )+ literal[int] , identifier[pipe] ) keyword[else] : keyword[raise] identifier[ValueError] ( identifier[Errors] . identifier[E001] . identifier[format] ( identifier[name] = identifier[before] keyword[or] identifier[after] , identifier[opts] = identifier[self] . identifier[pipe_names] ) )
def add_pipe(self, component, name=None, before=None, after=None, first=None, last=None): """Add a component to the processing pipeline. Valid components are callables that take a `Doc` object, modify it and return it. Only one of before/after/first/last can be set. Default behaviour is "last". component (callable): The pipeline component. name (unicode): Name of pipeline component. Overwrites existing component.name attribute if available. If no name is set and the component exposes no name attribute, component.__name__ is used. An error is raised if a name already exists in the pipeline. before (unicode): Component name to insert component directly before. after (unicode): Component name to insert component directly after. first (bool): Insert component first / not first in the pipeline. last (bool): Insert component last / not last in the pipeline. DOCS: https://spacy.io/api/language#add_pipe """ if not hasattr(component, '__call__'): msg = Errors.E003.format(component=repr(component), name=name) if isinstance(component, basestring_) and component in self.factories: msg += Errors.E004.format(component=component) # depends on [control=['if'], data=[]] raise ValueError(msg) # depends on [control=['if'], data=[]] if name is None: if hasattr(component, 'name'): name = component.name # depends on [control=['if'], data=[]] elif hasattr(component, '__name__'): name = component.__name__ # depends on [control=['if'], data=[]] elif hasattr(component, '__class__') and hasattr(component.__class__, '__name__'): name = component.__class__.__name__ # depends on [control=['if'], data=[]] else: name = repr(component) # depends on [control=['if'], data=['name']] if name in self.pipe_names: raise ValueError(Errors.E007.format(name=name, opts=self.pipe_names)) # depends on [control=['if'], data=['name']] if sum([bool(before), bool(after), bool(first), bool(last)]) >= 2: raise ValueError(Errors.E006) # depends on [control=['if'], data=[]] pipe = (name, component) if last or not 
any([first, before, after]): self.pipeline.append(pipe) # depends on [control=['if'], data=[]] elif first: self.pipeline.insert(0, pipe) # depends on [control=['if'], data=[]] elif before and before in self.pipe_names: self.pipeline.insert(self.pipe_names.index(before), pipe) # depends on [control=['if'], data=[]] elif after and after in self.pipe_names: self.pipeline.insert(self.pipe_names.index(after) + 1, pipe) # depends on [control=['if'], data=[]] else: raise ValueError(Errors.E001.format(name=before or after, opts=self.pipe_names))
def getFileSystemSize(dirPath): """ Return the free space, and total size of the file system hosting `dirPath`. :param str dirPath: A valid path to a directory. :return: free space and total size of file system :rtype: tuple """ assert os.path.exists(dirPath) diskStats = os.statvfs(dirPath) freeSpace = diskStats.f_frsize * diskStats.f_bavail diskSize = diskStats.f_frsize * diskStats.f_blocks return freeSpace, diskSize
def function[getFileSystemSize, parameter[dirPath]]: constant[ Return the free space, and total size of the file system hosting `dirPath`. :param str dirPath: A valid path to a directory. :return: free space and total size of file system :rtype: tuple ] assert[call[name[os].path.exists, parameter[name[dirPath]]]] variable[diskStats] assign[=] call[name[os].statvfs, parameter[name[dirPath]]] variable[freeSpace] assign[=] binary_operation[name[diskStats].f_frsize * name[diskStats].f_bavail] variable[diskSize] assign[=] binary_operation[name[diskStats].f_frsize * name[diskStats].f_blocks] return[tuple[[<ast.Name object at 0x7da2044c1420>, <ast.Name object at 0x7da2044c2fb0>]]]
keyword[def] identifier[getFileSystemSize] ( identifier[dirPath] ): literal[string] keyword[assert] identifier[os] . identifier[path] . identifier[exists] ( identifier[dirPath] ) identifier[diskStats] = identifier[os] . identifier[statvfs] ( identifier[dirPath] ) identifier[freeSpace] = identifier[diskStats] . identifier[f_frsize] * identifier[diskStats] . identifier[f_bavail] identifier[diskSize] = identifier[diskStats] . identifier[f_frsize] * identifier[diskStats] . identifier[f_blocks] keyword[return] identifier[freeSpace] , identifier[diskSize]
def getFileSystemSize(dirPath): """ Return the free space, and total size of the file system hosting `dirPath`. :param str dirPath: A valid path to a directory. :return: free space and total size of file system :rtype: tuple """ assert os.path.exists(dirPath) diskStats = os.statvfs(dirPath) freeSpace = diskStats.f_frsize * diskStats.f_bavail diskSize = diskStats.f_frsize * diskStats.f_blocks return (freeSpace, diskSize)
def OnExitSelectionMode(self, event): """Event handler for leaving selection mode, enables cell edits""" self.grid.sel_mode_cursor = None self.grid.EnableDragGridSize(True) self.grid.EnableEditing(True)
def function[OnExitSelectionMode, parameter[self, event]]: constant[Event handler for leaving selection mode, enables cell edits] name[self].grid.sel_mode_cursor assign[=] constant[None] call[name[self].grid.EnableDragGridSize, parameter[constant[True]]] call[name[self].grid.EnableEditing, parameter[constant[True]]]
keyword[def] identifier[OnExitSelectionMode] ( identifier[self] , identifier[event] ): literal[string] identifier[self] . identifier[grid] . identifier[sel_mode_cursor] = keyword[None] identifier[self] . identifier[grid] . identifier[EnableDragGridSize] ( keyword[True] ) identifier[self] . identifier[grid] . identifier[EnableEditing] ( keyword[True] )
def OnExitSelectionMode(self, event): """Event handler for leaving selection mode, enables cell edits""" self.grid.sel_mode_cursor = None self.grid.EnableDragGridSize(True) self.grid.EnableEditing(True)
def __analizar_observaciones(self, ret): "Comprueba y extrae observaciones si existen en la respuesta XML" self.Observaciones = [obs["codigoDescripcion"] for obs in ret.get('arrayObservaciones', [])] self.Obs = '\n'.join(["%(codigo)s: %(descripcion)s" % obs for obs in self.Observaciones])
def function[__analizar_observaciones, parameter[self, ret]]: constant[Comprueba y extrae observaciones si existen en la respuesta XML] name[self].Observaciones assign[=] <ast.ListComp object at 0x7da18f58f6d0> name[self].Obs assign[=] call[constant[ ].join, parameter[<ast.ListComp object at 0x7da18f58d210>]]
keyword[def] identifier[__analizar_observaciones] ( identifier[self] , identifier[ret] ): literal[string] identifier[self] . identifier[Observaciones] =[ identifier[obs] [ literal[string] ] keyword[for] identifier[obs] keyword[in] identifier[ret] . identifier[get] ( literal[string] ,[])] identifier[self] . identifier[Obs] = literal[string] . identifier[join] ([ literal[string] % identifier[obs] keyword[for] identifier[obs] keyword[in] identifier[self] . identifier[Observaciones] ])
def __analizar_observaciones(self, ret): """Comprueba y extrae observaciones si existen en la respuesta XML""" self.Observaciones = [obs['codigoDescripcion'] for obs in ret.get('arrayObservaciones', [])] self.Obs = '\n'.join(['%(codigo)s: %(descripcion)s' % obs for obs in self.Observaciones])
def dumps(obj, *transformers): """ Serializes Java primitive data and objects unmarshaled by load(s) before into string. :param obj: A Python primitive object, or one loaded using load(s) :param transformers: Custom transformers to use :return: The serialized data as a string """ marshaller = JavaObjectMarshaller() # Add custom transformers for transformer in transformers: marshaller.add_transformer(transformer) return marshaller.dump(obj)
def function[dumps, parameter[obj]]: constant[ Serializes Java primitive data and objects unmarshaled by load(s) before into string. :param obj: A Python primitive object, or one loaded using load(s) :param transformers: Custom transformers to use :return: The serialized data as a string ] variable[marshaller] assign[=] call[name[JavaObjectMarshaller], parameter[]] for taget[name[transformer]] in starred[name[transformers]] begin[:] call[name[marshaller].add_transformer, parameter[name[transformer]]] return[call[name[marshaller].dump, parameter[name[obj]]]]
keyword[def] identifier[dumps] ( identifier[obj] ,* identifier[transformers] ): literal[string] identifier[marshaller] = identifier[JavaObjectMarshaller] () keyword[for] identifier[transformer] keyword[in] identifier[transformers] : identifier[marshaller] . identifier[add_transformer] ( identifier[transformer] ) keyword[return] identifier[marshaller] . identifier[dump] ( identifier[obj] )
def dumps(obj, *transformers): """ Serializes Java primitive data and objects unmarshaled by load(s) before into string. :param obj: A Python primitive object, or one loaded using load(s) :param transformers: Custom transformers to use :return: The serialized data as a string """ marshaller = JavaObjectMarshaller() # Add custom transformers for transformer in transformers: marshaller.add_transformer(transformer) # depends on [control=['for'], data=['transformer']] return marshaller.dump(obj)
def get_gates(): """ get all gates known on the Ariane server :return: """ LOGGER.debug("GateService.get_gates") params = SessionService.complete_transactional_req(None) if params is None: if MappingService.driver_type != DriverFactory.DRIVER_REST: params = {'OPERATION': 'getGates'} args = {'properties': params} else: args = {'http_operation': 'GET', 'operation_path': ''} else: if MappingService.driver_type != DriverFactory.DRIVER_REST: params['OPERATION'] = 'getGates' args = {'properties': params} else: args = {'http_operation': 'GET', 'operation_path': '', 'parameters': params} response = GateService.requester.call(args) if MappingService.driver_type != DriverFactory.DRIVER_REST: response = response.get() ret = None if response.rc == 0: ret = [] for gate in response.response_content['gates']: ret.append(Gate.json_2_gate(gate)) elif response.rc != 404: err_msg = 'GateService.get_gates - Problem while getting nodes. ' \ '. Reason: ' + str(response.response_content) + ' - ' + str(response.error_message) + \ " (" + str(response.rc) + ")" LOGGER.warning(err_msg) if response.rc == 500 and ArianeMappingOverloadError.ERROR_MSG in response.error_message: raise ArianeMappingOverloadError("GateService.get_gates", ArianeMappingOverloadError.ERROR_MSG) # traceback.print_stack() return ret
def function[get_gates, parameter[]]: constant[ get all gates known on the Ariane server :return: ] call[name[LOGGER].debug, parameter[constant[GateService.get_gates]]] variable[params] assign[=] call[name[SessionService].complete_transactional_req, parameter[constant[None]]] if compare[name[params] is constant[None]] begin[:] if compare[name[MappingService].driver_type not_equal[!=] name[DriverFactory].DRIVER_REST] begin[:] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c796740>], [<ast.Constant object at 0x7da20c794310>]] variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da20c795360>], [<ast.Name object at 0x7da20c795de0>]] variable[response] assign[=] call[name[GateService].requester.call, parameter[name[args]]] if compare[name[MappingService].driver_type not_equal[!=] name[DriverFactory].DRIVER_REST] begin[:] variable[response] assign[=] call[name[response].get, parameter[]] variable[ret] assign[=] constant[None] if compare[name[response].rc equal[==] constant[0]] begin[:] variable[ret] assign[=] list[[]] for taget[name[gate]] in starred[call[name[response].response_content][constant[gates]]] begin[:] call[name[ret].append, parameter[call[name[Gate].json_2_gate, parameter[name[gate]]]]] return[name[ret]]
keyword[def] identifier[get_gates] (): literal[string] identifier[LOGGER] . identifier[debug] ( literal[string] ) identifier[params] = identifier[SessionService] . identifier[complete_transactional_req] ( keyword[None] ) keyword[if] identifier[params] keyword[is] keyword[None] : keyword[if] identifier[MappingService] . identifier[driver_type] != identifier[DriverFactory] . identifier[DRIVER_REST] : identifier[params] ={ literal[string] : literal[string] } identifier[args] ={ literal[string] : identifier[params] } keyword[else] : identifier[args] ={ literal[string] : literal[string] , literal[string] : literal[string] } keyword[else] : keyword[if] identifier[MappingService] . identifier[driver_type] != identifier[DriverFactory] . identifier[DRIVER_REST] : identifier[params] [ literal[string] ]= literal[string] identifier[args] ={ literal[string] : identifier[params] } keyword[else] : identifier[args] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[params] } identifier[response] = identifier[GateService] . identifier[requester] . identifier[call] ( identifier[args] ) keyword[if] identifier[MappingService] . identifier[driver_type] != identifier[DriverFactory] . identifier[DRIVER_REST] : identifier[response] = identifier[response] . identifier[get] () identifier[ret] = keyword[None] keyword[if] identifier[response] . identifier[rc] == literal[int] : identifier[ret] =[] keyword[for] identifier[gate] keyword[in] identifier[response] . identifier[response_content] [ literal[string] ]: identifier[ret] . identifier[append] ( identifier[Gate] . identifier[json_2_gate] ( identifier[gate] )) keyword[elif] identifier[response] . identifier[rc] != literal[int] : identifier[err_msg] = literal[string] literal[string] + identifier[str] ( identifier[response] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[response] . 
identifier[error_message] )+ literal[string] + identifier[str] ( identifier[response] . identifier[rc] )+ literal[string] identifier[LOGGER] . identifier[warning] ( identifier[err_msg] ) keyword[if] identifier[response] . identifier[rc] == literal[int] keyword[and] identifier[ArianeMappingOverloadError] . identifier[ERROR_MSG] keyword[in] identifier[response] . identifier[error_message] : keyword[raise] identifier[ArianeMappingOverloadError] ( literal[string] , identifier[ArianeMappingOverloadError] . identifier[ERROR_MSG] ) keyword[return] identifier[ret]
def get_gates(): """ get all gates known on the Ariane server :return: """ LOGGER.debug('GateService.get_gates') params = SessionService.complete_transactional_req(None) if params is None: if MappingService.driver_type != DriverFactory.DRIVER_REST: params = {'OPERATION': 'getGates'} args = {'properties': params} # depends on [control=['if'], data=[]] else: args = {'http_operation': 'GET', 'operation_path': ''} # depends on [control=['if'], data=['params']] elif MappingService.driver_type != DriverFactory.DRIVER_REST: params['OPERATION'] = 'getGates' args = {'properties': params} # depends on [control=['if'], data=[]] else: args = {'http_operation': 'GET', 'operation_path': '', 'parameters': params} response = GateService.requester.call(args) if MappingService.driver_type != DriverFactory.DRIVER_REST: response = response.get() # depends on [control=['if'], data=[]] ret = None if response.rc == 0: ret = [] for gate in response.response_content['gates']: ret.append(Gate.json_2_gate(gate)) # depends on [control=['for'], data=['gate']] # depends on [control=['if'], data=[]] elif response.rc != 404: err_msg = 'GateService.get_gates - Problem while getting nodes. . Reason: ' + str(response.response_content) + ' - ' + str(response.error_message) + ' (' + str(response.rc) + ')' LOGGER.warning(err_msg) if response.rc == 500 and ArianeMappingOverloadError.ERROR_MSG in response.error_message: raise ArianeMappingOverloadError('GateService.get_gates', ArianeMappingOverloadError.ERROR_MSG) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # traceback.print_stack() return ret
def no_intersections(nodes1, degree1, nodes2, degree2): r"""Determine if one surface is in the other. Helper for :func:`combine_intersections` that handles the case of no points of intersection. In this case, either the surfaces are disjoint or one is fully contained in the other. To check containment, it's enough to check if one of the corners is contained in the other surface. Args: nodes1 (numpy.ndarray): The nodes defining the first surface in the intersection (assumed in :math:\mathbf{R}^2`). degree1 (int): The degree of the surface given by ``nodes1``. nodes2 (numpy.ndarray): The nodes defining the second surface in the intersection (assumed in :math:\mathbf{R}^2`). degree2 (int): The degree of the surface given by ``nodes2``. Returns: Tuple[Optional[list], Optional[bool]]: Pair (2-tuple) of * Edges info list; will be empty or :data:`None` * "Contained" boolean. If not :data:`None`, indicates that one of the surfaces is contained in the other. """ # NOTE: This is a circular import. from bezier import _surface_intersection located = _surface_intersection.locate_point( nodes2, degree2, nodes1[0, 0], nodes1[1, 0] ) if located is not None: return None, True located = _surface_intersection.locate_point( nodes1, degree1, nodes2[0, 0], nodes2[1, 0] ) if located is not None: return None, False return [], None
def function[no_intersections, parameter[nodes1, degree1, nodes2, degree2]]: constant[Determine if one surface is in the other. Helper for :func:`combine_intersections` that handles the case of no points of intersection. In this case, either the surfaces are disjoint or one is fully contained in the other. To check containment, it's enough to check if one of the corners is contained in the other surface. Args: nodes1 (numpy.ndarray): The nodes defining the first surface in the intersection (assumed in :math:\mathbf{R}^2`). degree1 (int): The degree of the surface given by ``nodes1``. nodes2 (numpy.ndarray): The nodes defining the second surface in the intersection (assumed in :math:\mathbf{R}^2`). degree2 (int): The degree of the surface given by ``nodes2``. Returns: Tuple[Optional[list], Optional[bool]]: Pair (2-tuple) of * Edges info list; will be empty or :data:`None` * "Contained" boolean. If not :data:`None`, indicates that one of the surfaces is contained in the other. ] from relative_module[bezier] import module[_surface_intersection] variable[located] assign[=] call[name[_surface_intersection].locate_point, parameter[name[nodes2], name[degree2], call[name[nodes1]][tuple[[<ast.Constant object at 0x7da20e963550>, <ast.Constant object at 0x7da20e962590>]]], call[name[nodes1]][tuple[[<ast.Constant object at 0x7da20e961150>, <ast.Constant object at 0x7da20e9612d0>]]]]] if compare[name[located] is_not constant[None]] begin[:] return[tuple[[<ast.Constant object at 0x7da18f00c040>, <ast.Constant object at 0x7da18eb55db0>]]] variable[located] assign[=] call[name[_surface_intersection].locate_point, parameter[name[nodes1], name[degree1], call[name[nodes2]][tuple[[<ast.Constant object at 0x7da18eb57010>, <ast.Constant object at 0x7da18eb56410>]]], call[name[nodes2]][tuple[[<ast.Constant object at 0x7da18eb54730>, <ast.Constant object at 0x7da18eb56890>]]]]] if compare[name[located] is_not constant[None]] begin[:] return[tuple[[<ast.Constant object at 0x7da18eb572e0>, 
<ast.Constant object at 0x7da18eb54c70>]]] return[tuple[[<ast.List object at 0x7da18eb54c10>, <ast.Constant object at 0x7da18eb566e0>]]]
keyword[def] identifier[no_intersections] ( identifier[nodes1] , identifier[degree1] , identifier[nodes2] , identifier[degree2] ): literal[string] keyword[from] identifier[bezier] keyword[import] identifier[_surface_intersection] identifier[located] = identifier[_surface_intersection] . identifier[locate_point] ( identifier[nodes2] , identifier[degree2] , identifier[nodes1] [ literal[int] , literal[int] ], identifier[nodes1] [ literal[int] , literal[int] ] ) keyword[if] identifier[located] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[None] , keyword[True] identifier[located] = identifier[_surface_intersection] . identifier[locate_point] ( identifier[nodes1] , identifier[degree1] , identifier[nodes2] [ literal[int] , literal[int] ], identifier[nodes2] [ literal[int] , literal[int] ] ) keyword[if] identifier[located] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[None] , keyword[False] keyword[return] [], keyword[None]
def no_intersections(nodes1, degree1, nodes2, degree2): """Determine if one surface is in the other. Helper for :func:`combine_intersections` that handles the case of no points of intersection. In this case, either the surfaces are disjoint or one is fully contained in the other. To check containment, it's enough to check if one of the corners is contained in the other surface. Args: nodes1 (numpy.ndarray): The nodes defining the first surface in the intersection (assumed in :math:\\mathbf{R}^2`). degree1 (int): The degree of the surface given by ``nodes1``. nodes2 (numpy.ndarray): The nodes defining the second surface in the intersection (assumed in :math:\\mathbf{R}^2`). degree2 (int): The degree of the surface given by ``nodes2``. Returns: Tuple[Optional[list], Optional[bool]]: Pair (2-tuple) of * Edges info list; will be empty or :data:`None` * "Contained" boolean. If not :data:`None`, indicates that one of the surfaces is contained in the other. """ # NOTE: This is a circular import. from bezier import _surface_intersection located = _surface_intersection.locate_point(nodes2, degree2, nodes1[0, 0], nodes1[1, 0]) if located is not None: return (None, True) # depends on [control=['if'], data=[]] located = _surface_intersection.locate_point(nodes1, degree1, nodes2[0, 0], nodes2[1, 0]) if located is not None: return (None, False) # depends on [control=['if'], data=[]] return ([], None)
def keys_breadth_first(self, include_dicts=False): """a generator that returns all the keys in a set of nested DotDict instances. The keys take the form X.Y.Z""" namespaces = [] for key in self._key_order: if isinstance(getattr(self, key), DotDict): namespaces.append(key) if include_dicts: yield key else: yield key for a_namespace in namespaces: for key in self[a_namespace].keys_breadth_first(include_dicts): yield '%s.%s' % (a_namespace, key)
def function[keys_breadth_first, parameter[self, include_dicts]]: constant[a generator that returns all the keys in a set of nested DotDict instances. The keys take the form X.Y.Z] variable[namespaces] assign[=] list[[]] for taget[name[key]] in starred[name[self]._key_order] begin[:] if call[name[isinstance], parameter[call[name[getattr], parameter[name[self], name[key]]], name[DotDict]]] begin[:] call[name[namespaces].append, parameter[name[key]]] if name[include_dicts] begin[:] <ast.Yield object at 0x7da18f813a90> for taget[name[a_namespace]] in starred[name[namespaces]] begin[:] for taget[name[key]] in starred[call[call[name[self]][name[a_namespace]].keys_breadth_first, parameter[name[include_dicts]]]] begin[:] <ast.Yield object at 0x7da18f811750>
keyword[def] identifier[keys_breadth_first] ( identifier[self] , identifier[include_dicts] = keyword[False] ): literal[string] identifier[namespaces] =[] keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_key_order] : keyword[if] identifier[isinstance] ( identifier[getattr] ( identifier[self] , identifier[key] ), identifier[DotDict] ): identifier[namespaces] . identifier[append] ( identifier[key] ) keyword[if] identifier[include_dicts] : keyword[yield] identifier[key] keyword[else] : keyword[yield] identifier[key] keyword[for] identifier[a_namespace] keyword[in] identifier[namespaces] : keyword[for] identifier[key] keyword[in] identifier[self] [ identifier[a_namespace] ]. identifier[keys_breadth_first] ( identifier[include_dicts] ): keyword[yield] literal[string] %( identifier[a_namespace] , identifier[key] )
def keys_breadth_first(self, include_dicts=False): """a generator that returns all the keys in a set of nested DotDict instances. The keys take the form X.Y.Z""" namespaces = [] for key in self._key_order: if isinstance(getattr(self, key), DotDict): namespaces.append(key) if include_dicts: yield key # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: yield key # depends on [control=['for'], data=['key']] for a_namespace in namespaces: for key in self[a_namespace].keys_breadth_first(include_dicts): yield ('%s.%s' % (a_namespace, key)) # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['a_namespace']]
def has_nrows( state, incorrect_msg="Your query returned a table with {{n_stu}} row{{'s' if n_stu > 1 else ''}} while it should return a table with {{n_sol}} row{{'s' if n_sol > 1 else ''}}.", ): """Test whether the student and solution query results have equal numbers of rows. Args: incorrect_msg: If specified, this overrides the automatically generated feedback message in case the number of rows in the student and solution query don't match. """ # check that query returned something has_result(state) # assumes that columns cannot be jagged in size n_stu = len(next(iter(state.student_result.values()))) n_sol = len(next(iter(state.solution_result.values()))) if n_stu != n_sol: _msg = state.build_message( incorrect_msg, fmt_kwargs={"n_stu": n_stu, "n_sol": n_sol} ) state.do_test(_msg) return state
def function[has_nrows, parameter[state, incorrect_msg]]: constant[Test whether the student and solution query results have equal numbers of rows. Args: incorrect_msg: If specified, this overrides the automatically generated feedback message in case the number of rows in the student and solution query don't match. ] call[name[has_result], parameter[name[state]]] variable[n_stu] assign[=] call[name[len], parameter[call[name[next], parameter[call[name[iter], parameter[call[name[state].student_result.values, parameter[]]]]]]]] variable[n_sol] assign[=] call[name[len], parameter[call[name[next], parameter[call[name[iter], parameter[call[name[state].solution_result.values, parameter[]]]]]]]] if compare[name[n_stu] not_equal[!=] name[n_sol]] begin[:] variable[_msg] assign[=] call[name[state].build_message, parameter[name[incorrect_msg]]] call[name[state].do_test, parameter[name[_msg]]] return[name[state]]
keyword[def] identifier[has_nrows] ( identifier[state] , identifier[incorrect_msg] = literal[string] , ): literal[string] identifier[has_result] ( identifier[state] ) identifier[n_stu] = identifier[len] ( identifier[next] ( identifier[iter] ( identifier[state] . identifier[student_result] . identifier[values] ()))) identifier[n_sol] = identifier[len] ( identifier[next] ( identifier[iter] ( identifier[state] . identifier[solution_result] . identifier[values] ()))) keyword[if] identifier[n_stu] != identifier[n_sol] : identifier[_msg] = identifier[state] . identifier[build_message] ( identifier[incorrect_msg] , identifier[fmt_kwargs] ={ literal[string] : identifier[n_stu] , literal[string] : identifier[n_sol] } ) identifier[state] . identifier[do_test] ( identifier[_msg] ) keyword[return] identifier[state]
def has_nrows(state, incorrect_msg="Your query returned a table with {{n_stu}} row{{'s' if n_stu > 1 else ''}} while it should return a table with {{n_sol}} row{{'s' if n_sol > 1 else ''}}."): """Test whether the student and solution query results have equal numbers of rows. Args: incorrect_msg: If specified, this overrides the automatically generated feedback message in case the number of rows in the student and solution query don't match. """ # check that query returned something has_result(state) # assumes that columns cannot be jagged in size n_stu = len(next(iter(state.student_result.values()))) n_sol = len(next(iter(state.solution_result.values()))) if n_stu != n_sol: _msg = state.build_message(incorrect_msg, fmt_kwargs={'n_stu': n_stu, 'n_sol': n_sol}) state.do_test(_msg) # depends on [control=['if'], data=['n_stu', 'n_sol']] return state
def delaunay_2d(self, tol=1e-05, alpha=0.0, offset=1.0, bound=False):
    """Apply a delaunay 2D filter along the best fitting plane.

    Only the grid's points are used: a point cloud is built from them and
    the triangulation is performed on that cloud alone.
    """
    point_cloud = PolyData(self.points)
    return point_cloud.delaunay_2d(tol=tol, alpha=alpha, offset=offset, bound=bound)
def function[delaunay_2d, parameter[self, tol, alpha, offset, bound]]: constant[Apply a delaunay 2D filter along the best fitting plane. This extracts the grid's points and perfoms the triangulation on those alone. ] return[call[call[name[PolyData], parameter[name[self].points]].delaunay_2d, parameter[]]]
keyword[def] identifier[delaunay_2d] ( identifier[self] , identifier[tol] = literal[int] , identifier[alpha] = literal[int] , identifier[offset] = literal[int] , identifier[bound] = keyword[False] ): literal[string] keyword[return] identifier[PolyData] ( identifier[self] . identifier[points] ). identifier[delaunay_2d] ( identifier[tol] = identifier[tol] , identifier[alpha] = identifier[alpha] , identifier[offset] = identifier[offset] , identifier[bound] = identifier[bound] )
def delaunay_2d(self, tol=1e-05, alpha=0.0, offset=1.0, bound=False): """Apply a delaunay 2D filter along the best fitting plane. This extracts the grid's points and perfoms the triangulation on those alone. """ return PolyData(self.points).delaunay_2d(tol=tol, alpha=alpha, offset=offset, bound=bound)
def is_debugged(self):
    """
    Tries to determine if the process is being debugged by another process.
    It may detect other debuggers besides WinAppDbg.

    @rtype:  bool
    @return: C{True} if the process has a debugger attached.

    @warning:
        May return inaccurate results when some anti-debug techniques are
        used by the target process.

    @note: To know if a process currently being debugged by a L{Debug}
        object, call L{Debug.is_debugee} instead.
    """
    # FIXME the MSDN docs don't say what access rights are needed here!
    process_handle = self.get_handle(win32.PROCESS_QUERY_INFORMATION)
    return win32.CheckRemoteDebuggerPresent(process_handle)
def function[is_debugged, parameter[self]]: constant[ Tries to determine if the process is being debugged by another process. It may detect other debuggers besides WinAppDbg. @rtype: bool @return: C{True} if the process has a debugger attached. @warning: May return inaccurate results when some anti-debug techniques are used by the target process. @note: To know if a process currently being debugged by a L{Debug} object, call L{Debug.is_debugee} instead. ] variable[hProcess] assign[=] call[name[self].get_handle, parameter[name[win32].PROCESS_QUERY_INFORMATION]] return[call[name[win32].CheckRemoteDebuggerPresent, parameter[name[hProcess]]]]
keyword[def] identifier[is_debugged] ( identifier[self] ): literal[string] identifier[hProcess] = identifier[self] . identifier[get_handle] ( identifier[win32] . identifier[PROCESS_QUERY_INFORMATION] ) keyword[return] identifier[win32] . identifier[CheckRemoteDebuggerPresent] ( identifier[hProcess] )
def is_debugged(self): """ Tries to determine if the process is being debugged by another process. It may detect other debuggers besides WinAppDbg. @rtype: bool @return: C{True} if the process has a debugger attached. @warning: May return inaccurate results when some anti-debug techniques are used by the target process. @note: To know if a process currently being debugged by a L{Debug} object, call L{Debug.is_debugee} instead. """ # FIXME the MSDN docs don't say what access rights are needed here! hProcess = self.get_handle(win32.PROCESS_QUERY_INFORMATION) return win32.CheckRemoteDebuggerPresent(hProcess)
def run_cli(
    executable,
    mets_url=None,
    resolver=None,
    workspace=None,
    page_id=None,
    log_level=None,
    input_file_grp=None,
    output_file_grp=None,
    parameter=None,
    working_dir=None,
):
    """
    Create a workspace for mets_url and run MP CLI through it
    """
    workspace = _get_workspace(workspace, resolver, mets_url, working_dir)

    # Mandatory arguments first, in the order the CLI expects them.
    args = [executable, '--working-dir', workspace.directory, '--mets', mets_url]

    # Optional flags are appended only when their value is truthy,
    # preserving the original flag ordering.
    optional_flags = (
        ('--log-level', log_level),
        ('--page-id', page_id),
        ('--input-file-grp', input_file_grp),
        ('--output-file-grp', output_file_grp),
        ('--parameter', parameter),
    )
    for flag, value in optional_flags:
        if value:
            args += [flag, value]

    log.debug("Running subprocess '%s'", ' '.join(args))
    return subprocess.call(args)
def function[run_cli, parameter[executable, mets_url, resolver, workspace, page_id, log_level, input_file_grp, output_file_grp, parameter, working_dir]]: constant[ Create a workspace for mets_url and run MP CLI through it ] variable[workspace] assign[=] call[name[_get_workspace], parameter[name[workspace], name[resolver], name[mets_url], name[working_dir]]] variable[args] assign[=] list[[<ast.Name object at 0x7da20e9b1690>, <ast.Constant object at 0x7da20e9b24d0>, <ast.Attribute object at 0x7da20e9b0880>]] <ast.AugAssign object at 0x7da20e9b0760> if name[log_level] begin[:] <ast.AugAssign object at 0x7da20e9b0df0> if name[page_id] begin[:] <ast.AugAssign object at 0x7da20e9b38b0> if name[input_file_grp] begin[:] <ast.AugAssign object at 0x7da20e9b0be0> if name[output_file_grp] begin[:] <ast.AugAssign object at 0x7da1b03828c0> if name[parameter] begin[:] <ast.AugAssign object at 0x7da1b0383910> call[name[log].debug, parameter[constant[Running subprocess '%s'], call[constant[ ].join, parameter[name[args]]]]] return[call[name[subprocess].call, parameter[name[args]]]]
keyword[def] identifier[run_cli] ( identifier[executable] , identifier[mets_url] = keyword[None] , identifier[resolver] = keyword[None] , identifier[workspace] = keyword[None] , identifier[page_id] = keyword[None] , identifier[log_level] = keyword[None] , identifier[input_file_grp] = keyword[None] , identifier[output_file_grp] = keyword[None] , identifier[parameter] = keyword[None] , identifier[working_dir] = keyword[None] , ): literal[string] identifier[workspace] = identifier[_get_workspace] ( identifier[workspace] , identifier[resolver] , identifier[mets_url] , identifier[working_dir] ) identifier[args] =[ identifier[executable] , literal[string] , identifier[workspace] . identifier[directory] ] identifier[args] +=[ literal[string] , identifier[mets_url] ] keyword[if] identifier[log_level] : identifier[args] +=[ literal[string] , identifier[log_level] ] keyword[if] identifier[page_id] : identifier[args] +=[ literal[string] , identifier[page_id] ] keyword[if] identifier[input_file_grp] : identifier[args] +=[ literal[string] , identifier[input_file_grp] ] keyword[if] identifier[output_file_grp] : identifier[args] +=[ literal[string] , identifier[output_file_grp] ] keyword[if] identifier[parameter] : identifier[args] +=[ literal[string] , identifier[parameter] ] identifier[log] . identifier[debug] ( literal[string] , literal[string] . identifier[join] ( identifier[args] )) keyword[return] identifier[subprocess] . identifier[call] ( identifier[args] )
def run_cli(executable, mets_url=None, resolver=None, workspace=None, page_id=None, log_level=None, input_file_grp=None, output_file_grp=None, parameter=None, working_dir=None): """ Create a workspace for mets_url and run MP CLI through it """ workspace = _get_workspace(workspace, resolver, mets_url, working_dir) args = [executable, '--working-dir', workspace.directory] args += ['--mets', mets_url] if log_level: args += ['--log-level', log_level] # depends on [control=['if'], data=[]] if page_id: args += ['--page-id', page_id] # depends on [control=['if'], data=[]] if input_file_grp: args += ['--input-file-grp', input_file_grp] # depends on [control=['if'], data=[]] if output_file_grp: args += ['--output-file-grp', output_file_grp] # depends on [control=['if'], data=[]] if parameter: args += ['--parameter', parameter] # depends on [control=['if'], data=[]] log.debug("Running subprocess '%s'", ' '.join(args)) return subprocess.call(args)
def to_credentials(arg):
    '''
    to_credentials(arg) converts arg into a pair (key, secret) if arg can be
      coerced into such a pair and otherwise raises an error.

    Possible inputs include:
      * A tuple (key, secret)
      * A mapping with the keys 'key' and 'secret'
      * The name of a file that can load credentials via the
        load_credentials() function
      * A string that separates the key and secret by ':',
        e.g., 'mykey:mysecret'
      * A string that separates the key and secret by a newline
    '''
    if pimms.is_str(arg):
        # First, try interpreting the string as the name of a credentials file.
        try:
            return load_credentials(arg)
        except Exception:
            pass
        # Otherwise, try parsing the string itself as a credentials pair.
        try:
            return str_to_credentials(arg)
        except Exception:
            raise ValueError('String "%s" is neither a file containing credentials nor a valid'
                             ' credentials string itself.' % arg)
    if pimms.is_map(arg) and 'key' in arg and 'secret' in arg:
        return (arg['key'], arg['secret'])
    if pimms.is_vector(arg, str) and len(arg) == 2:
        return tuple(arg)
    raise ValueError('given argument cannot be coerced to credentials: %s' % arg)
def function[to_credentials, parameter[arg]]: constant[ to_credentials(arg) converts arg into a pair (key, secret) if arg can be coerced into such a pair and otherwise raises an error. Possible inputs include: * A tuple (key, secret) * A mapping with the keys 'key' and 'secret' * The name of a file that can load credentials via the load_credentials() function * A string that separates the key and secret by ':', e.g., 'mykey:mysecret' * A string that separates the key and secret by a " ", e.g., "mykey mysecret" ] if call[name[pimms].is_str, parameter[name[arg]]] begin[:] <ast.Try object at 0x7da20c6a93c0> <ast.Try object at 0x7da20c6a86a0>
keyword[def] identifier[to_credentials] ( identifier[arg] ): literal[string] keyword[if] identifier[pimms] . identifier[is_str] ( identifier[arg] ): keyword[try] : keyword[return] identifier[load_credentials] ( identifier[arg] ) keyword[except] identifier[Exception] : keyword[pass] keyword[try] : keyword[return] identifier[str_to_credentials] ( identifier[arg] ) keyword[except] identifier[Exception] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] % identifier[arg] ) keyword[elif] identifier[pimms] . identifier[is_map] ( identifier[arg] ) keyword[and] literal[string] keyword[in] identifier[arg] keyword[and] literal[string] keyword[in] identifier[arg] : keyword[return] ( identifier[arg] [ literal[string] ], identifier[arg] [ literal[string] ]) keyword[elif] identifier[pimms] . identifier[is_vector] ( identifier[arg] , identifier[str] ) keyword[and] identifier[len] ( identifier[arg] )== literal[int] : keyword[return] identifier[tuple] ( identifier[arg] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[arg] )
def to_credentials(arg): """ to_credentials(arg) converts arg into a pair (key, secret) if arg can be coerced into such a pair and otherwise raises an error. Possible inputs include: * A tuple (key, secret) * A mapping with the keys 'key' and 'secret' * The name of a file that can load credentials via the load_credentials() function * A string that separates the key and secret by ':', e.g., 'mykey:mysecret' * A string that separates the key and secret by a " ", e.g., "mykey mysecret" """ if pimms.is_str(arg): try: return load_credentials(arg) # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] try: return str_to_credentials(arg) # depends on [control=['try'], data=[]] except Exception: raise ValueError('String "%s" is neither a file containing credentials nor a valid credentials string itself.' % arg) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif pimms.is_map(arg) and 'key' in arg and ('secret' in arg): return (arg['key'], arg['secret']) # depends on [control=['if'], data=[]] elif pimms.is_vector(arg, str) and len(arg) == 2: return tuple(arg) # depends on [control=['if'], data=[]] else: raise ValueError('given argument cannot be coerced to credentials: %s' % arg)
def ToJson(self):
    """
    Convert object members to a dictionary that can be parsed as JSON.

    Returns:
         dict:
    """
    # An empty/None Data member serializes to the empty string;
    # otherwise the raw bytes are rendered as lowercase hex.
    data_hex = self.Data.hex() if self.Data else ''
    return {'usage': self.Usage, 'data': data_hex}
def function[ToJson, parameter[self]]: constant[ Convert object members to a dictionary that can be parsed as JSON. Returns: dict: ] variable[obj] assign[=] dictionary[[<ast.Constant object at 0x7da1b1dd0700>, <ast.Constant object at 0x7da1b1dd0760>], [<ast.Attribute object at 0x7da1b1dd07f0>, <ast.IfExp object at 0x7da1b1dd1600>]] return[name[obj]]
keyword[def] identifier[ToJson] ( identifier[self] ): literal[string] identifier[obj] ={ literal[string] : identifier[self] . identifier[Usage] , literal[string] : literal[string] keyword[if] keyword[not] identifier[self] . identifier[Data] keyword[else] identifier[self] . identifier[Data] . identifier[hex] () } keyword[return] identifier[obj]
def ToJson(self): """ Convert object members to a dictionary that can be parsed as JSON. Returns: dict: """ obj = {'usage': self.Usage, 'data': '' if not self.Data else self.Data.hex()} return obj
def makevAndvPfuncs(self,policyFunc):
    '''
    Constructs the marginal value function for this period.

    Parameters
    ----------
    policyFunc : function
        Consumption and medical care function for this period, defined over
        market resources, permanent income level, and the medical need shock.

    Returns
    -------
    vFunc : function
        Value function for this period, defined over market resources and
        permanent income.
    vPfunc : function
        Marginal value (of market resources) function for this period, defined
        over market resources and permanent income.
    '''
    # Get state dimension sizes
    mCount = self.aXtraGrid.size
    pCount = self.pLvlGrid.size
    MedCount = self.MedShkVals.size

    # Make temporary grids to evaluate the consumption function
    # All three grids are (mCount, pCount, MedCount) arrays; mGrid is the
    # market-resources level implied by normalized assets, permanent income,
    # and the pLvl-dependent lower bound on market resources.
    temp_grid = np.tile(np.reshape(self.aXtraGrid,(mCount,1,1)),(1,pCount,MedCount))
    aMinGrid = np.tile(np.reshape(self.mLvlMinNow(self.pLvlGrid),(1,pCount,1)),
                       (mCount,1,MedCount))
    pGrid = np.tile(np.reshape(self.pLvlGrid,(1,pCount,1)),(mCount,1,MedCount))
    mGrid = temp_grid*pGrid + aMinGrid
    # Special case: when the lowest permanent income level is exactly zero,
    # use the raw aXtraGrid for that slice (temp_grid*pGrid would collapse to 0).
    if self.pLvlGrid[0] == 0:
        mGrid[:,0,:] = np.tile(np.reshape(self.aXtraGrid,(mCount,1)),(1,MedCount))
    MedShkGrid = np.tile(np.reshape(self.MedShkVals,(1,1,MedCount)),(mCount,pCount,1))
    probsGrid = np.tile(np.reshape(self.MedShkPrbs,(1,1,MedCount)),(mCount,pCount,1))

    # Get optimal consumption (and medical care) for each state
    cGrid,MedGrid = policyFunc(mGrid,pGrid,MedShkGrid)

    # Calculate expected value by "integrating" across medical shocks
    # (axis=2 is the medical-shock dimension; probsGrid are the shock weights)
    if self.vFuncBool:
        MedGrid = np.maximum(MedGrid,1e-100) # interpolation error sometimes makes Med < 0 (barely)
        aGrid = np.maximum(mGrid - cGrid - self.MedPrice*MedGrid, aMinGrid) # interpolation error sometimes makes tiny violations
        vGrid = self.u(cGrid) + MedShkGrid*self.uMed(MedGrid) + self.EndOfPrdvFunc(aGrid,pGrid)
        vNow = np.sum(vGrid*probsGrid,axis=2)

    # Calculate expected marginal value by "integrating" across medical shocks
    vPgrid = self.uP(cGrid)
    vPnow = np.sum(vPgrid*probsGrid,axis=2)

    # Add vPnvrs=0 at m=mLvlMin to close it off at the bottom (and vNvrs=0)
    # Working in "pseudo-inverse" (utility-inverted) space keeps the
    # interpolants well behaved near the lower bound.
    mGrid_small = np.concatenate((np.reshape(self.mLvlMinNow(self.pLvlGrid),(1,pCount)),mGrid[:,:,0]))
    vPnvrsNow = np.concatenate((np.zeros((1,pCount)),self.uPinv(vPnow)))
    if self.vFuncBool:
        vNvrsNow = np.concatenate((np.zeros((1,pCount)),self.uinv(vNow)),axis=0)
        # Chain rule: derivative of the pseudo-inverse value function.
        vNvrsPnow = vPnow*self.uinvP(vNow)
        vNvrsPnow = np.concatenate((np.zeros((1,pCount)),vNvrsPnow),axis=0)

    # Construct the pseudo-inverse value and marginal value functions over mLvl,pLvl
    vPnvrsFunc_by_pLvl = []
    vNvrsFunc_by_pLvl = []
    for j in range(pCount): # Make a pseudo inverse marginal value function for each pLvl
        pLvl = self.pLvlGrid[j]
        # Interpolate over m net of the lower bound, so each 1D function
        # starts at zero; the bound is re-applied by VariableLowerBoundFunc2D.
        m_temp = mGrid_small[:,j] - self.mLvlMinNow(pLvl)
        vPnvrs_temp = vPnvrsNow[:,j]
        vPnvrsFunc_by_pLvl.append(LinearInterp(m_temp,vPnvrs_temp))
        if self.vFuncBool:
            vNvrs_temp = vNvrsNow[:,j]
            vNvrsP_temp = vNvrsPnow[:,j]
            vNvrsFunc_by_pLvl.append(CubicInterp(m_temp,vNvrs_temp,vNvrsP_temp))
    vPnvrsFuncBase = LinearInterpOnInterp1D(vPnvrsFunc_by_pLvl,self.pLvlGrid)
    vPnvrsFunc = VariableLowerBoundFunc2D(vPnvrsFuncBase,self.mLvlMinNow) # adjust for the lower bound of mLvl
    if self.vFuncBool:
        vNvrsFuncBase = LinearInterpOnInterp1D(vNvrsFunc_by_pLvl,self.pLvlGrid)
        vNvrsFunc = VariableLowerBoundFunc2D(vNvrsFuncBase,self.mLvlMinNow) # adjust for the lower bound of mLvl

    # "Re-curve" the (marginal) value function
    vPfunc = MargValueFunc2D(vPnvrsFunc,self.CRRA)
    if self.vFuncBool:
        vFunc = ValueFunc2D(vNvrsFunc,self.CRRA)
    else:
        # No value function requested: return a placeholder that callers can ignore.
        vFunc = NullFunc()

    return vFunc, vPfunc
def function[makevAndvPfuncs, parameter[self, policyFunc]]: constant[ Constructs the marginal value function for this period. Parameters ---------- policyFunc : function Consumption and medical care function for this period, defined over market resources, permanent income level, and the medical need shock. Returns ------- vFunc : function Value function for this period, defined over market resources and permanent income. vPfunc : function Marginal value (of market resources) function for this period, defined over market resources and permanent income. ] variable[mCount] assign[=] name[self].aXtraGrid.size variable[pCount] assign[=] name[self].pLvlGrid.size variable[MedCount] assign[=] name[self].MedShkVals.size variable[temp_grid] assign[=] call[name[np].tile, parameter[call[name[np].reshape, parameter[name[self].aXtraGrid, tuple[[<ast.Name object at 0x7da1b074d720>, <ast.Constant object at 0x7da1b074d780>, <ast.Constant object at 0x7da1b074d810>]]]], tuple[[<ast.Constant object at 0x7da1b074d6f0>, <ast.Name object at 0x7da1b074f940>, <ast.Name object at 0x7da1b074f880>]]]] variable[aMinGrid] assign[=] call[name[np].tile, parameter[call[name[np].reshape, parameter[call[name[self].mLvlMinNow, parameter[name[self].pLvlGrid]], tuple[[<ast.Constant object at 0x7da1b074d1e0>, <ast.Name object at 0x7da1b074d9c0>, <ast.Constant object at 0x7da1b074db10>]]]], tuple[[<ast.Name object at 0x7da1b074e110>, <ast.Constant object at 0x7da1b074d030>, <ast.Name object at 0x7da1b074ef20>]]]] variable[pGrid] assign[=] call[name[np].tile, parameter[call[name[np].reshape, parameter[name[self].pLvlGrid, tuple[[<ast.Constant object at 0x7da1b074d000>, <ast.Name object at 0x7da1b074f0d0>, <ast.Constant object at 0x7da1b074cdf0>]]]], tuple[[<ast.Name object at 0x7da1b074c550>, <ast.Constant object at 0x7da1b074d3c0>, <ast.Name object at 0x7da1b074d0f0>]]]] variable[mGrid] assign[=] binary_operation[binary_operation[name[temp_grid] * name[pGrid]] + name[aMinGrid]] if 
compare[call[name[self].pLvlGrid][constant[0]] equal[==] constant[0]] begin[:] call[name[mGrid]][tuple[[<ast.Slice object at 0x7da1b074ff10>, <ast.Constant object at 0x7da1b074f4c0>, <ast.Slice object at 0x7da1b074f700>]]] assign[=] call[name[np].tile, parameter[call[name[np].reshape, parameter[name[self].aXtraGrid, tuple[[<ast.Name object at 0x7da1b074d690>, <ast.Constant object at 0x7da1b074e140>]]]], tuple[[<ast.Constant object at 0x7da1b074df00>, <ast.Name object at 0x7da1b074ce20>]]]] variable[MedShkGrid] assign[=] call[name[np].tile, parameter[call[name[np].reshape, parameter[name[self].MedShkVals, tuple[[<ast.Constant object at 0x7da1b074ee30>, <ast.Constant object at 0x7da1b074d390>, <ast.Name object at 0x7da1b074e8f0>]]]], tuple[[<ast.Name object at 0x7da1b074c2e0>, <ast.Name object at 0x7da1b074cd60>, <ast.Constant object at 0x7da1b074f280>]]]] variable[probsGrid] assign[=] call[name[np].tile, parameter[call[name[np].reshape, parameter[name[self].MedShkPrbs, tuple[[<ast.Constant object at 0x7da1b074c520>, <ast.Constant object at 0x7da1b074e470>, <ast.Name object at 0x7da1b074ea10>]]]], tuple[[<ast.Name object at 0x7da1b074f1c0>, <ast.Name object at 0x7da1b074faf0>, <ast.Constant object at 0x7da1b074f1f0>]]]] <ast.Tuple object at 0x7da1b074d420> assign[=] call[name[policyFunc], parameter[name[mGrid], name[pGrid], name[MedShkGrid]]] if name[self].vFuncBool begin[:] variable[MedGrid] assign[=] call[name[np].maximum, parameter[name[MedGrid], constant[1e-100]]] variable[aGrid] assign[=] call[name[np].maximum, parameter[binary_operation[binary_operation[name[mGrid] - name[cGrid]] - binary_operation[name[self].MedPrice * name[MedGrid]]], name[aMinGrid]]] variable[vGrid] assign[=] binary_operation[binary_operation[call[name[self].u, parameter[name[cGrid]]] + binary_operation[name[MedShkGrid] * call[name[self].uMed, parameter[name[MedGrid]]]]] + call[name[self].EndOfPrdvFunc, parameter[name[aGrid], name[pGrid]]]] variable[vNow] assign[=] call[name[np].sum, 
parameter[binary_operation[name[vGrid] * name[probsGrid]]]] variable[vPgrid] assign[=] call[name[self].uP, parameter[name[cGrid]]] variable[vPnow] assign[=] call[name[np].sum, parameter[binary_operation[name[vPgrid] * name[probsGrid]]]] variable[mGrid_small] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Call object at 0x7da1b074fe20>, <ast.Subscript object at 0x7da1b074d120>]]]] variable[vPnvrsNow] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Call object at 0x7da1b074e830>, <ast.Call object at 0x7da1b074e7d0>]]]] if name[self].vFuncBool begin[:] variable[vNvrsNow] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Call object at 0x7da1b23462f0>, <ast.Call object at 0x7da1b23457e0>]]]] variable[vNvrsPnow] assign[=] binary_operation[name[vPnow] * call[name[self].uinvP, parameter[name[vNow]]]] variable[vNvrsPnow] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Call object at 0x7da1b2344a60>, <ast.Name object at 0x7da1b2344c10>]]]] variable[vPnvrsFunc_by_pLvl] assign[=] list[[]] variable[vNvrsFunc_by_pLvl] assign[=] list[[]] for taget[name[j]] in starred[call[name[range], parameter[name[pCount]]]] begin[:] variable[pLvl] assign[=] call[name[self].pLvlGrid][name[j]] variable[m_temp] assign[=] binary_operation[call[name[mGrid_small]][tuple[[<ast.Slice object at 0x7da1b2344400>, <ast.Name object at 0x7da1b2345c90>]]] - call[name[self].mLvlMinNow, parameter[name[pLvl]]]] variable[vPnvrs_temp] assign[=] call[name[vPnvrsNow]][tuple[[<ast.Slice object at 0x7da1b2347430>, <ast.Name object at 0x7da1b2347640>]]] call[name[vPnvrsFunc_by_pLvl].append, parameter[call[name[LinearInterp], parameter[name[m_temp], name[vPnvrs_temp]]]]] if name[self].vFuncBool begin[:] variable[vNvrs_temp] assign[=] call[name[vNvrsNow]][tuple[[<ast.Slice object at 0x7da1b2344310>, <ast.Name object at 0x7da1b2345c00>]]] variable[vNvrsP_temp] assign[=] call[name[vNvrsPnow]][tuple[[<ast.Slice object at 0x7da1b2345840>, <ast.Name object at 0x7da1b2347580>]]] 
call[name[vNvrsFunc_by_pLvl].append, parameter[call[name[CubicInterp], parameter[name[m_temp], name[vNvrs_temp], name[vNvrsP_temp]]]]] variable[vPnvrsFuncBase] assign[=] call[name[LinearInterpOnInterp1D], parameter[name[vPnvrsFunc_by_pLvl], name[self].pLvlGrid]] variable[vPnvrsFunc] assign[=] call[name[VariableLowerBoundFunc2D], parameter[name[vPnvrsFuncBase], name[self].mLvlMinNow]] if name[self].vFuncBool begin[:] variable[vNvrsFuncBase] assign[=] call[name[LinearInterpOnInterp1D], parameter[name[vNvrsFunc_by_pLvl], name[self].pLvlGrid]] variable[vNvrsFunc] assign[=] call[name[VariableLowerBoundFunc2D], parameter[name[vNvrsFuncBase], name[self].mLvlMinNow]] variable[vPfunc] assign[=] call[name[MargValueFunc2D], parameter[name[vPnvrsFunc], name[self].CRRA]] if name[self].vFuncBool begin[:] variable[vFunc] assign[=] call[name[ValueFunc2D], parameter[name[vNvrsFunc], name[self].CRRA]] return[tuple[[<ast.Name object at 0x7da1b26ad5d0>, <ast.Name object at 0x7da1b26ac130>]]]
keyword[def] identifier[makevAndvPfuncs] ( identifier[self] , identifier[policyFunc] ): literal[string] identifier[mCount] = identifier[self] . identifier[aXtraGrid] . identifier[size] identifier[pCount] = identifier[self] . identifier[pLvlGrid] . identifier[size] identifier[MedCount] = identifier[self] . identifier[MedShkVals] . identifier[size] identifier[temp_grid] = identifier[np] . identifier[tile] ( identifier[np] . identifier[reshape] ( identifier[self] . identifier[aXtraGrid] ,( identifier[mCount] , literal[int] , literal[int] )),( literal[int] , identifier[pCount] , identifier[MedCount] )) identifier[aMinGrid] = identifier[np] . identifier[tile] ( identifier[np] . identifier[reshape] ( identifier[self] . identifier[mLvlMinNow] ( identifier[self] . identifier[pLvlGrid] ),( literal[int] , identifier[pCount] , literal[int] )), ( identifier[mCount] , literal[int] , identifier[MedCount] )) identifier[pGrid] = identifier[np] . identifier[tile] ( identifier[np] . identifier[reshape] ( identifier[self] . identifier[pLvlGrid] ,( literal[int] , identifier[pCount] , literal[int] )),( identifier[mCount] , literal[int] , identifier[MedCount] )) identifier[mGrid] = identifier[temp_grid] * identifier[pGrid] + identifier[aMinGrid] keyword[if] identifier[self] . identifier[pLvlGrid] [ literal[int] ]== literal[int] : identifier[mGrid] [:, literal[int] ,:]= identifier[np] . identifier[tile] ( identifier[np] . identifier[reshape] ( identifier[self] . identifier[aXtraGrid] ,( identifier[mCount] , literal[int] )),( literal[int] , identifier[MedCount] )) identifier[MedShkGrid] = identifier[np] . identifier[tile] ( identifier[np] . identifier[reshape] ( identifier[self] . identifier[MedShkVals] ,( literal[int] , literal[int] , identifier[MedCount] )),( identifier[mCount] , identifier[pCount] , literal[int] )) identifier[probsGrid] = identifier[np] . identifier[tile] ( identifier[np] . identifier[reshape] ( identifier[self] . 
identifier[MedShkPrbs] ,( literal[int] , literal[int] , identifier[MedCount] )),( identifier[mCount] , identifier[pCount] , literal[int] )) identifier[cGrid] , identifier[MedGrid] = identifier[policyFunc] ( identifier[mGrid] , identifier[pGrid] , identifier[MedShkGrid] ) keyword[if] identifier[self] . identifier[vFuncBool] : identifier[MedGrid] = identifier[np] . identifier[maximum] ( identifier[MedGrid] , literal[int] ) identifier[aGrid] = identifier[np] . identifier[maximum] ( identifier[mGrid] - identifier[cGrid] - identifier[self] . identifier[MedPrice] * identifier[MedGrid] , identifier[aMinGrid] ) identifier[vGrid] = identifier[self] . identifier[u] ( identifier[cGrid] )+ identifier[MedShkGrid] * identifier[self] . identifier[uMed] ( identifier[MedGrid] )+ identifier[self] . identifier[EndOfPrdvFunc] ( identifier[aGrid] , identifier[pGrid] ) identifier[vNow] = identifier[np] . identifier[sum] ( identifier[vGrid] * identifier[probsGrid] , identifier[axis] = literal[int] ) identifier[vPgrid] = identifier[self] . identifier[uP] ( identifier[cGrid] ) identifier[vPnow] = identifier[np] . identifier[sum] ( identifier[vPgrid] * identifier[probsGrid] , identifier[axis] = literal[int] ) identifier[mGrid_small] = identifier[np] . identifier[concatenate] (( identifier[np] . identifier[reshape] ( identifier[self] . identifier[mLvlMinNow] ( identifier[self] . identifier[pLvlGrid] ),( literal[int] , identifier[pCount] )), identifier[mGrid] [:,:, literal[int] ])) identifier[vPnvrsNow] = identifier[np] . identifier[concatenate] (( identifier[np] . identifier[zeros] (( literal[int] , identifier[pCount] )), identifier[self] . identifier[uPinv] ( identifier[vPnow] ))) keyword[if] identifier[self] . identifier[vFuncBool] : identifier[vNvrsNow] = identifier[np] . identifier[concatenate] (( identifier[np] . identifier[zeros] (( literal[int] , identifier[pCount] )), identifier[self] . 
identifier[uinv] ( identifier[vNow] )), identifier[axis] = literal[int] ) identifier[vNvrsPnow] = identifier[vPnow] * identifier[self] . identifier[uinvP] ( identifier[vNow] ) identifier[vNvrsPnow] = identifier[np] . identifier[concatenate] (( identifier[np] . identifier[zeros] (( literal[int] , identifier[pCount] )), identifier[vNvrsPnow] ), identifier[axis] = literal[int] ) identifier[vPnvrsFunc_by_pLvl] =[] identifier[vNvrsFunc_by_pLvl] =[] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[pCount] ): identifier[pLvl] = identifier[self] . identifier[pLvlGrid] [ identifier[j] ] identifier[m_temp] = identifier[mGrid_small] [:, identifier[j] ]- identifier[self] . identifier[mLvlMinNow] ( identifier[pLvl] ) identifier[vPnvrs_temp] = identifier[vPnvrsNow] [:, identifier[j] ] identifier[vPnvrsFunc_by_pLvl] . identifier[append] ( identifier[LinearInterp] ( identifier[m_temp] , identifier[vPnvrs_temp] )) keyword[if] identifier[self] . identifier[vFuncBool] : identifier[vNvrs_temp] = identifier[vNvrsNow] [:, identifier[j] ] identifier[vNvrsP_temp] = identifier[vNvrsPnow] [:, identifier[j] ] identifier[vNvrsFunc_by_pLvl] . identifier[append] ( identifier[CubicInterp] ( identifier[m_temp] , identifier[vNvrs_temp] , identifier[vNvrsP_temp] )) identifier[vPnvrsFuncBase] = identifier[LinearInterpOnInterp1D] ( identifier[vPnvrsFunc_by_pLvl] , identifier[self] . identifier[pLvlGrid] ) identifier[vPnvrsFunc] = identifier[VariableLowerBoundFunc2D] ( identifier[vPnvrsFuncBase] , identifier[self] . identifier[mLvlMinNow] ) keyword[if] identifier[self] . identifier[vFuncBool] : identifier[vNvrsFuncBase] = identifier[LinearInterpOnInterp1D] ( identifier[vNvrsFunc_by_pLvl] , identifier[self] . identifier[pLvlGrid] ) identifier[vNvrsFunc] = identifier[VariableLowerBoundFunc2D] ( identifier[vNvrsFuncBase] , identifier[self] . identifier[mLvlMinNow] ) identifier[vPfunc] = identifier[MargValueFunc2D] ( identifier[vPnvrsFunc] , identifier[self] . 
identifier[CRRA] ) keyword[if] identifier[self] . identifier[vFuncBool] : identifier[vFunc] = identifier[ValueFunc2D] ( identifier[vNvrsFunc] , identifier[self] . identifier[CRRA] ) keyword[else] : identifier[vFunc] = identifier[NullFunc] () keyword[return] identifier[vFunc] , identifier[vPfunc]
def makevAndvPfuncs(self, policyFunc): """ Constructs the marginal value function for this period. Parameters ---------- policyFunc : function Consumption and medical care function for this period, defined over market resources, permanent income level, and the medical need shock. Returns ------- vFunc : function Value function for this period, defined over market resources and permanent income. vPfunc : function Marginal value (of market resources) function for this period, defined over market resources and permanent income. """ # Get state dimension sizes mCount = self.aXtraGrid.size pCount = self.pLvlGrid.size MedCount = self.MedShkVals.size # Make temporary grids to evaluate the consumption function temp_grid = np.tile(np.reshape(self.aXtraGrid, (mCount, 1, 1)), (1, pCount, MedCount)) aMinGrid = np.tile(np.reshape(self.mLvlMinNow(self.pLvlGrid), (1, pCount, 1)), (mCount, 1, MedCount)) pGrid = np.tile(np.reshape(self.pLvlGrid, (1, pCount, 1)), (mCount, 1, MedCount)) mGrid = temp_grid * pGrid + aMinGrid if self.pLvlGrid[0] == 0: mGrid[:, 0, :] = np.tile(np.reshape(self.aXtraGrid, (mCount, 1)), (1, MedCount)) # depends on [control=['if'], data=[]] MedShkGrid = np.tile(np.reshape(self.MedShkVals, (1, 1, MedCount)), (mCount, pCount, 1)) probsGrid = np.tile(np.reshape(self.MedShkPrbs, (1, 1, MedCount)), (mCount, pCount, 1)) # Get optimal consumption (and medical care) for each state (cGrid, MedGrid) = policyFunc(mGrid, pGrid, MedShkGrid) # Calculate expected value by "integrating" across medical shocks if self.vFuncBool: MedGrid = np.maximum(MedGrid, 1e-100) # interpolation error sometimes makes Med < 0 (barely) aGrid = np.maximum(mGrid - cGrid - self.MedPrice * MedGrid, aMinGrid) # interpolation error sometimes makes tiny violations vGrid = self.u(cGrid) + MedShkGrid * self.uMed(MedGrid) + self.EndOfPrdvFunc(aGrid, pGrid) vNow = np.sum(vGrid * probsGrid, axis=2) # depends on [control=['if'], data=[]] # Calculate expected marginal value by "integrating" across medical 
shocks vPgrid = self.uP(cGrid) vPnow = np.sum(vPgrid * probsGrid, axis=2) # Add vPnvrs=0 at m=mLvlMin to close it off at the bottom (and vNvrs=0) mGrid_small = np.concatenate((np.reshape(self.mLvlMinNow(self.pLvlGrid), (1, pCount)), mGrid[:, :, 0])) vPnvrsNow = np.concatenate((np.zeros((1, pCount)), self.uPinv(vPnow))) if self.vFuncBool: vNvrsNow = np.concatenate((np.zeros((1, pCount)), self.uinv(vNow)), axis=0) vNvrsPnow = vPnow * self.uinvP(vNow) vNvrsPnow = np.concatenate((np.zeros((1, pCount)), vNvrsPnow), axis=0) # depends on [control=['if'], data=[]] # Construct the pseudo-inverse value and marginal value functions over mLvl,pLvl vPnvrsFunc_by_pLvl = [] vNvrsFunc_by_pLvl = [] for j in range(pCount): # Make a pseudo inverse marginal value function for each pLvl pLvl = self.pLvlGrid[j] m_temp = mGrid_small[:, j] - self.mLvlMinNow(pLvl) vPnvrs_temp = vPnvrsNow[:, j] vPnvrsFunc_by_pLvl.append(LinearInterp(m_temp, vPnvrs_temp)) if self.vFuncBool: vNvrs_temp = vNvrsNow[:, j] vNvrsP_temp = vNvrsPnow[:, j] vNvrsFunc_by_pLvl.append(CubicInterp(m_temp, vNvrs_temp, vNvrsP_temp)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] vPnvrsFuncBase = LinearInterpOnInterp1D(vPnvrsFunc_by_pLvl, self.pLvlGrid) vPnvrsFunc = VariableLowerBoundFunc2D(vPnvrsFuncBase, self.mLvlMinNow) # adjust for the lower bound of mLvl if self.vFuncBool: vNvrsFuncBase = LinearInterpOnInterp1D(vNvrsFunc_by_pLvl, self.pLvlGrid) vNvrsFunc = VariableLowerBoundFunc2D(vNvrsFuncBase, self.mLvlMinNow) # adjust for the lower bound of mLvl # depends on [control=['if'], data=[]] # "Re-curve" the (marginal) value function vPfunc = MargValueFunc2D(vPnvrsFunc, self.CRRA) if self.vFuncBool: vFunc = ValueFunc2D(vNvrsFunc, self.CRRA) # depends on [control=['if'], data=[]] else: vFunc = NullFunc() return (vFunc, vPfunc)
def paste(**kwargs):
    """Return the system clipboard contents as unicode text.

    Keyword arguments are accepted for interface compatibility but are
    ignored.

    Returns:
        The clipboard text in ``CF_UNICODETEXT`` format.
    """
    clip.OpenClipboard()
    try:
        # GetClipboardData can raise (e.g. no text on the clipboard);
        # the clipboard must be closed regardless, or it stays locked
        # for every other application on the system.
        return clip.GetClipboardData(win32con.CF_UNICODETEXT)
    finally:
        clip.CloseClipboard()
def function[paste, parameter[]]: constant[Returns system clipboard contents.] call[name[clip].OpenClipboard, parameter[]] variable[d] assign[=] call[name[clip].GetClipboardData, parameter[name[win32con].CF_UNICODETEXT]] call[name[clip].CloseClipboard, parameter[]] return[name[d]]
keyword[def] identifier[paste] (** identifier[kwargs] ): literal[string] identifier[clip] . identifier[OpenClipboard] () identifier[d] = identifier[clip] . identifier[GetClipboardData] ( identifier[win32con] . identifier[CF_UNICODETEXT] ) identifier[clip] . identifier[CloseClipboard] () keyword[return] identifier[d]
def paste(**kwargs): """Returns system clipboard contents.""" clip.OpenClipboard() d = clip.GetClipboardData(win32con.CF_UNICODETEXT) clip.CloseClipboard() return d
def shape_offset_y(self):
    """Return y distance of shape origin from local coordinate origin.

    The returned integer represents the topmost extent of the freeform
    shape, in local coordinates. Note that the bounding box of the shape
    need not start at the local origin.
    """
    # Collect the y coordinate of every drawing operation that has one;
    # operations without a y attribute (e.g. close operations) are skipped.
    candidate_ys = [op.y for op in self if hasattr(op, 'y')]
    # The start point participates in the minimum alongside all vertices.
    return min([self._start_y] + candidate_ys)
def function[shape_offset_y, parameter[self]]: constant[Return y distance of shape origin from local coordinate origin. The returned integer represents the topmost extent of the freeform shape, in local coordinates. Note that the bounding box of the shape need not start at the local origin. ] variable[min_y] assign[=] name[self]._start_y for taget[name[drawing_operation]] in starred[name[self]] begin[:] if call[name[hasattr], parameter[name[drawing_operation], constant[y]]] begin[:] variable[min_y] assign[=] call[name[min], parameter[name[min_y], name[drawing_operation].y]] return[name[min_y]]
keyword[def] identifier[shape_offset_y] ( identifier[self] ): literal[string] identifier[min_y] = identifier[self] . identifier[_start_y] keyword[for] identifier[drawing_operation] keyword[in] identifier[self] : keyword[if] identifier[hasattr] ( identifier[drawing_operation] , literal[string] ): identifier[min_y] = identifier[min] ( identifier[min_y] , identifier[drawing_operation] . identifier[y] ) keyword[return] identifier[min_y]
def shape_offset_y(self): """Return y distance of shape origin from local coordinate origin. The returned integer represents the topmost extent of the freeform shape, in local coordinates. Note that the bounding box of the shape need not start at the local origin. """ min_y = self._start_y for drawing_operation in self: if hasattr(drawing_operation, 'y'): min_y = min(min_y, drawing_operation.y) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['drawing_operation']] return min_y
def _tostring(value): '''Convert value to XML compatible string''' if value is True: value = 'true' elif value is False: value = 'false' elif value is None: value = '' return unicode(value)
def function[_tostring, parameter[value]]: constant[Convert value to XML compatible string] if compare[name[value] is constant[True]] begin[:] variable[value] assign[=] constant[true] return[call[name[unicode], parameter[name[value]]]]
keyword[def] identifier[_tostring] ( identifier[value] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[True] : identifier[value] = literal[string] keyword[elif] identifier[value] keyword[is] keyword[False] : identifier[value] = literal[string] keyword[elif] identifier[value] keyword[is] keyword[None] : identifier[value] = literal[string] keyword[return] identifier[unicode] ( identifier[value] )
def _tostring(value): """Convert value to XML compatible string""" if value is True: value = 'true' # depends on [control=['if'], data=['value']] elif value is False: value = 'false' # depends on [control=['if'], data=['value']] elif value is None: value = '' # depends on [control=['if'], data=['value']] return unicode(value)
def _get_sv_callers(items): """ return a sorted list of all of the structural variant callers run """ callers = [] for data in items: for sv in data.get("sv", []): callers.append(sv["variantcaller"]) return list(set([x for x in callers if x != "sv-ensemble"])).sort()
def function[_get_sv_callers, parameter[items]]: constant[ return a sorted list of all of the structural variant callers run ] variable[callers] assign[=] list[[]] for taget[name[data]] in starred[name[items]] begin[:] for taget[name[sv]] in starred[call[name[data].get, parameter[constant[sv], list[[]]]]] begin[:] call[name[callers].append, parameter[call[name[sv]][constant[variantcaller]]]] return[call[call[name[list], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da1b17d7370>]]]].sort, parameter[]]]
keyword[def] identifier[_get_sv_callers] ( identifier[items] ): literal[string] identifier[callers] =[] keyword[for] identifier[data] keyword[in] identifier[items] : keyword[for] identifier[sv] keyword[in] identifier[data] . identifier[get] ( literal[string] ,[]): identifier[callers] . identifier[append] ( identifier[sv] [ literal[string] ]) keyword[return] identifier[list] ( identifier[set] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[callers] keyword[if] identifier[x] != literal[string] ])). identifier[sort] ()
def _get_sv_callers(items): """ return a sorted list of all of the structural variant callers run """ callers = [] for data in items: for sv in data.get('sv', []): callers.append(sv['variantcaller']) # depends on [control=['for'], data=['sv']] # depends on [control=['for'], data=['data']] return list(set([x for x in callers if x != 'sv-ensemble'])).sort()
def set_computer_desc(desc=None):
    '''
    Set the Windows computer description

    Args:

        desc (str): The computer description

    Returns:
        dict: ``{'Computer Description': <new description>}`` if
        successful, otherwise ``False``

    CLI Example:

    .. code-block:: bash

        salt 'minion-id' system.set_computer_desc 'This computer belongs to Dave!'
    '''
    # Nothing to do without a description; bail out before making any
    # network API calls (the original queried the server first and only
    # then discovered there was nothing to set).
    if desc is None:
        return False

    if six.PY2:
        desc = _to_unicode(desc)

    # Make sure the system exists
    # Return an object containing current information array for the computer
    system_info = win32net.NetServerGetInfo(None, 101)
    system_info['comment'] = desc

    # Apply new settings
    try:
        win32net.NetServerSetInfo(None, 101, system_info)
    except win32net.error as exc:
        (number, context, message) = exc.args
        log.error('Failed to update system')
        log.error('nbr: %s', number)
        log.error('ctx: %s', context)
        log.error('msg: %s', message)
        return False

    return {'Computer Description': get_computer_desc()}
def function[set_computer_desc, parameter[desc]]: constant[ Set the Windows computer description Args: desc (str): The computer description Returns: str: Description if successful, otherwise ``False`` CLI Example: .. code-block:: bash salt 'minion-id' system.set_computer_desc 'This computer belongs to Dave!' ] if name[six].PY2 begin[:] variable[desc] assign[=] call[name[_to_unicode], parameter[name[desc]]] variable[system_info] assign[=] call[name[win32net].NetServerGetInfo, parameter[constant[None], constant[101]]] if compare[name[desc] is constant[None]] begin[:] return[constant[False]] call[name[system_info]][constant[comment]] assign[=] name[desc] <ast.Try object at 0x7da2044c22c0> return[dictionary[[<ast.Constant object at 0x7da2044c0f70>], [<ast.Call object at 0x7da18dc9bf10>]]]
keyword[def] identifier[set_computer_desc] ( identifier[desc] = keyword[None] ): literal[string] keyword[if] identifier[six] . identifier[PY2] : identifier[desc] = identifier[_to_unicode] ( identifier[desc] ) identifier[system_info] = identifier[win32net] . identifier[NetServerGetInfo] ( keyword[None] , literal[int] ) keyword[if] identifier[desc] keyword[is] keyword[None] : keyword[return] keyword[False] identifier[system_info] [ literal[string] ]= identifier[desc] keyword[try] : identifier[win32net] . identifier[NetServerSetInfo] ( keyword[None] , literal[int] , identifier[system_info] ) keyword[except] identifier[win32net] . identifier[error] keyword[as] identifier[exc] : ( identifier[number] , identifier[context] , identifier[message] )= identifier[exc] . identifier[args] identifier[log] . identifier[error] ( literal[string] ) identifier[log] . identifier[error] ( literal[string] , identifier[number] ) identifier[log] . identifier[error] ( literal[string] , identifier[context] ) identifier[log] . identifier[error] ( literal[string] , identifier[message] ) keyword[return] keyword[False] keyword[return] { literal[string] : identifier[get_computer_desc] ()}
def set_computer_desc(desc=None): """ Set the Windows computer description Args: desc (str): The computer description Returns: str: Description if successful, otherwise ``False`` CLI Example: .. code-block:: bash salt 'minion-id' system.set_computer_desc 'This computer belongs to Dave!' """ if six.PY2: desc = _to_unicode(desc) # depends on [control=['if'], data=[]] # Make sure the system exists # Return an object containing current information array for the computer system_info = win32net.NetServerGetInfo(None, 101) # If desc is passed, decode it for unicode if desc is None: return False # depends on [control=['if'], data=[]] system_info['comment'] = desc # Apply new settings try: win32net.NetServerSetInfo(None, 101, system_info) # depends on [control=['try'], data=[]] except win32net.error as exc: (number, context, message) = exc.args log.error('Failed to update system') log.error('nbr: %s', number) log.error('ctx: %s', context) log.error('msg: %s', message) return False # depends on [control=['except'], data=['exc']] return {'Computer Description': get_computer_desc()}
def relation(self, other):  # type: (Term) -> int
    """
    Returns the relationship between the package versions
    allowed by this term and another.

    The result is one of the ``SetRelation`` constants (SUBSET,
    DISJOINT, OVERLAPPING). Both terms must refer to the same
    dependency name; otherwise ``ValueError`` is raised.

    The four branches below cover the sign combinations of the two
    terms (positive = "allows these versions", negative = "forbids
    these versions"); the inline examples on each test show which
    constraint pairing each case handles.
    """
    if self.dependency.name != other.dependency.name:
        raise ValueError(
            "{} should refer to {}".format(other, self.dependency.name)
        )

    other_constraint = other.constraint

    if other.is_positive():
        if self.is_positive():
            # positive vs positive
            if not self._compatible_dependency(other.dependency):
                return SetRelation.DISJOINT

            # foo ^1.5.0 is a subset of foo ^1.0.0
            if other_constraint.allows_all(self.constraint):
                return SetRelation.SUBSET

            # foo ^2.0.0 is disjoint with foo ^1.0.0
            if not self.constraint.allows_any(other_constraint):
                return SetRelation.DISJOINT

            return SetRelation.OVERLAPPING
        else:
            # negative (self) vs positive (other)
            if not self._compatible_dependency(other.dependency):
                return SetRelation.OVERLAPPING

            # not foo ^1.0.0 is disjoint with foo ^1.5.0
            if self.constraint.allows_all(other_constraint):
                return SetRelation.DISJOINT

            # not foo ^1.5.0 overlaps foo ^1.0.0
            # not foo ^2.0.0 is a superset of foo ^1.5.0
            return SetRelation.OVERLAPPING
    else:
        if self.is_positive():
            # positive (self) vs negative (other)
            if not self._compatible_dependency(other.dependency):
                return SetRelation.SUBSET

            # foo ^2.0.0 is a subset of not foo ^1.0.0
            if not other_constraint.allows_any(self.constraint):
                return SetRelation.SUBSET

            # foo ^1.5.0 is disjoint with not foo ^1.0.0
            if other_constraint.allows_all(self.constraint):
                return SetRelation.DISJOINT

            # foo ^1.0.0 overlaps not foo ^1.5.0
            return SetRelation.OVERLAPPING
        else:
            # negative vs negative
            if not self._compatible_dependency(other.dependency):
                return SetRelation.OVERLAPPING

            # not foo ^1.0.0 is a subset of not foo ^1.5.0
            if self.constraint.allows_all(other_constraint):
                return SetRelation.SUBSET

            # not foo ^2.0.0 overlaps not foo ^1.0.0
            # not foo ^1.5.0 is a superset of not foo ^1.0.0
            return SetRelation.OVERLAPPING
def function[relation, parameter[self, other]]: constant[ Returns the relationship between the package versions allowed by this term and another. ] if compare[name[self].dependency.name not_equal[!=] name[other].dependency.name] begin[:] <ast.Raise object at 0x7da18fe92200> variable[other_constraint] assign[=] name[other].constraint if call[name[other].is_positive, parameter[]] begin[:] if call[name[self].is_positive, parameter[]] begin[:] if <ast.UnaryOp object at 0x7da1b1ea0370> begin[:] return[name[SetRelation].DISJOINT] if call[name[other_constraint].allows_all, parameter[name[self].constraint]] begin[:] return[name[SetRelation].SUBSET] if <ast.UnaryOp object at 0x7da1b1fa2920> begin[:] return[name[SetRelation].DISJOINT] return[name[SetRelation].OVERLAPPING]
keyword[def] identifier[relation] ( identifier[self] , identifier[other] ): literal[string] keyword[if] identifier[self] . identifier[dependency] . identifier[name] != identifier[other] . identifier[dependency] . identifier[name] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[other] , identifier[self] . identifier[dependency] . identifier[name] ) ) identifier[other_constraint] = identifier[other] . identifier[constraint] keyword[if] identifier[other] . identifier[is_positive] (): keyword[if] identifier[self] . identifier[is_positive] (): keyword[if] keyword[not] identifier[self] . identifier[_compatible_dependency] ( identifier[other] . identifier[dependency] ): keyword[return] identifier[SetRelation] . identifier[DISJOINT] keyword[if] identifier[other_constraint] . identifier[allows_all] ( identifier[self] . identifier[constraint] ): keyword[return] identifier[SetRelation] . identifier[SUBSET] keyword[if] keyword[not] identifier[self] . identifier[constraint] . identifier[allows_any] ( identifier[other_constraint] ): keyword[return] identifier[SetRelation] . identifier[DISJOINT] keyword[return] identifier[SetRelation] . identifier[OVERLAPPING] keyword[else] : keyword[if] keyword[not] identifier[self] . identifier[_compatible_dependency] ( identifier[other] . identifier[dependency] ): keyword[return] identifier[SetRelation] . identifier[OVERLAPPING] keyword[if] identifier[self] . identifier[constraint] . identifier[allows_all] ( identifier[other_constraint] ): keyword[return] identifier[SetRelation] . identifier[DISJOINT] keyword[return] identifier[SetRelation] . identifier[OVERLAPPING] keyword[else] : keyword[if] identifier[self] . identifier[is_positive] (): keyword[if] keyword[not] identifier[self] . identifier[_compatible_dependency] ( identifier[other] . identifier[dependency] ): keyword[return] identifier[SetRelation] . identifier[SUBSET] keyword[if] keyword[not] identifier[other_constraint] . 
identifier[allows_any] ( identifier[self] . identifier[constraint] ): keyword[return] identifier[SetRelation] . identifier[SUBSET] keyword[if] identifier[other_constraint] . identifier[allows_all] ( identifier[self] . identifier[constraint] ): keyword[return] identifier[SetRelation] . identifier[DISJOINT] keyword[return] identifier[SetRelation] . identifier[OVERLAPPING] keyword[else] : keyword[if] keyword[not] identifier[self] . identifier[_compatible_dependency] ( identifier[other] . identifier[dependency] ): keyword[return] identifier[SetRelation] . identifier[OVERLAPPING] keyword[if] identifier[self] . identifier[constraint] . identifier[allows_all] ( identifier[other_constraint] ): keyword[return] identifier[SetRelation] . identifier[SUBSET] keyword[return] identifier[SetRelation] . identifier[OVERLAPPING]
def relation(self, other): # type: (Term) -> int '\n Returns the relationship between the package versions\n allowed by this term and another.\n ' if self.dependency.name != other.dependency.name: raise ValueError('{} should refer to {}'.format(other, self.dependency.name)) # depends on [control=['if'], data=[]] other_constraint = other.constraint if other.is_positive(): if self.is_positive(): if not self._compatible_dependency(other.dependency): return SetRelation.DISJOINT # depends on [control=['if'], data=[]] # foo ^1.5.0 is a subset of foo ^1.0.0 if other_constraint.allows_all(self.constraint): return SetRelation.SUBSET # depends on [control=['if'], data=[]] # foo ^2.0.0 is disjoint with foo ^1.0.0 if not self.constraint.allows_any(other_constraint): return SetRelation.DISJOINT # depends on [control=['if'], data=[]] return SetRelation.OVERLAPPING # depends on [control=['if'], data=[]] else: if not self._compatible_dependency(other.dependency): return SetRelation.OVERLAPPING # depends on [control=['if'], data=[]] # not foo ^1.0.0 is disjoint with foo ^1.5.0 if self.constraint.allows_all(other_constraint): return SetRelation.DISJOINT # depends on [control=['if'], data=[]] # not foo ^1.5.0 overlaps foo ^1.0.0 # not foo ^2.0.0 is a superset of foo ^1.5.0 return SetRelation.OVERLAPPING # depends on [control=['if'], data=[]] elif self.is_positive(): if not self._compatible_dependency(other.dependency): return SetRelation.SUBSET # depends on [control=['if'], data=[]] # foo ^2.0.0 is a subset of not foo ^1.0.0 if not other_constraint.allows_any(self.constraint): return SetRelation.SUBSET # depends on [control=['if'], data=[]] # foo ^1.5.0 is disjoint with not foo ^1.0.0 if other_constraint.allows_all(self.constraint): return SetRelation.DISJOINT # depends on [control=['if'], data=[]] # foo ^1.0.0 overlaps not foo ^1.5.0 return SetRelation.OVERLAPPING # depends on [control=['if'], data=[]] else: if not self._compatible_dependency(other.dependency): return 
SetRelation.OVERLAPPING # depends on [control=['if'], data=[]] # not foo ^1.0.0 is a subset of not foo ^1.5.0 if self.constraint.allows_all(other_constraint): return SetRelation.SUBSET # depends on [control=['if'], data=[]] # not foo ^2.0.0 overlaps not foo ^1.0.0 # not foo ^1.5.0 is a superset of not foo ^1.0.0 return SetRelation.OVERLAPPING
def read_length_and_key(fp):
    """
    Helper to read descriptor key.
    """
    # A declared length of zero means a fixed 4-byte key follows.
    size = read_fmt('I', fp)[0]
    key = fp.read(size if size else 4)
    if not size and key not in _TERMS:
        logger.debug('Unknown term: %r' % (key))
        _TERMS.add(key)
    return key
def function[read_length_and_key, parameter[fp]]: constant[ Helper to read descriptor key. ] variable[length] assign[=] call[call[name[read_fmt], parameter[constant[I], name[fp]]]][constant[0]] variable[key] assign[=] call[name[fp].read, parameter[<ast.BoolOp object at 0x7da1b26af250>]] if <ast.BoolOp object at 0x7da1b26ac190> begin[:] call[name[logger].debug, parameter[binary_operation[constant[Unknown term: %r] <ast.Mod object at 0x7da2590d6920> name[key]]]] call[name[_TERMS].add, parameter[name[key]]] return[name[key]]
keyword[def] identifier[read_length_and_key] ( identifier[fp] ): literal[string] identifier[length] = identifier[read_fmt] ( literal[string] , identifier[fp] )[ literal[int] ] identifier[key] = identifier[fp] . identifier[read] ( identifier[length] keyword[or] literal[int] ) keyword[if] identifier[length] == literal[int] keyword[and] identifier[key] keyword[not] keyword[in] identifier[_TERMS] : identifier[logger] . identifier[debug] ( literal[string] %( identifier[key] )) identifier[_TERMS] . identifier[add] ( identifier[key] ) keyword[return] identifier[key]
def read_length_and_key(fp): """ Helper to read descriptor key. """ length = read_fmt('I', fp)[0] key = fp.read(length or 4) if length == 0 and key not in _TERMS: logger.debug('Unknown term: %r' % key) _TERMS.add(key) # depends on [control=['if'], data=[]] return key
def _suggest_normalized_version(s):
    """Suggest a normalized version close to the given version string.

    If you have a version string that isn't rational (i.e.
    NormalizedVersion doesn't like it) then you might be able to get an
    equivalent (or close) rational version from this function.

    This does a number of simple normalizations to the given string,
    based on observation of versions currently in use on PyPI. Given a
    dump of those version during PyCon 2009, 4287 of them:
    - 2312 (53.93%) match NormalizedVersion without change
      with the automatic suggestion
    - 3474 (81.04%) match when using this suggestion method

    @param s {str} An irrational version string.
    @returns A rational version string, or None, if couldn't determine one.
    """
    # NOTE: the order of the substitutions below is significant -- each
    # rewrite assumes the shape produced by the ones before it.
    try:
        # _normalized_key raises UnsupportedVersionError for strings it
        # cannot parse; if it succeeds, s needs no adjustment at all.
        _normalized_key(s)
        return s   # already rational
    except UnsupportedVersionError:
        pass

    rs = s.lower()

    # part of this could use maketrans
    for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'),
                       ('beta', 'b'), ('rc', 'c'), ('-final', ''),
                       ('-pre', 'c'), ('-release', ''), ('.release', ''),
                       ('-stable', ''), ('+', '.'), ('_', '.'), (' ', ''),
                       ('.final', ''), ('final', '')):
        rs = rs.replace(orig, repl)

    # if something ends with dev or pre, we add a 0
    rs = re.sub(r"pre$", r"pre0", rs)
    rs = re.sub(r"dev$", r"dev0", rs)

    # if we have something like "b-2" or "a.2" at the end of the
    # version, that is probably beta, alpha, etc
    # let's remove the dash or dot
    rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs)

    # 1.0-dev-r371 -> 1.0.dev371
    # 0.1-dev-r79 -> 0.1.dev79
    rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs)

    # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1
    rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs)

    # Clean: v0.3, v1.0
    if rs.startswith('v'):
        rs = rs[1:]

    # Clean leading '0's on numbers.
    #TODO: unintended side-effect on, e.g., "2003.05.09"
    # PyPI stats: 77 (~2%) better
    rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs)

    # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers
    # zero.
    # PyPI stats: 245 (7.56%) better
    rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs)

    # the 'dev-rNNN' tag is a dev tag
    rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs)

    # clean the - when used as a pre delimiter
    rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs)

    # a terminal "dev" or "devel" can be changed into ".dev0"
    rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs)

    # a terminal "dev" can be changed into ".dev0"
    rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs)

    # a terminal "final" or "stable" can be removed
    rs = re.sub(r"(final|stable)$", "", rs)

    # The 'r' and the '-' tags are post release tags
    # 0.4a1.r10 -> 0.4a1.post10
    # 0.9.33-17222 -> 0.9.33.post17222
    # 0.9.33-r17222 -> 0.9.33.post17222
    rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs)

    # Clean 'r' instead of 'dev' usage:
    # 0.9.33+r17222 -> 0.9.33.dev17222
    # 1.0dev123 -> 1.0.dev123
    # 1.0.git123 -> 1.0.dev123
    # 1.0.bzr123 -> 1.0.dev123
    # 0.1a0dev.123 -> 0.1a0.dev123
    # PyPI stats: ~150 (~4%) better
    rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs)

    # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage:
    # 0.2.pre1 -> 0.2c1
    # 0.2-c1 -> 0.2c1
    # 1.0preview123 -> 1.0c123
    # PyPI stats: ~21 (0.62%) better
    rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs)

    # Tcl/Tk uses "px" for their post release markers
    rs = re.sub(r"p(\d+)$", r".post\1", rs)

    # Final validation: if the rewritten string still isn't parseable,
    # give up and return None per the contract above.
    try:
        _normalized_key(rs)
    except UnsupportedVersionError:
        rs = None
    return rs
def function[_suggest_normalized_version, parameter[s]]: constant[Suggest a normalized version close to the given version string. If you have a version string that isn't rational (i.e. NormalizedVersion doesn't like it) then you might be able to get an equivalent (or close) rational version from this function. This does a number of simple normalizations to the given string, based on observation of versions currently in use on PyPI. Given a dump of those version during PyCon 2009, 4287 of them: - 2312 (53.93%) match NormalizedVersion without change with the automatic suggestion - 3474 (81.04%) match when using this suggestion method @param s {str} An irrational version string. @returns A rational version string, or None, if couldn't determine one. ] <ast.Try object at 0x7da1b2345c30> variable[rs] assign[=] call[name[s].lower, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b2344a00>, <ast.Name object at 0x7da1b2347520>]]] in starred[tuple[[<ast.Tuple object at 0x7da1b2346620>, <ast.Tuple object at 0x7da1b2345060>, <ast.Tuple object at 0x7da1b2346b60>, <ast.Tuple object at 0x7da1b2344100>, <ast.Tuple object at 0x7da1b23446d0>, <ast.Tuple object at 0x7da1b2344c70>, <ast.Tuple object at 0x7da1b2345420>, <ast.Tuple object at 0x7da1b2347af0>, <ast.Tuple object at 0x7da1b2344be0>, <ast.Tuple object at 0x7da1b2344070>, <ast.Tuple object at 0x7da1b2347e20>, <ast.Tuple object at 0x7da1b2347880>, <ast.Tuple object at 0x7da1b2346e00>, <ast.Tuple object at 0x7da1b23454b0>, <ast.Tuple object at 0x7da18fe92200>]]] begin[:] variable[rs] assign[=] call[name[rs].replace, parameter[name[orig], name[repl]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[pre$], constant[pre0], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[dev$], constant[dev0], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[([abc]|rc)[\-\.](\d+)$], constant[\1\2], name[rs]]] variable[rs] assign[=] call[name[re].sub, 
parameter[constant[[\-\.](dev)[\-\.]?r?(\d+)$], constant[.\1\2], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[[.~]?([abc])\.?], constant[\1], name[rs]]] if call[name[rs].startswith, parameter[constant[v]]] begin[:] variable[rs] assign[=] call[name[rs]][<ast.Slice object at 0x7da18fe93b80>] variable[rs] assign[=] call[name[re].sub, parameter[constant[\b0+(\d+)(?!\d)], constant[\1], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[(\d+[abc])$], constant[\g<1>0], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[\.?(dev-r|dev\.r)\.?(\d+)$], constant[.dev\2], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[-(a|b|c)(\d+)$], constant[\1\2], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[[\.\-](dev|devel)$], constant[.dev0], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[(?![\.\-])dev$], constant[.dev0], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[(final|stable)$], constant[], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[\.?(r|-|-r)\.?(\d+)$], constant[.post\2], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[\.?(dev|git|bzr)\.?(\d+)$], constant[.dev\2], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[\.?(pre|preview|-c)(\d+)$], constant[c\g<2>], name[rs]]] variable[rs] assign[=] call[name[re].sub, parameter[constant[p(\d+)$], constant[.post\1], name[rs]]] <ast.Try object at 0x7da18f09f880> return[name[rs]]
keyword[def] identifier[_suggest_normalized_version] ( identifier[s] ): literal[string] keyword[try] : identifier[_normalized_key] ( identifier[s] ) keyword[return] identifier[s] keyword[except] identifier[UnsupportedVersionError] : keyword[pass] identifier[rs] = identifier[s] . identifier[lower] () keyword[for] identifier[orig] , identifier[repl] keyword[in] (( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] ), ( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] ), ( literal[string] , literal[string] ), ( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] ), ( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] ), ( literal[string] , literal[string] )): identifier[rs] = identifier[rs] . identifier[replace] ( identifier[orig] , identifier[repl] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) keyword[if] identifier[rs] . identifier[startswith] ( literal[string] ): identifier[rs] = identifier[rs] [ literal[int] :] identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . 
identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) identifier[rs] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[rs] ) keyword[try] : identifier[_normalized_key] ( identifier[rs] ) keyword[except] identifier[UnsupportedVersionError] : identifier[rs] = keyword[None] keyword[return] identifier[rs]
def _suggest_normalized_version(s): """Suggest a normalized version close to the given version string. If you have a version string that isn't rational (i.e. NormalizedVersion doesn't like it) then you might be able to get an equivalent (or close) rational version from this function. This does a number of simple normalizations to the given string, based on observation of versions currently in use on PyPI. Given a dump of those version during PyCon 2009, 4287 of them: - 2312 (53.93%) match NormalizedVersion without change with the automatic suggestion - 3474 (81.04%) match when using this suggestion method @param s {str} An irrational version string. @returns A rational version string, or None, if couldn't determine one. """ try: _normalized_key(s) return s # already rational # depends on [control=['try'], data=[]] except UnsupportedVersionError: pass # depends on [control=['except'], data=[]] rs = s.lower() # part of this could use maketrans for (orig, repl) in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), ('beta', 'b'), ('rc', 'c'), ('-final', ''), ('-pre', 'c'), ('-release', ''), ('.release', ''), ('-stable', ''), ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), ('final', '')): rs = rs.replace(orig, repl) # depends on [control=['for'], data=[]] # if something ends with dev or pre, we add a 0 rs = re.sub('pre$', 'pre0', rs) rs = re.sub('dev$', 'dev0', rs) # if we have something like "b-2" or "a.2" at the end of the # version, that is probably beta, alpha, etc # let's remove the dash or dot rs = re.sub('([abc]|rc)[\\-\\.](\\d+)$', '\\1\\2', rs) # 1.0-dev-r371 -> 1.0.dev371 # 0.1-dev-r79 -> 0.1.dev79 rs = re.sub('[\\-\\.](dev)[\\-\\.]?r?(\\d+)$', '.\\1\\2', rs) # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 rs = re.sub('[.~]?([abc])\\.?', '\\1', rs) # Clean: v0.3, v1.0 if rs.startswith('v'): rs = rs[1:] # depends on [control=['if'], data=[]] # Clean leading '0's on numbers. 
#TODO: unintended side-effect on, e.g., "2003.05.09" # PyPI stats: 77 (~2%) better rs = re.sub('\\b0+(\\d+)(?!\\d)', '\\1', rs) # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers # zero. # PyPI stats: 245 (7.56%) better rs = re.sub('(\\d+[abc])$', '\\g<1>0', rs) # the 'dev-rNNN' tag is a dev tag rs = re.sub('\\.?(dev-r|dev\\.r)\\.?(\\d+)$', '.dev\\2', rs) # clean the - when used as a pre delimiter rs = re.sub('-(a|b|c)(\\d+)$', '\\1\\2', rs) # a terminal "dev" or "devel" can be changed into ".dev0" rs = re.sub('[\\.\\-](dev|devel)$', '.dev0', rs) # a terminal "dev" can be changed into ".dev0" rs = re.sub('(?![\\.\\-])dev$', '.dev0', rs) # a terminal "final" or "stable" can be removed rs = re.sub('(final|stable)$', '', rs) # The 'r' and the '-' tags are post release tags # 0.4a1.r10 -> 0.4a1.post10 # 0.9.33-17222 -> 0.9.33.post17222 # 0.9.33-r17222 -> 0.9.33.post17222 rs = re.sub('\\.?(r|-|-r)\\.?(\\d+)$', '.post\\2', rs) # Clean 'r' instead of 'dev' usage: # 0.9.33+r17222 -> 0.9.33.dev17222 # 1.0dev123 -> 1.0.dev123 # 1.0.git123 -> 1.0.dev123 # 1.0.bzr123 -> 1.0.dev123 # 0.1a0dev.123 -> 0.1a0.dev123 # PyPI stats: ~150 (~4%) better rs = re.sub('\\.?(dev|git|bzr)\\.?(\\d+)$', '.dev\\2', rs) # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: # 0.2.pre1 -> 0.2c1 # 0.2-c1 -> 0.2c1 # 1.0preview123 -> 1.0c123 # PyPI stats: ~21 (0.62%) better rs = re.sub('\\.?(pre|preview|-c)(\\d+)$', 'c\\g<2>', rs) # Tcl/Tk uses "px" for their post release markers rs = re.sub('p(\\d+)$', '.post\\1', rs) try: _normalized_key(rs) # depends on [control=['try'], data=[]] except UnsupportedVersionError: rs = None # depends on [control=['except'], data=[]] return rs
def checkCorpNum(self, MemberCorpNum, CheckCorpNum):
    """Query the business-closure (νœ΄νμ—…) status of a single company.

    :param MemberCorpNum: Popbill member's corporate registration number.
    :param CheckCorpNum: corporate registration number to look up.
    :return: closure-status info object returned by the API.
    :raise PopbillException: if either corporate number is missing.
    """
    # `not x` covers both None and the empty string; the original compared
    # with `== None`, which is non-idiomatic Python (`is None` / truthiness).
    if not MemberCorpNum:
        raise PopbillException(-99999999, "νŒλΉŒνšŒμ› μ‚¬μ—…μžλ²ˆν˜Έκ°€ μž…λ ₯λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€.")
    if not CheckCorpNum:
        raise PopbillException(-99999999, "μ‘°νšŒν•  μ‚¬μ—…μžλ²ˆν˜Έκ°€ μž…λ ₯λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€.")

    return self._httpget('/CloseDown?CN=' + CheckCorpNum, MemberCorpNum)
def function[checkCorpNum, parameter[self, MemberCorpNum, CheckCorpNum]]: constant[ νœ΄νμ—…μ‘°νšŒ - 단건 args MemberCorpNum : νŒλΉŒνšŒμ› μ‚¬μ—…μžλ²ˆν˜Έ CorpNum : μ‘°νšŒν•  μ‚¬μ—…μžλ²ˆν˜Έ MgtKey : λ¬Έμ„œκ΄€λ¦¬λ²ˆν˜Έ return νœ΄νμ—…μ •λ³΄ object raise PopbillException ] if <ast.BoolOp object at 0x7da2054a4970> begin[:] <ast.Raise object at 0x7da2054a5330> if <ast.BoolOp object at 0x7da18fe93490> begin[:] <ast.Raise object at 0x7da18fe91ae0> return[call[name[self]._httpget, parameter[binary_operation[constant[/CloseDown?CN=] + name[CheckCorpNum]], name[MemberCorpNum]]]]
keyword[def] identifier[checkCorpNum] ( identifier[self] , identifier[MemberCorpNum] , identifier[CheckCorpNum] ): literal[string] keyword[if] identifier[MemberCorpNum] == keyword[None] keyword[or] identifier[MemberCorpNum] == literal[string] : keyword[raise] identifier[PopbillException] (- literal[int] , literal[string] ) keyword[if] identifier[CheckCorpNum] == keyword[None] keyword[or] identifier[CheckCorpNum] == literal[string] : keyword[raise] identifier[PopbillException] (- literal[int] , literal[string] ) keyword[return] identifier[self] . identifier[_httpget] ( literal[string] + identifier[CheckCorpNum] , identifier[MemberCorpNum] )
def checkCorpNum(self, MemberCorpNum, CheckCorpNum): """ νœ΄νμ—…μ‘°νšŒ - 단건 args MemberCorpNum : νŒλΉŒνšŒμ› μ‚¬μ—…μžλ²ˆν˜Έ CorpNum : μ‘°νšŒν•  μ‚¬μ—…μžλ²ˆν˜Έ MgtKey : λ¬Έμ„œκ΄€λ¦¬λ²ˆν˜Έ return νœ΄νμ—…μ •λ³΄ object raise PopbillException """ if MemberCorpNum == None or MemberCorpNum == '': raise PopbillException(-99999999, 'νŒλΉŒνšŒμ› μ‚¬μ—…μžλ²ˆν˜Έκ°€ μž…λ ₯λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€.') # depends on [control=['if'], data=[]] if CheckCorpNum == None or CheckCorpNum == '': raise PopbillException(-99999999, 'μ‘°νšŒν•  μ‚¬μ—…μžλ²ˆν˜Έκ°€ μž…λ ₯λ˜μ§€ μ•Šμ•˜μŠ΅λ‹ˆλ‹€.') # depends on [control=['if'], data=[]] return self._httpget('/CloseDown?CN=' + CheckCorpNum, MemberCorpNum)
def confirm_vlan(self, number_net, id_environment_vlan, ip_version=None):
    """Check whether the VLAN insert needs to be confirmed.

    :param number_net: Filter by vlan number column.
    :param id_environment_vlan: Filter by related environment ID.
    :param ip_version: IP version for checking.
    :return: True if confirmation is needed, False if not.
    :raise AmbienteNaoExisteError: Environment not registered.
    :raise InvalidParameterError: Invalid ID for VLAN.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """
    # str.format coerces every segment: the original concatenated
    # ``id_environment_vlan`` without str(), so an integer ID (the common
    # case, given the other params are str()-wrapped) raised TypeError.
    url = 'vlan/confirm/{0}/{1}/{2}'.format(
        number_net, id_environment_vlan, ip_version)

    code, xml = self.submit(None, 'GET', url)

    return self.response(code, xml)
def function[confirm_vlan, parameter[self, number_net, id_environment_vlan, ip_version]]: constant[Checking if the vlan insert need to be confirmed :param number_net: Filter by vlan number column :param id_environment_vlan: Filter by environment ID related :param ip_version: Ip version for checking :return: True is need confirmation, False if no need :raise AmbienteNaoExisteError: Ambiente nΓ£o cadastrado. :raise InvalidParameterError: Invalid ID for VLAN. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. ] variable[url] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[vlan/confirm/] + call[name[str], parameter[name[number_net]]]] + constant[/]] + name[id_environment_vlan]] + constant[/]] + call[name[str], parameter[name[ip_version]]]] <ast.Tuple object at 0x7da2047e9ff0> assign[=] call[name[self].submit, parameter[constant[None], constant[GET], name[url]]] return[call[name[self].response, parameter[name[code], name[xml]]]]
keyword[def] identifier[confirm_vlan] ( identifier[self] , identifier[number_net] , identifier[id_environment_vlan] , identifier[ip_version] = keyword[None] ): literal[string] identifier[url] = literal[string] + identifier[str] ( identifier[number_net] )+ literal[string] + identifier[id_environment_vlan] + literal[string] + identifier[str] ( identifier[ip_version] ) identifier[code] , identifier[xml] = identifier[self] . identifier[submit] ( keyword[None] , literal[string] , identifier[url] ) keyword[return] identifier[self] . identifier[response] ( identifier[code] , identifier[xml] )
def confirm_vlan(self, number_net, id_environment_vlan, ip_version=None): """Checking if the vlan insert need to be confirmed :param number_net: Filter by vlan number column :param id_environment_vlan: Filter by environment ID related :param ip_version: Ip version for checking :return: True is need confirmation, False if no need :raise AmbienteNaoExisteError: Ambiente nΓ£o cadastrado. :raise InvalidParameterError: Invalid ID for VLAN. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. """ url = 'vlan/confirm/' + str(number_net) + '/' + id_environment_vlan + '/' + str(ip_version) (code, xml) = self.submit(None, 'GET', url) return self.response(code, xml)
def notebook_complete(self, **kwargs):
    """Finalize notebook metadata and persist the notebook to the output path.

    Invoked by the Engine when execution concludes, whether or not an
    exception was raised.
    """
    self.end_time = self.now()

    papermill_meta = self.nb.metadata.papermill
    papermill_meta['end_time'] = self.end_time.isoformat()
    if papermill_meta.get('start_time'):
        elapsed = self.end_time - self.start_time
        papermill_meta['duration'] = elapsed.total_seconds()

    # Callbacks may never have fired; normalize leftover cell statuses,
    # stopping at the first failed cell.
    for cell in self.nb.cells:
        status = cell.metadata.papermill['status']
        if status == self.FAILED:
            break
        if status == self.PENDING:
            cell.metadata.papermill['status'] = self.COMPLETED

    self.complete_pbar()
    self.cleanup_pbar()

    # Force a final sync to disk.
    self.save()
def function[notebook_complete, parameter[self]]: constant[ Finalize the metadata for a notebook and save the notebook to the output path. Called by Engine when execution concludes, regardless of exceptions. ] name[self].end_time assign[=] call[name[self].now, parameter[]] call[name[self].nb.metadata.papermill][constant[end_time]] assign[=] call[name[self].end_time.isoformat, parameter[]] if call[name[self].nb.metadata.papermill.get, parameter[constant[start_time]]] begin[:] call[name[self].nb.metadata.papermill][constant[duration]] assign[=] call[binary_operation[name[self].end_time - name[self].start_time].total_seconds, parameter[]] for taget[name[cell]] in starred[name[self].nb.cells] begin[:] if compare[call[name[cell].metadata.papermill][constant[status]] equal[==] name[self].FAILED] begin[:] break call[name[self].complete_pbar, parameter[]] call[name[self].cleanup_pbar, parameter[]] call[name[self].save, parameter[]]
keyword[def] identifier[notebook_complete] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[end_time] = identifier[self] . identifier[now] () identifier[self] . identifier[nb] . identifier[metadata] . identifier[papermill] [ literal[string] ]= identifier[self] . identifier[end_time] . identifier[isoformat] () keyword[if] identifier[self] . identifier[nb] . identifier[metadata] . identifier[papermill] . identifier[get] ( literal[string] ): identifier[self] . identifier[nb] . identifier[metadata] . identifier[papermill] [ literal[string] ]=( identifier[self] . identifier[end_time] - identifier[self] . identifier[start_time] ). identifier[total_seconds] () keyword[for] identifier[cell] keyword[in] identifier[self] . identifier[nb] . identifier[cells] : keyword[if] identifier[cell] . identifier[metadata] . identifier[papermill] [ literal[string] ]== identifier[self] . identifier[FAILED] : keyword[break] keyword[elif] identifier[cell] . identifier[metadata] . identifier[papermill] [ literal[string] ]== identifier[self] . identifier[PENDING] : identifier[cell] . identifier[metadata] . identifier[papermill] [ literal[string] ]= identifier[self] . identifier[COMPLETED] identifier[self] . identifier[complete_pbar] () identifier[self] . identifier[cleanup_pbar] () identifier[self] . identifier[save] ()
def notebook_complete(self, **kwargs): """ Finalize the metadata for a notebook and save the notebook to the output path. Called by Engine when execution concludes, regardless of exceptions. """ self.end_time = self.now() self.nb.metadata.papermill['end_time'] = self.end_time.isoformat() if self.nb.metadata.papermill.get('start_time'): self.nb.metadata.papermill['duration'] = (self.end_time - self.start_time).total_seconds() # depends on [control=['if'], data=[]] # Cleanup cell statuses in case callbacks were never called for cell in self.nb.cells: if cell.metadata.papermill['status'] == self.FAILED: break # depends on [control=['if'], data=[]] elif cell.metadata.papermill['status'] == self.PENDING: cell.metadata.papermill['status'] = self.COMPLETED # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cell']] self.complete_pbar() self.cleanup_pbar() # Force a final sync self.save()
def compassmot_status_send(self, throttle, current, interference, CompensationX, CompensationY, CompensationZ, force_mavlink1=False):
    """Send the status of a compassmot calibration.

    throttle        : throttle (percent*10) (uint16_t)
    current         : current (amps) (float)
    interference    : interference (percent) (uint16_t)
    CompensationX   : Motor Compensation X (float)
    CompensationY   : Motor Compensation Y (float)
    CompensationZ   : Motor Compensation Z (float)
    """
    packet = self.compassmot_status_encode(
        throttle, current, interference,
        CompensationX, CompensationY, CompensationZ)
    return self.send(packet, force_mavlink1=force_mavlink1)
def function[compassmot_status_send, parameter[self, throttle, current, interference, CompensationX, CompensationY, CompensationZ, force_mavlink1]]: constant[ Status of compassmot calibration throttle : throttle (percent*10) (uint16_t) current : current (amps) (float) interference : interference (percent) (uint16_t) CompensationX : Motor Compensation X (float) CompensationY : Motor Compensation Y (float) CompensationZ : Motor Compensation Z (float) ] return[call[name[self].send, parameter[call[name[self].compassmot_status_encode, parameter[name[throttle], name[current], name[interference], name[CompensationX], name[CompensationY], name[CompensationZ]]]]]]
keyword[def] identifier[compassmot_status_send] ( identifier[self] , identifier[throttle] , identifier[current] , identifier[interference] , identifier[CompensationX] , identifier[CompensationY] , identifier[CompensationZ] , identifier[force_mavlink1] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[compassmot_status_encode] ( identifier[throttle] , identifier[current] , identifier[interference] , identifier[CompensationX] , identifier[CompensationY] , identifier[CompensationZ] ), identifier[force_mavlink1] = identifier[force_mavlink1] )
def compassmot_status_send(self, throttle, current, interference, CompensationX, CompensationY, CompensationZ, force_mavlink1=False): """ Status of compassmot calibration throttle : throttle (percent*10) (uint16_t) current : current (amps) (float) interference : interference (percent) (uint16_t) CompensationX : Motor Compensation X (float) CompensationY : Motor Compensation Y (float) CompensationZ : Motor Compensation Z (float) """ return self.send(self.compassmot_status_encode(throttle, current, interference, CompensationX, CompensationY, CompensationZ), force_mavlink1=force_mavlink1)
def _unapply_interception(target, ctx=None):
    """Unapply interception on input target in cleaning it.

    Restores the original (intercepted) callable in one of four ways,
    depending on what was intercepted: a builtin (re-point every
    module-level reference), a plain function (swap the original
    ``__code__`` back in), an attribute owned by ``ctx`` (delete or re-set
    it), or an interception inherited from a base class (left untouched).

    :param routine target: target from where removing an interception
        function. is_joinpoint(target) must be True.
    :param ctx: target ctx (class or instance that owns ``target``); looked
        up automatically when None.
    :raises JoinpointError: if ``target`` is not intercepted, or if a
        builtin target is not referenced anywhere in its module.
    """
    # try to get the right ctx
    if ctx is None:
        ctx = find_ctx(elt=target)
    # get previous target
    intercepted, old_ctx = get_intercepted(target)
    # if ctx is None and old_ctx is not None, update ctx with old_ctx
    if ctx is None and old_ctx is not None:
        ctx = old_ctx
    if intercepted is None:
        raise JoinpointError('{0} must be intercepted'.format(target))
    # flag to deleting of joinpoint_function
    del_joinpoint_function = False
    # if old target is a not modifiable resource
    if isbuiltin(intercepted):
        module = getmodule(intercepted)
        found = False
        # update references to target to not modifiable element in module
        for name, member in getmembers(module):
            if member is target:
                setattr(module, name, intercepted)
                found = True
        # if no reference found, raise an Exception
        if not found:
            raise JoinpointError(
                "Impossible to unapply interception on not modifiable element \
{0}. Must be contained in module {1}".format(target, module)
            )
    elif ctx is None:
        # get joinpoint function
        joinpoint_function = _get_function(target)
        # update old code on target
        joinpoint_function.__code__ = intercepted.__code__
        # ensure to delete joinpoint_function
        del_joinpoint_function = True
    else:
        # flag for joinpoint recovering
        recover = False
        # get interception name in order to update/delete interception from ctx
        intercepted_name = intercepted.__name__
        # should we change of target or is it inherited ?
        if isclass(ctx):
            base_interception, _ = super_method(name=intercepted_name, ctx=ctx)
        else:
            # instance ctx: look the attribute up on its class
            base_interception = getattr(ctx.__class__, intercepted_name, None)
        # if base interception does not exist
        if base_interception is None:
            # recover intercepted
            recover = True
        else:
            # get joinpoint_function
            joinpoint_function = _get_function(target)
            # get base function
            if is_intercepted(base_interception):
                base_intercepted, _ = get_intercepted(base_interception)
            else:
                base_intercepted = _get_function(base_interception)
            # is interception inherited ?
            if base_intercepted is joinpoint_function:
                pass  # do nothing
            # is intercepted inherited
            elif base_intercepted is intercepted:
                # del interception
                delattr(ctx, intercepted_name)
                del_joinpoint_function = True
            else:
                # base function is something else
                recover = True
        if recover:
            # if recover is required
            # new content to put in ctx
            new_content = intercepted
            if ismethod(target):
                # in creating eventually a new method
                args = [new_content, ctx]
                if PY2:
                    # if py2, specify the ctx class
                    # and unbound method type
                    if target.__self__ is None:
                        args = [new_content, None, ctx]
                    else:
                        # or instance method
                        args.append(ctx.__class__)
                # instantiate a new method
                new_content = MethodType(*args)
            # update ctx with intercepted
            setattr(ctx, intercepted_name, new_content)
            joinpoint_function = _get_function(target)
            del_joinpoint_function = True
    if del_joinpoint_function:
        # delete _INTERCEPTED and _INTERCEPTED_CTX from joinpoint_function
        # (attribute names held in those module-level string constants)
        if hasattr(joinpoint_function, _INTERCEPTED):
            delattr(joinpoint_function, _INTERCEPTED)
        if hasattr(joinpoint_function, _INTERCEPTED_CTX):
            delattr(joinpoint_function, _INTERCEPTED_CTX)
        # drop the local name only; the function object stays reachable
        # through target itself
        del joinpoint_function
def function[_unapply_interception, parameter[target, ctx]]: constant[Unapply interception on input target in cleaning it. :param routine target: target from where removing an interception function. is_joinpoint(target) must be True. :param ctx: target ctx. ] if compare[name[ctx] is constant[None]] begin[:] variable[ctx] assign[=] call[name[find_ctx], parameter[]] <ast.Tuple object at 0x7da1b0916590> assign[=] call[name[get_intercepted], parameter[name[target]]] if <ast.BoolOp object at 0x7da1b0915390> begin[:] variable[ctx] assign[=] name[old_ctx] if compare[name[intercepted] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0a66d40> variable[del_joinpoint_function] assign[=] constant[False] if call[name[isbuiltin], parameter[name[intercepted]]] begin[:] variable[module] assign[=] call[name[getmodule], parameter[name[intercepted]]] variable[found] assign[=] constant[False] for taget[tuple[[<ast.Name object at 0x7da1b0a65ab0>, <ast.Name object at 0x7da1b0a65450>]]] in starred[call[name[getmembers], parameter[name[module]]]] begin[:] if compare[name[member] is name[target]] begin[:] call[name[setattr], parameter[name[module], name[name], name[intercepted]]] variable[found] assign[=] constant[True] if <ast.UnaryOp object at 0x7da1b0a666b0> begin[:] <ast.Raise object at 0x7da1b0a65510> if name[del_joinpoint_function] begin[:] if call[name[hasattr], parameter[name[joinpoint_function], name[_INTERCEPTED]]] begin[:] call[name[delattr], parameter[name[joinpoint_function], name[_INTERCEPTED]]] if call[name[hasattr], parameter[name[joinpoint_function], name[_INTERCEPTED_CTX]]] begin[:] call[name[delattr], parameter[name[joinpoint_function], name[_INTERCEPTED_CTX]]] <ast.Delete object at 0x7da1b0a4ca30>
keyword[def] identifier[_unapply_interception] ( identifier[target] , identifier[ctx] = keyword[None] ): literal[string] keyword[if] identifier[ctx] keyword[is] keyword[None] : identifier[ctx] = identifier[find_ctx] ( identifier[elt] = identifier[target] ) identifier[intercepted] , identifier[old_ctx] = identifier[get_intercepted] ( identifier[target] ) keyword[if] identifier[ctx] keyword[is] keyword[None] keyword[and] identifier[old_ctx] keyword[is] keyword[not] keyword[None] : identifier[ctx] = identifier[old_ctx] keyword[if] identifier[intercepted] keyword[is] keyword[None] : keyword[raise] identifier[JoinpointError] ( literal[string] . identifier[format] ( identifier[target] )) identifier[del_joinpoint_function] = keyword[False] keyword[if] identifier[isbuiltin] ( identifier[intercepted] ): identifier[module] = identifier[getmodule] ( identifier[intercepted] ) identifier[found] = keyword[False] keyword[for] identifier[name] , identifier[member] keyword[in] identifier[getmembers] ( identifier[module] ): keyword[if] identifier[member] keyword[is] identifier[target] : identifier[setattr] ( identifier[module] , identifier[name] , identifier[intercepted] ) identifier[found] = keyword[True] keyword[if] keyword[not] identifier[found] : keyword[raise] identifier[JoinpointError] ( literal[string] . identifier[format] ( identifier[target] , identifier[module] ) ) keyword[elif] identifier[ctx] keyword[is] keyword[None] : identifier[joinpoint_function] = identifier[_get_function] ( identifier[target] ) identifier[joinpoint_function] . identifier[__code__] = identifier[intercepted] . identifier[__code__] identifier[del_joinpoint_function] = keyword[True] keyword[else] : identifier[recover] = keyword[False] identifier[intercepted_name] = identifier[intercepted] . 
identifier[__name__] keyword[if] identifier[isclass] ( identifier[ctx] ): identifier[base_interception] , identifier[_] = identifier[super_method] ( identifier[name] = identifier[intercepted_name] , identifier[ctx] = identifier[ctx] ) keyword[else] : identifier[base_interception] = identifier[getattr] ( identifier[ctx] . identifier[__class__] , identifier[intercepted_name] , keyword[None] ) keyword[if] identifier[base_interception] keyword[is] keyword[None] : identifier[recover] = keyword[True] keyword[else] : identifier[joinpoint_function] = identifier[_get_function] ( identifier[target] ) keyword[if] identifier[is_intercepted] ( identifier[base_interception] ): identifier[base_intercepted] , identifier[_] = identifier[get_intercepted] ( identifier[base_interception] ) keyword[else] : identifier[base_intercepted] = identifier[_get_function] ( identifier[base_interception] ) keyword[if] identifier[base_intercepted] keyword[is] identifier[joinpoint_function] : keyword[pass] keyword[elif] identifier[base_intercepted] keyword[is] identifier[intercepted] : identifier[delattr] ( identifier[ctx] , identifier[intercepted_name] ) identifier[del_joinpoint_function] = keyword[True] keyword[else] : identifier[recover] = keyword[True] keyword[if] identifier[recover] : identifier[new_content] = identifier[intercepted] keyword[if] identifier[ismethod] ( identifier[target] ): identifier[args] =[ identifier[new_content] , identifier[ctx] ] keyword[if] identifier[PY2] : keyword[if] identifier[target] . identifier[__self__] keyword[is] keyword[None] : identifier[args] =[ identifier[new_content] , keyword[None] , identifier[ctx] ] keyword[else] : identifier[args] . identifier[append] ( identifier[ctx] . 
identifier[__class__] ) identifier[new_content] = identifier[MethodType] (* identifier[args] ) identifier[setattr] ( identifier[ctx] , identifier[intercepted_name] , identifier[new_content] ) identifier[joinpoint_function] = identifier[_get_function] ( identifier[target] ) identifier[del_joinpoint_function] = keyword[True] keyword[if] identifier[del_joinpoint_function] : keyword[if] identifier[hasattr] ( identifier[joinpoint_function] , identifier[_INTERCEPTED] ): identifier[delattr] ( identifier[joinpoint_function] , identifier[_INTERCEPTED] ) keyword[if] identifier[hasattr] ( identifier[joinpoint_function] , identifier[_INTERCEPTED_CTX] ): identifier[delattr] ( identifier[joinpoint_function] , identifier[_INTERCEPTED_CTX] ) keyword[del] identifier[joinpoint_function]
def _unapply_interception(target, ctx=None): """Unapply interception on input target in cleaning it. :param routine target: target from where removing an interception function. is_joinpoint(target) must be True. :param ctx: target ctx. """ # try to get the right ctx if ctx is None: ctx = find_ctx(elt=target) # depends on [control=['if'], data=['ctx']] # get previous target (intercepted, old_ctx) = get_intercepted(target) # if ctx is None and old_ctx is not None, update ctx with old_ctx if ctx is None and old_ctx is not None: ctx = old_ctx # depends on [control=['if'], data=[]] if intercepted is None: raise JoinpointError('{0} must be intercepted'.format(target)) # depends on [control=['if'], data=[]] # flag to deleting of joinpoint_function del_joinpoint_function = False # if old target is a not modifiable resource if isbuiltin(intercepted): module = getmodule(intercepted) found = False # update references to target to not modifiable element in module for (name, member) in getmembers(module): if member is target: setattr(module, name, intercepted) found = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # if no reference found, raise an Exception if not found: raise JoinpointError('Impossible to unapply interception on not modifiable element {0}. Must be contained in module {1}'.format(target, module)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif ctx is None: # get joinpoint function joinpoint_function = _get_function(target) # update old code on target joinpoint_function.__code__ = intercepted.__code__ # ensure to delete joinpoint_function del_joinpoint_function = True # depends on [control=['if'], data=[]] else: # flag for joinpoint recovering recover = False # get interception name in order to update/delete interception from ctx intercepted_name = intercepted.__name__ # should we change of target or is it inherited ? 
if isclass(ctx): (base_interception, _) = super_method(name=intercepted_name, ctx=ctx) # depends on [control=['if'], data=[]] else: base_interception = getattr(ctx.__class__, intercepted_name, None) # if base interception does not exist if base_interception is None: # recover intercepted recover = True # depends on [control=['if'], data=[]] else: # get joinpoint_function joinpoint_function = _get_function(target) # get base function if is_intercepted(base_interception): (base_intercepted, _) = get_intercepted(base_interception) # depends on [control=['if'], data=[]] else: base_intercepted = _get_function(base_interception) # is interception inherited ? if base_intercepted is joinpoint_function: pass # do nothing # depends on [control=['if'], data=[]] # is intercepted inherited elif base_intercepted is intercepted: # del interception delattr(ctx, intercepted_name) del_joinpoint_function = True # depends on [control=['if'], data=[]] else: # base function is something else recover = True if recover: # if recover is required # new content to put in ctx new_content = intercepted if ismethod(target): # in creating eventually a new method args = [new_content, ctx] if PY2: # if py2, specify the ctx class # and unbound method type if target.__self__ is None: args = [new_content, None, ctx] # depends on [control=['if'], data=[]] else: # or instance method args.append(ctx.__class__) # depends on [control=['if'], data=[]] # instantiate a new method new_content = MethodType(*args) # depends on [control=['if'], data=[]] # update ctx with intercepted setattr(ctx, intercepted_name, new_content) joinpoint_function = _get_function(target) del_joinpoint_function = True # depends on [control=['if'], data=[]] if del_joinpoint_function: # delete _INTERCEPTED and _INTERCEPTED_CTX from joinpoint_function if hasattr(joinpoint_function, _INTERCEPTED): delattr(joinpoint_function, _INTERCEPTED) if hasattr(joinpoint_function, _INTERCEPTED_CTX): delattr(joinpoint_function, _INTERCEPTED_CTX) # 
depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] del joinpoint_function # depends on [control=['if'], data=[]]
def get_real_percent(self):
    """Return the unmodified percentage of the score on a 0-point scale.

    Yields 0 when no votes or no score have been recorded.
    """
    if self.votes and self.score:
        return 100 * (self.get_real_rating() / self.field.range)
    return 0
def function[get_real_percent, parameter[self]]: constant[get_real_percent() Returns the unmodified percentage of the score based on a 0-point scale.] if <ast.UnaryOp object at 0x7da18dc05480> begin[:] return[constant[0]] return[binary_operation[constant[100] * binary_operation[call[name[self].get_real_rating, parameter[]] / name[self].field.range]]]
keyword[def] identifier[get_real_percent] ( identifier[self] ): literal[string] keyword[if] keyword[not] ( identifier[self] . identifier[votes] keyword[and] identifier[self] . identifier[score] ): keyword[return] literal[int] keyword[return] literal[int] *( identifier[self] . identifier[get_real_rating] ()/ identifier[self] . identifier[field] . identifier[range] )
def get_real_percent(self): """get_real_percent() Returns the unmodified percentage of the score based on a 0-point scale.""" if not (self.votes and self.score): return 0 # depends on [control=['if'], data=[]] return 100 * (self.get_real_rating() / self.field.range)
def _compute_initial_out_degree(self): """The number of operations which use each tensor as input. Returns: a {string, int} mapping tensor name to the number of operations which use it as input, or one plus that quantity if the tensor is final. """ out_degree = collections.defaultdict(int) # Pretend that final tensors have an additional degree so they are not # freed. for tensor_name in self.get_all_tensor_names(): if self.is_tensor_final(tensor_name): out_degree[tensor_name] = 1 for operation_name in self.get_all_operation_names(): for input_name in self.get_operation_input_names(operation_name): out_degree[input_name] += 1 return out_degree
def function[_compute_initial_out_degree, parameter[self]]: constant[The number of operations which use each tensor as input. Returns: a {string, int} mapping tensor name to the number of operations which use it as input, or one plus that quantity if the tensor is final. ] variable[out_degree] assign[=] call[name[collections].defaultdict, parameter[name[int]]] for taget[name[tensor_name]] in starred[call[name[self].get_all_tensor_names, parameter[]]] begin[:] if call[name[self].is_tensor_final, parameter[name[tensor_name]]] begin[:] call[name[out_degree]][name[tensor_name]] assign[=] constant[1] for taget[name[operation_name]] in starred[call[name[self].get_all_operation_names, parameter[]]] begin[:] for taget[name[input_name]] in starred[call[name[self].get_operation_input_names, parameter[name[operation_name]]]] begin[:] <ast.AugAssign object at 0x7da204566b00> return[name[out_degree]]
keyword[def] identifier[_compute_initial_out_degree] ( identifier[self] ): literal[string] identifier[out_degree] = identifier[collections] . identifier[defaultdict] ( identifier[int] ) keyword[for] identifier[tensor_name] keyword[in] identifier[self] . identifier[get_all_tensor_names] (): keyword[if] identifier[self] . identifier[is_tensor_final] ( identifier[tensor_name] ): identifier[out_degree] [ identifier[tensor_name] ]= literal[int] keyword[for] identifier[operation_name] keyword[in] identifier[self] . identifier[get_all_operation_names] (): keyword[for] identifier[input_name] keyword[in] identifier[self] . identifier[get_operation_input_names] ( identifier[operation_name] ): identifier[out_degree] [ identifier[input_name] ]+= literal[int] keyword[return] identifier[out_degree]
def _compute_initial_out_degree(self): """The number of operations which use each tensor as input. Returns: a {string, int} mapping tensor name to the number of operations which use it as input, or one plus that quantity if the tensor is final. """ out_degree = collections.defaultdict(int) # Pretend that final tensors have an additional degree so they are not # freed. for tensor_name in self.get_all_tensor_names(): if self.is_tensor_final(tensor_name): out_degree[tensor_name] = 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tensor_name']] for operation_name in self.get_all_operation_names(): for input_name in self.get_operation_input_names(operation_name): out_degree[input_name] += 1 # depends on [control=['for'], data=['input_name']] # depends on [control=['for'], data=['operation_name']] return out_degree