Dataset columns: code (string, lengths 75 to 104k), code_sememe (string, 47 to 309k), token_type (string, 215 to 214k), code_dependency (string, 75 to 155k).
def __ensure_suffix_stem(t, suffix):
    """ Ensure that the target t has the given suffix, and return the file's stem. """
    tpath = str(t)
    if not tpath.endswith(suffix):
        stem = tpath
        tpath += suffix
        return tpath, stem
    else:
        stem, ext = os.path.splitext(tpath)
        return t, stem
def function[__ensure_suffix_stem, parameter[t, suffix]]: constant[ Ensure that the target t has the given suffix, and return the file's stem. ] variable[tpath] assign[=] call[name[str], parameter[name[t]]] if <ast.UnaryOp object at 0x7da2041d9ff0> begin[:] variable[stem] assign[=] name[tpath] <ast.AugAssign object at 0x7da2041dbe20> return[tuple[[<ast.Name object at 0x7da2041d8d60>, <ast.Name object at 0x7da2041d84f0>]]] return[tuple[[<ast.Name object at 0x7da2041db6d0>, <ast.Name object at 0x7da2041d8550>]]]
keyword[def] identifier[__ensure_suffix_stem] ( identifier[t] , identifier[suffix] ): literal[string] identifier[tpath] = identifier[str] ( identifier[t] ) keyword[if] keyword[not] identifier[tpath] . identifier[endswith] ( identifier[suffix] ): identifier[stem] = identifier[tpath] identifier[tpath] += identifier[suffix] keyword[return] identifier[tpath] , identifier[stem] keyword[else] : identifier[stem] , identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[tpath] ) keyword[return] identifier[t] , identifier[stem]
def __ensure_suffix_stem(t, suffix): """ Ensure that the target t has the given suffix, and return the file's stem. """ tpath = str(t) if not tpath.endswith(suffix): stem = tpath tpath += suffix return (tpath, stem) # depends on [control=['if'], data=[]] else: (stem, ext) = os.path.splitext(tpath) return (t, stem)
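A minimal usage sketch of the function above (assumes `import os` at module level, which the `splitext` branch relies on):

print(__ensure_suffix_stem("report", ".tex"))      # ('report.tex', 'report')
print(__ensure_suffix_stem("report.tex", ".tex"))  # ('report.tex', 'report') -- t returned unchanged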
def process(self, user, timestamp, data=None):
    """
    Processes a user event.

    :Parameters:
        user : `hashable`
            A hashable value to identify a user (`int` or `str` are OK)
        timestamp : :class:`mwtypes.Timestamp`
            The timestamp of the event
        data : `mixed`
            Event meta data

    :Returns:
        A generator of :class:`~mwsessions.Session` expired after
        processing the user event.
    """
    event = Event(user, mwtypes.Timestamp(timestamp), self.event_i, data)
    self.event_i += 1

    for user, events in self._clear_expired(event.timestamp):
        yield Session(user, unpack_events(events))

    # Apply revision
    if event.user in self.active_users:
        events = self.active_users[event.user]
    else:
        events = []
        self.active_users[event.user] = events
        active_session = ActiveSession(event.timestamp, event.i, events)
        self.recently_active.push(active_session)

    events.append(event)
def function[process, parameter[self, user, timestamp, data]]: constant[ Processes a user event. :Parameters: user : `hashable` A hashable value to identify a user (`int` or `str` are OK) timestamp : :class:`mwtypes.Timestamp` The timestamp of the event data : `mixed` Event meta data :Returns: A generator of :class:`~mwsessions.Session` expired after processing the user event. ] variable[event] assign[=] call[name[Event], parameter[name[user], call[name[mwtypes].Timestamp, parameter[name[timestamp]]], name[self].event_i, name[data]]] <ast.AugAssign object at 0x7da18f723430> for taget[tuple[[<ast.Name object at 0x7da18f7225f0>, <ast.Name object at 0x7da18f7207c0>]]] in starred[call[name[self]._clear_expired, parameter[name[event].timestamp]]] begin[:] <ast.Yield object at 0x7da18f721f30> if compare[name[event].user in name[self].active_users] begin[:] variable[events] assign[=] call[name[self].active_users][name[event].user] call[name[events].append, parameter[name[event]]]
keyword[def] identifier[process] ( identifier[self] , identifier[user] , identifier[timestamp] , identifier[data] = keyword[None] ): literal[string] identifier[event] = identifier[Event] ( identifier[user] , identifier[mwtypes] . identifier[Timestamp] ( identifier[timestamp] ), identifier[self] . identifier[event_i] , identifier[data] ) identifier[self] . identifier[event_i] += literal[int] keyword[for] identifier[user] , identifier[events] keyword[in] identifier[self] . identifier[_clear_expired] ( identifier[event] . identifier[timestamp] ): keyword[yield] identifier[Session] ( identifier[user] , identifier[unpack_events] ( identifier[events] )) keyword[if] identifier[event] . identifier[user] keyword[in] identifier[self] . identifier[active_users] : identifier[events] = identifier[self] . identifier[active_users] [ identifier[event] . identifier[user] ] keyword[else] : identifier[events] =[] identifier[self] . identifier[active_users] [ identifier[event] . identifier[user] ]= identifier[events] identifier[active_session] = identifier[ActiveSession] ( identifier[event] . identifier[timestamp] , identifier[event] . identifier[i] , identifier[events] ) identifier[self] . identifier[recently_active] . identifier[push] ( identifier[active_session] ) identifier[events] . identifier[append] ( identifier[event] )
def process(self, user, timestamp, data=None): """ Processes a user event. :Parameters: user : `hashable` A hashable value to identify a user (`int` or `str` are OK) timestamp : :class:`mwtypes.Timestamp` The timestamp of the event data : `mixed` Event meta data :Returns: A generator of :class:`~mwsessions.Session` expired after processing the user event. """ event = Event(user, mwtypes.Timestamp(timestamp), self.event_i, data) self.event_i += 1 for (user, events) in self._clear_expired(event.timestamp): yield Session(user, unpack_events(events)) # depends on [control=['for'], data=[]] # Apply revision if event.user in self.active_users: events = self.active_users[event.user] # depends on [control=['if'], data=[]] else: events = [] self.active_users[event.user] = events active_session = ActiveSession(event.timestamp, event.i, events) self.recently_active.push(active_session) events.append(event)
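A hedged usage sketch: the docstring implies this generator method lives on a sessionizer object fed time-ordered events; `Sessionizer` is assumed here as that host class.

sessionizer = Sessionizer()  # assumed host class of process()
events = [("alice", 1), ("alice", 2), ("bob", 100)]  # hypothetical (user, timestamp) stream
for user, timestamp in events:
    for session in sessionizer.process(user, timestamp):
        print("expired:", session)  # sessions whose cutoff passed before this event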
def block_partition(block, i):
    """ Returns two blocks, as a result of partitioning the given one at
    i-th instruction.
    """
    i += 1
    new_block = BasicBlock(block.asm[i:])
    block.mem = block.mem[:i]
    block.asm = block.asm[:i]
    block.update_labels()
    new_block.update_labels()

    new_block.goes_to = block.goes_to
    block.goes_to = IdentitySet()

    new_block.label_goes = block.label_goes
    block.label_goes = []

    new_block.next = new_block.original_next = block.original_next
    new_block.prev = block
    new_block.add_comes_from(block)

    if new_block.next is not None:
        new_block.next.prev = new_block
        new_block.next.add_comes_from(new_block)
        new_block.next.delete_from(block)

    block.next = block.original_next = new_block
    block.update_next_block()
    block.add_goes_to(new_block)

    return block, new_block
def function[block_partition, parameter[block, i]]: constant[ Returns two blocks, as a result of partitioning the given one at i-th instruction. ] <ast.AugAssign object at 0x7da18f09f430> variable[new_block] assign[=] call[name[BasicBlock], parameter[call[name[block].asm][<ast.Slice object at 0x7da18f09d990>]]] name[block].mem assign[=] call[name[block].mem][<ast.Slice object at 0x7da18f09f9d0>] name[block].asm assign[=] call[name[block].asm][<ast.Slice object at 0x7da18f09df00>] call[name[block].update_labels, parameter[]] call[name[new_block].update_labels, parameter[]] name[new_block].goes_to assign[=] name[block].goes_to name[block].goes_to assign[=] call[name[IdentitySet], parameter[]] name[new_block].label_goes assign[=] name[block].label_goes name[block].label_goes assign[=] list[[]] name[new_block].next assign[=] name[block].original_next name[new_block].prev assign[=] name[block] call[name[new_block].add_comes_from, parameter[name[block]]] if compare[name[new_block].next is_not constant[None]] begin[:] name[new_block].next.prev assign[=] name[new_block] call[name[new_block].next.add_comes_from, parameter[name[new_block]]] call[name[new_block].next.delete_from, parameter[name[block]]] name[block].next assign[=] name[new_block] call[name[block].update_next_block, parameter[]] call[name[block].add_goes_to, parameter[name[new_block]]] return[tuple[[<ast.Name object at 0x7da20c6c7400>, <ast.Name object at 0x7da20c6c6170>]]]
keyword[def] identifier[block_partition] ( identifier[block] , identifier[i] ): literal[string] identifier[i] += literal[int] identifier[new_block] = identifier[BasicBlock] ( identifier[block] . identifier[asm] [ identifier[i] :]) identifier[block] . identifier[mem] = identifier[block] . identifier[mem] [: identifier[i] ] identifier[block] . identifier[asm] = identifier[block] . identifier[asm] [: identifier[i] ] identifier[block] . identifier[update_labels] () identifier[new_block] . identifier[update_labels] () identifier[new_block] . identifier[goes_to] = identifier[block] . identifier[goes_to] identifier[block] . identifier[goes_to] = identifier[IdentitySet] () identifier[new_block] . identifier[label_goes] = identifier[block] . identifier[label_goes] identifier[block] . identifier[label_goes] =[] identifier[new_block] . identifier[next] = identifier[new_block] . identifier[original_next] = identifier[block] . identifier[original_next] identifier[new_block] . identifier[prev] = identifier[block] identifier[new_block] . identifier[add_comes_from] ( identifier[block] ) keyword[if] identifier[new_block] . identifier[next] keyword[is] keyword[not] keyword[None] : identifier[new_block] . identifier[next] . identifier[prev] = identifier[new_block] identifier[new_block] . identifier[next] . identifier[add_comes_from] ( identifier[new_block] ) identifier[new_block] . identifier[next] . identifier[delete_from] ( identifier[block] ) identifier[block] . identifier[next] = identifier[block] . identifier[original_next] = identifier[new_block] identifier[block] . identifier[update_next_block] () identifier[block] . identifier[add_goes_to] ( identifier[new_block] ) keyword[return] identifier[block] , identifier[new_block]
def block_partition(block, i): """ Returns two blocks, as a result of partitioning the given one at i-th instruction. """ i += 1 new_block = BasicBlock(block.asm[i:]) block.mem = block.mem[:i] block.asm = block.asm[:i] block.update_labels() new_block.update_labels() new_block.goes_to = block.goes_to block.goes_to = IdentitySet() new_block.label_goes = block.label_goes block.label_goes = [] new_block.next = new_block.original_next = block.original_next new_block.prev = block new_block.add_comes_from(block) if new_block.next is not None: new_block.next.prev = new_block new_block.next.add_comes_from(new_block) new_block.next.delete_from(block) # depends on [control=['if'], data=[]] block.next = block.original_next = new_block block.update_next_block() block.add_goes_to(new_block) return (block, new_block)
def length(string, until=None):
    """
    Returns the number of graphemes in the string.

    Note that this function needs to traverse the full string to calculate
    the length, unlike `len(string)`, and its time consumption is linear to
    the length of the string (up to the `until` value).

    Only counts up to the `until` argument, if given. This is useful when
    testing the length of a string against some limit and the excess length
    is not interesting.

    >>> rainbow_flag = "🏳️‍🌈"
    >>> len(rainbow_flag)
    4
    >>> graphemes.length(rainbow_flag)
    1
    >>> graphemes.length("".join(str(i) for i in range(100)), 30)
    30
    """
    if until is None:
        return sum(1 for _ in GraphemeIterator(string))

    iterator = graphemes(string)
    count = 0
    while True:
        try:
            if count >= until:
                break
            next(iterator)
        except StopIteration:
            break
        else:
            count += 1
    return count
def function[length, parameter[string, until]]: constant[ Returns the number of graphemes in the string. Note that this functions needs to traverse the full string to calculate the length, unlike `len(string)` and it's time consumption is linear to the length of the string (up to the `until` value). Only counts up to the `until` argument, if given. This is useful when testing the length of a string against some limit and the excess length is not interesting. >>> rainbow_flag = "πŸ³οΈβ€πŸŒˆ" >>> len(rainbow_flag) 4 >>> graphemes.length(rainbow_flag) 1 >>> graphemes.length("".join(str(i) for i in range(100)), 30) 30 ] if compare[name[until] is constant[None]] begin[:] return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b10bcb50>]]] variable[iterator] assign[=] call[name[graphemes], parameter[name[string]]] variable[count] assign[=] constant[0] while constant[True] begin[:] <ast.Try object at 0x7da1b1052380> return[name[count]]
keyword[def] identifier[length] ( identifier[string] , identifier[until] = keyword[None] ): literal[string] keyword[if] identifier[until] keyword[is] keyword[None] : keyword[return] identifier[sum] ( literal[int] keyword[for] identifier[_] keyword[in] identifier[GraphemeIterator] ( identifier[string] )) identifier[iterator] = identifier[graphemes] ( identifier[string] ) identifier[count] = literal[int] keyword[while] keyword[True] : keyword[try] : keyword[if] identifier[count] >= identifier[until] : keyword[break] identifier[next] ( identifier[iterator] ) keyword[except] identifier[StopIteration] : keyword[break] keyword[else] : identifier[count] += literal[int] keyword[return] identifier[count]
def length(string, until=None): """ Returns the number of graphemes in the string. Note that this functions needs to traverse the full string to calculate the length, unlike `len(string)` and it's time consumption is linear to the length of the string (up to the `until` value). Only counts up to the `until` argument, if given. This is useful when testing the length of a string against some limit and the excess length is not interesting. >>> rainbow_flag = "🏳️\u200d🌈" >>> len(rainbow_flag) 4 >>> graphemes.length(rainbow_flag) 1 >>> graphemes.length("".join(str(i) for i in range(100)), 30) 30 """ if until is None: return sum((1 for _ in GraphemeIterator(string))) # depends on [control=['if'], data=[]] iterator = graphemes(string) count = 0 while True: try: if count >= until: break # depends on [control=['if'], data=[]] next(iterator) # depends on [control=['try'], data=[]] except StopIteration: break # depends on [control=['except'], data=[]] else: count += 1 # depends on [control=['while'], data=[]] return count
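The `until` parameter makes limit checks cheap on long strings: counting can stop at `limit + 1` graphemes instead of walking the whole input. A small sketch of that pattern, assuming the `length` above is importable:

def fits_limit(s, limit):
    # Count at most limit + 1 graphemes; anything past the limit is irrelevant.
    return length(s, limit + 1) <= limit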
def receive(self, command_id, streams=('stdout', 'stderr'), command_timeout=60):
    """
    Receives data

    :param command_id:
    :param streams:
    :param command_timeout:
    :return:
    """
    logging.info('receive command: ' + command_id)

    response_streams = dict.fromkeys(streams, '')
    (complete, exit_code) = self._receive_poll(command_id, response_streams)
    while not complete:
        (complete, exit_code) = self._receive_poll(command_id, response_streams)

    # This retains some compatibility with pywinrm
    if sorted(response_streams.keys()) == sorted(['stderr', 'stdout']):
        return response_streams['stdout'], response_streams['stderr'], exit_code
    else:
        return response_streams, exit_code
def function[receive, parameter[self, command_id, streams, command_timeout]]: constant[ Recieves data :param command_id: :param streams: :param command_timeout: :return: ] call[name[logging].info, parameter[binary_operation[constant[receive command: ] + name[command_id]]]] variable[response_streams] assign[=] call[name[dict].fromkeys, parameter[name[streams], constant[]]] <ast.Tuple object at 0x7da1b0c34160> assign[=] call[name[self]._receive_poll, parameter[name[command_id], name[response_streams]]] while <ast.UnaryOp object at 0x7da1b0b569e0> begin[:] <ast.Tuple object at 0x7da1b0b56a70> assign[=] call[name[self]._receive_poll, parameter[name[command_id], name[response_streams]]] if compare[call[name[sorted], parameter[call[name[response_streams].keys, parameter[]]]] equal[==] call[name[sorted], parameter[list[[<ast.Constant object at 0x7da1b0b55ff0>, <ast.Constant object at 0x7da1b0b54a00>]]]]] begin[:] return[tuple[[<ast.Subscript object at 0x7da1b0b56440>, <ast.Subscript object at 0x7da1b0b56080>, <ast.Name object at 0x7da1b0b55420>]]]
keyword[def] identifier[receive] ( identifier[self] , identifier[command_id] , identifier[streams] =( literal[string] , literal[string] ), identifier[command_timeout] = literal[int] ): literal[string] identifier[logging] . identifier[info] ( literal[string] + identifier[command_id] ) identifier[response_streams] = identifier[dict] . identifier[fromkeys] ( identifier[streams] , literal[string] ) ( identifier[complete] , identifier[exit_code] )= identifier[self] . identifier[_receive_poll] ( identifier[command_id] , identifier[response_streams] ) keyword[while] keyword[not] identifier[complete] : ( identifier[complete] , identifier[exit_code] )= identifier[self] . identifier[_receive_poll] ( identifier[command_id] , identifier[response_streams] ) keyword[if] identifier[sorted] ( identifier[response_streams] . identifier[keys] ())== identifier[sorted] ([ literal[string] , literal[string] ]): keyword[return] identifier[response_streams] [ literal[string] ], identifier[response_streams] [ literal[string] ], identifier[exit_code] keyword[else] : keyword[return] identifier[response_streams] , identifier[exit_code]
def receive(self, command_id, streams=('stdout', 'stderr'), command_timeout=60): """ Recieves data :param command_id: :param streams: :param command_timeout: :return: """ logging.info('receive command: ' + command_id) response_streams = dict.fromkeys(streams, '') (complete, exit_code) = self._receive_poll(command_id, response_streams) while not complete: (complete, exit_code) = self._receive_poll(command_id, response_streams) # depends on [control=['while'], data=[]] # This retains some compatibility with pywinrm if sorted(response_streams.keys()) == sorted(['stderr', 'stdout']): return (response_streams['stdout'], response_streams['stderr'], exit_code) # depends on [control=['if'], data=[]] else: return (response_streams, exit_code)
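A hedged note on the pywinrm-compatibility branch at the end: the return shape depends on the `streams` argument. `shell` and `command_id` below are hypothetical stand-ins for an instance of the owning class and a running command.

# Default streams: unpack pywinrm-style as a 3-tuple.
stdout, stderr, rc = shell.receive(command_id)

# Custom streams: a dict keyed by stream name, plus the exit code.
streams, rc = shell.receive(command_id, streams=("stdout",))
print(streams["stdout"], rc)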
def _check_for_int(x):
    """
    This is a compatibility function that takes a C{float} and converts it to
    an C{int} if the values are equal.
    """
    try:
        y = int(x)
    except (OverflowError, ValueError):
        pass
    else:
        # There is no way in AMF0 to distinguish between integers and floats
        if x == x and y == x:
            return y

    return x
def function[_check_for_int, parameter[x]]: constant[ This is a compatibility function that takes a C{float} and converts it to an C{int} if the values are equal. ] <ast.Try object at 0x7da1b2345c00> return[name[x]]
keyword[def] identifier[_check_for_int] ( identifier[x] ): literal[string] keyword[try] : identifier[y] = identifier[int] ( identifier[x] ) keyword[except] ( identifier[OverflowError] , identifier[ValueError] ): keyword[pass] keyword[else] : keyword[if] identifier[x] == identifier[x] keyword[and] identifier[y] == identifier[x] : keyword[return] identifier[y] keyword[return] identifier[x]
def _check_for_int(x): """ This is a compatibility function that takes a C{float} and converts it to an C{int} if the values are equal. """ try: y = int(x) # depends on [control=['try'], data=[]] except (OverflowError, ValueError): pass # depends on [control=['except'], data=[]] else: # There is no way in AMF0 to distinguish between integers and floats if x == x and y == x: return y # depends on [control=['if'], data=[]] return x
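A quick behavioral sketch of the edge cases the try/except guards, assuming `_check_for_int` above is in scope; `int(nan)` raises ValueError and `int(inf)` raises OverflowError, so both fall through unchanged.

import math

assert _check_for_int(3.0) == 3 and isinstance(_check_for_int(3.0), int)
assert _check_for_int(3.5) == 3.5                  # unequal values stay float
assert math.isnan(_check_for_int(float("nan")))    # int(nan) raises ValueError -> x returned
assert math.isinf(_check_for_int(float("inf")))    # int(inf) raises OverflowError -> x returned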
def from_bytes(b):
    """ Generates either a HDPrivateKey or HDPublicKey from the underlying
    bytes.

    The serialization must conform to the description in:
    https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#serialization-format

    Args:
        b (bytes): A byte stream conforming to the above.

    Returns:
        HDPrivateKey or HDPublicKey:
            Either an HD private or public key object, depending on what was
            serialized.
    """
    if len(b) < 78:
        raise ValueError("b must be at least 78 bytes long.")

    version = int.from_bytes(b[:4], 'big')
    depth = b[4]
    parent_fingerprint = b[5:9]
    index = int.from_bytes(b[9:13], 'big')
    chain_code = b[13:45]
    key_bytes = b[45:78]

    rv = None
    if version == HDPrivateKey.MAINNET_VERSION or version == HDPrivateKey.TESTNET_VERSION:
        if key_bytes[0] != 0:
            raise ValueError("First byte of private key must be 0x00!")

        private_key = int.from_bytes(key_bytes[1:], 'big')
        rv = HDPrivateKey(key=private_key,
                          chain_code=chain_code,
                          index=index,
                          depth=depth,
                          parent_fingerprint=parent_fingerprint)
    elif version == HDPublicKey.MAINNET_VERSION or version == HDPublicKey.TESTNET_VERSION:
        if key_bytes[0] != 0x02 and key_bytes[0] != 0x03:
            raise ValueError("First byte of public key must be 0x02 or 0x03!")

        public_key = PublicKey.from_bytes(key_bytes)
        rv = HDPublicKey(x=public_key.point.x,
                         y=public_key.point.y,
                         chain_code=chain_code,
                         index=index,
                         depth=depth,
                         parent_fingerprint=parent_fingerprint)
    else:
        raise ValueError("incorrect encoding.")

    return rv
def function[from_bytes, parameter[b]]: constant[ Generates either a HDPrivateKey or HDPublicKey from the underlying bytes. The serialization must conform to the description in: https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#serialization-format Args: b (bytes): A byte stream conforming to the above. Returns: HDPrivateKey or HDPublicKey: Either an HD private or public key object, depending on what was serialized. ] if compare[call[name[len], parameter[name[b]]] less[<] constant[78]] begin[:] <ast.Raise object at 0x7da1b1da3190> variable[version] assign[=] call[name[int].from_bytes, parameter[call[name[b]][<ast.Slice object at 0x7da1b1da17e0>], constant[big]]] variable[depth] assign[=] call[name[b]][constant[4]] variable[parent_fingerprint] assign[=] call[name[b]][<ast.Slice object at 0x7da1b1da3a00>] variable[index] assign[=] call[name[int].from_bytes, parameter[call[name[b]][<ast.Slice object at 0x7da1b1da37c0>], constant[big]]] variable[chain_code] assign[=] call[name[b]][<ast.Slice object at 0x7da1b1da3880>] variable[key_bytes] assign[=] call[name[b]][<ast.Slice object at 0x7da1b1da1030>] variable[rv] assign[=] constant[None] if <ast.BoolOp object at 0x7da1b1da3970> begin[:] if compare[call[name[key_bytes]][constant[0]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b1da34c0> variable[private_key] assign[=] call[name[int].from_bytes, parameter[call[name[key_bytes]][<ast.Slice object at 0x7da1b1da2dd0>], constant[big]]] variable[rv] assign[=] call[name[HDPrivateKey], parameter[]] return[name[rv]]
keyword[def] identifier[from_bytes] ( identifier[b] ): literal[string] keyword[if] identifier[len] ( identifier[b] )< literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[version] = identifier[int] . identifier[from_bytes] ( identifier[b] [: literal[int] ], literal[string] ) identifier[depth] = identifier[b] [ literal[int] ] identifier[parent_fingerprint] = identifier[b] [ literal[int] : literal[int] ] identifier[index] = identifier[int] . identifier[from_bytes] ( identifier[b] [ literal[int] : literal[int] ], literal[string] ) identifier[chain_code] = identifier[b] [ literal[int] : literal[int] ] identifier[key_bytes] = identifier[b] [ literal[int] : literal[int] ] identifier[rv] = keyword[None] keyword[if] identifier[version] == identifier[HDPrivateKey] . identifier[MAINNET_VERSION] keyword[or] identifier[version] == identifier[HDPrivateKey] . identifier[TESTNET_VERSION] : keyword[if] identifier[key_bytes] [ literal[int] ]!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[private_key] = identifier[int] . identifier[from_bytes] ( identifier[key_bytes] [ literal[int] :], literal[string] ) identifier[rv] = identifier[HDPrivateKey] ( identifier[key] = identifier[private_key] , identifier[chain_code] = identifier[chain_code] , identifier[index] = identifier[index] , identifier[depth] = identifier[depth] , identifier[parent_fingerprint] = identifier[parent_fingerprint] ) keyword[elif] identifier[version] == identifier[HDPublicKey] . identifier[MAINNET_VERSION] keyword[or] identifier[version] == identifier[HDPublicKey] . identifier[TESTNET_VERSION] : keyword[if] identifier[key_bytes] [ literal[int] ]!= literal[int] keyword[and] identifier[key_bytes] [ literal[int] ]!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[public_key] = identifier[PublicKey] . identifier[from_bytes] ( identifier[key_bytes] ) identifier[rv] = identifier[HDPublicKey] ( identifier[x] = identifier[public_key] . identifier[point] . identifier[x] , identifier[y] = identifier[public_key] . identifier[point] . identifier[y] , identifier[chain_code] = identifier[chain_code] , identifier[index] = identifier[index] , identifier[depth] = identifier[depth] , identifier[parent_fingerprint] = identifier[parent_fingerprint] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[rv]
def from_bytes(b): """ Generates either a HDPrivateKey or HDPublicKey from the underlying bytes. The serialization must conform to the description in: https://github.com/bitcoin/bips/blob/master/bip-0032.mediawiki#serialization-format Args: b (bytes): A byte stream conforming to the above. Returns: HDPrivateKey or HDPublicKey: Either an HD private or public key object, depending on what was serialized. """ if len(b) < 78: raise ValueError('b must be at least 78 bytes long.') # depends on [control=['if'], data=[]] version = int.from_bytes(b[:4], 'big') depth = b[4] parent_fingerprint = b[5:9] index = int.from_bytes(b[9:13], 'big') chain_code = b[13:45] key_bytes = b[45:78] rv = None if version == HDPrivateKey.MAINNET_VERSION or version == HDPrivateKey.TESTNET_VERSION: if key_bytes[0] != 0: raise ValueError('First byte of private key must be 0x00!') # depends on [control=['if'], data=[]] private_key = int.from_bytes(key_bytes[1:], 'big') rv = HDPrivateKey(key=private_key, chain_code=chain_code, index=index, depth=depth, parent_fingerprint=parent_fingerprint) # depends on [control=['if'], data=[]] elif version == HDPublicKey.MAINNET_VERSION or version == HDPublicKey.TESTNET_VERSION: if key_bytes[0] != 2 and key_bytes[0] != 3: raise ValueError('First byte of public key must be 0x02 or 0x03!') # depends on [control=['if'], data=[]] public_key = PublicKey.from_bytes(key_bytes) rv = HDPublicKey(x=public_key.point.x, y=public_key.point.y, chain_code=chain_code, index=index, depth=depth, parent_fingerprint=parent_fingerprint) # depends on [control=['if'], data=[]] else: raise ValueError('incorrect encoding.') return rv
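For orientation, the slices above follow the BIP-0032 extended key layout (78 bytes total):

# b[0:4]   version bytes (mainnet/testnet, private/public)
# b[4]     depth in the derivation tree
# b[5:9]   parent key fingerprint
# b[9:13]  child index, big-endian
# b[13:45] chain code (32 bytes)
# b[45:78] key material: 0x00 || private key, or a compressed public key (0x02/0x03 prefix)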
def cudnnCreateFilterDescriptor():
    """
    Create a filter descriptor.

    This function creates a filter descriptor object by allocating the memory
    needed to hold its opaque structure.

    Parameters
    ----------

    Returns
    -------
    wDesc : cudnnFilterDescriptor
        Handle to a newly allocated filter descriptor.
    """
    wDesc = ctypes.c_void_p()
    status = _libcudnn.cudnnCreateFilterDescriptor(ctypes.byref(wDesc))
    cudnnCheckStatus(status)
    return wDesc.value
def function[cudnnCreateFilterDescriptor, parameter[]]: constant[" Create a filter descriptor. This function creates a filter descriptor object by allocating the memory needed to hold its opaque structure. Parameters ---------- Returns ------- wDesc : cudnnFilterDescriptor Handle to a newly allocated filter descriptor. ] variable[wDesc] assign[=] call[name[ctypes].c_void_p, parameter[]] variable[status] assign[=] call[name[_libcudnn].cudnnCreateFilterDescriptor, parameter[call[name[ctypes].byref, parameter[name[wDesc]]]]] call[name[cudnnCheckStatus], parameter[name[status]]] return[name[wDesc].value]
keyword[def] identifier[cudnnCreateFilterDescriptor] (): literal[string] identifier[wDesc] = identifier[ctypes] . identifier[c_void_p] () identifier[status] = identifier[_libcudnn] . identifier[cudnnCreateFilterDescriptor] ( identifier[ctypes] . identifier[byref] ( identifier[wDesc] )) identifier[cudnnCheckStatus] ( identifier[status] ) keyword[return] identifier[wDesc] . identifier[value]
def cudnnCreateFilterDescriptor(): """" Create a filter descriptor. This function creates a filter descriptor object by allocating the memory needed to hold its opaque structure. Parameters ---------- Returns ------- wDesc : cudnnFilterDescriptor Handle to a newly allocated filter descriptor. """ wDesc = ctypes.c_void_p() status = _libcudnn.cudnnCreateFilterDescriptor(ctypes.byref(wDesc)) cudnnCheckStatus(status) return wDesc.value
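Descriptor handles are normally paired with a destroy call; a hedged sketch, assuming this wrapper module also exposes a matching `cudnnDestroyFilterDescriptor`:

wDesc = cudnnCreateFilterDescriptor()
try:
    # ... set the filter format/dimensions, then pass wDesc to cuDNN calls ...
    pass
finally:
    cudnnDestroyFilterDescriptor(wDesc)  # assumed companion wrapper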
def _freeze(self) -> OrderedDict:
    """ Evaluate all of the column values and return the result

    :return: column/value tuples
    """
    return OrderedDict(**{k: getattr(self, k, None)
                          for k in super().__getattribute__("_columns")})
def function[_freeze, parameter[self]]: constant[ Evaluate all of the column values and return the result :return: column/value tuples ] return[call[name[OrderedDict], parameter[]]]
keyword[def] identifier[_freeze] ( identifier[self] )-> identifier[OrderedDict] : literal[string] keyword[return] identifier[OrderedDict] (**{ identifier[k] : identifier[getattr] ( identifier[self] , identifier[k] , keyword[None] ) keyword[for] identifier[k] keyword[in] identifier[super] (). identifier[__getattribute__] ( literal[string] )})
def _freeze(self) -> OrderedDict: """ Evaluate all of the column values and return the result :return: column/value tuples """ return OrderedDict(**{k: getattr(self, k, None) for k in super().__getattribute__('_columns')})
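The `super().__getattribute__("_columns")` read bypasses any `__getattribute__` override on the host class, while `getattr(self, k, None)` deliberately goes through it so lazy column values get evaluated. A toy, self-contained illustration of the contract (hypothetical `Row` class; plain `self._columns` stands in for the bypassing lookup):

from collections import OrderedDict

class Row:
    _columns = ("id", "name")  # hypothetical column registry

    def _freeze(self) -> OrderedDict:
        return OrderedDict(**{k: getattr(self, k, None) for k in self._columns})

row = Row()
row.id = 1                     # "name" is deliberately left unset
print(row._freeze())           # OrderedDict([('id', 1), ('name', None)])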
def SetSelected(self, node):
    """Set our selected node"""
    self.selected_node = node
    index = self.NodeToIndex(node)
    if index != -1:
        self.Focus(index)
        self.Select(index, True)
    return index
def function[SetSelected, parameter[self, node]]: constant[Set our selected node] name[self].selected_node assign[=] name[node] variable[index] assign[=] call[name[self].NodeToIndex, parameter[name[node]]] if compare[name[index] not_equal[!=] <ast.UnaryOp object at 0x7da18f00d540>] begin[:] call[name[self].Focus, parameter[name[index]]] call[name[self].Select, parameter[name[index], constant[True]]] return[name[index]]
keyword[def] identifier[SetSelected] ( identifier[self] , identifier[node] ): literal[string] identifier[self] . identifier[selected_node] = identifier[node] identifier[index] = identifier[self] . identifier[NodeToIndex] ( identifier[node] ) keyword[if] identifier[index] !=- literal[int] : identifier[self] . identifier[Focus] ( identifier[index] ) identifier[self] . identifier[Select] ( identifier[index] , keyword[True] ) keyword[return] identifier[index]
def SetSelected(self, node): """Set our selected node""" self.selected_node = node index = self.NodeToIndex(node) if index != -1: self.Focus(index) self.Select(index, True) # depends on [control=['if'], data=['index']] return index
def find_available_vc_vers(self):
    """
    Find all available Microsoft Visual C++ versions.
    """
    ms = self.ri.microsoft
    vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
    vc_vers = []
    for hkey in self.ri.HKEYS:
        for key in vckeys:
            try:
                bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
            except (OSError, IOError):
                continue
            subkeys, values, _ = winreg.QueryInfoKey(bkey)
            for i in range(values):
                try:
                    ver = float(winreg.EnumValue(bkey, i)[0])
                    if ver not in vc_vers:
                        vc_vers.append(ver)
                except ValueError:
                    pass
            for i in range(subkeys):
                try:
                    ver = float(winreg.EnumKey(bkey, i))
                    if ver not in vc_vers:
                        vc_vers.append(ver)
                except ValueError:
                    pass
    return sorted(vc_vers)
def function[find_available_vc_vers, parameter[self]]: constant[ Find all available Microsoft Visual C++ versions. ] variable[ms] assign[=] name[self].ri.microsoft variable[vckeys] assign[=] tuple[[<ast.Attribute object at 0x7da1b1b87820>, <ast.Attribute object at 0x7da1b1b86f80>, <ast.Attribute object at 0x7da1b1b877c0>]] variable[vc_vers] assign[=] list[[]] for taget[name[hkey]] in starred[name[self].ri.HKEYS] begin[:] for taget[name[key]] in starred[name[vckeys]] begin[:] <ast.Try object at 0x7da1b1b84ee0> <ast.Tuple object at 0x7da1b1b873a0> assign[=] call[name[winreg].QueryInfoKey, parameter[name[bkey]]] for taget[name[i]] in starred[call[name[range], parameter[name[values]]]] begin[:] <ast.Try object at 0x7da1b1b84610> for taget[name[i]] in starred[call[name[range], parameter[name[subkeys]]]] begin[:] <ast.Try object at 0x7da1b1b86110> return[call[name[sorted], parameter[name[vc_vers]]]]
keyword[def] identifier[find_available_vc_vers] ( identifier[self] ): literal[string] identifier[ms] = identifier[self] . identifier[ri] . identifier[microsoft] identifier[vckeys] =( identifier[self] . identifier[ri] . identifier[vc] , identifier[self] . identifier[ri] . identifier[vc_for_python] , identifier[self] . identifier[ri] . identifier[vs] ) identifier[vc_vers] =[] keyword[for] identifier[hkey] keyword[in] identifier[self] . identifier[ri] . identifier[HKEYS] : keyword[for] identifier[key] keyword[in] identifier[vckeys] : keyword[try] : identifier[bkey] = identifier[winreg] . identifier[OpenKey] ( identifier[hkey] , identifier[ms] ( identifier[key] ), literal[int] , identifier[winreg] . identifier[KEY_READ] ) keyword[except] ( identifier[OSError] , identifier[IOError] ): keyword[continue] identifier[subkeys] , identifier[values] , identifier[_] = identifier[winreg] . identifier[QueryInfoKey] ( identifier[bkey] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[values] ): keyword[try] : identifier[ver] = identifier[float] ( identifier[winreg] . identifier[EnumValue] ( identifier[bkey] , identifier[i] )[ literal[int] ]) keyword[if] identifier[ver] keyword[not] keyword[in] identifier[vc_vers] : identifier[vc_vers] . identifier[append] ( identifier[ver] ) keyword[except] identifier[ValueError] : keyword[pass] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[subkeys] ): keyword[try] : identifier[ver] = identifier[float] ( identifier[winreg] . identifier[EnumKey] ( identifier[bkey] , identifier[i] )) keyword[if] identifier[ver] keyword[not] keyword[in] identifier[vc_vers] : identifier[vc_vers] . identifier[append] ( identifier[ver] ) keyword[except] identifier[ValueError] : keyword[pass] keyword[return] identifier[sorted] ( identifier[vc_vers] )
def find_available_vc_vers(self): """ Find all available Microsoft Visual C++ versions. """ ms = self.ri.microsoft vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs) vc_vers = [] for hkey in self.ri.HKEYS: for key in vckeys: try: bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ) # depends on [control=['try'], data=[]] except (OSError, IOError): continue # depends on [control=['except'], data=[]] (subkeys, values, _) = winreg.QueryInfoKey(bkey) for i in range(values): try: ver = float(winreg.EnumValue(bkey, i)[0]) if ver not in vc_vers: vc_vers.append(ver) # depends on [control=['if'], data=['ver', 'vc_vers']] # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']] for i in range(subkeys): try: ver = float(winreg.EnumKey(bkey, i)) if ver not in vc_vers: vc_vers.append(ver) # depends on [control=['if'], data=['ver', 'vc_vers']] # depends on [control=['try'], data=[]] except ValueError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['hkey']] return sorted(vc_vers)
def network(self, network_id):
    """Returns :class:`Network` instance for ``network_id``

    :type network_id: str
    :param network_id: This is the ID of the network. This can be found
        by visiting your class page on Piazza's web UI and grabbing it
        from https://piazza.com/class/{network_id}
    """
    self._ensure_authenticated()
    return Network(network_id, self._rpc_api.session)
def function[network, parameter[self, network_id]]: constant[Returns :class:`Network` instance for ``network_id`` :type network_id: str :param network_id: This is the ID of the network. This can be found by visiting your class page on Piazza's web UI and grabbing it from https://piazza.com/class/{network_id} ] call[name[self]._ensure_authenticated, parameter[]] return[call[name[Network], parameter[name[network_id], name[self]._rpc_api.session]]]
keyword[def] identifier[network] ( identifier[self] , identifier[network_id] ): literal[string] identifier[self] . identifier[_ensure_authenticated] () keyword[return] identifier[Network] ( identifier[network_id] , identifier[self] . identifier[_rpc_api] . identifier[session] )
def network(self, network_id): """Returns :class:`Network` instance for ``network_id`` :type network_id: str :param network_id: This is the ID of the network. This can be found by visiting your class page on Piazza's web UI and grabbing it from https://piazza.com/class/{network_id} """ self._ensure_authenticated() return Network(network_id, self._rpc_api.session)
def mirror_sources(self, sourcedir, targetdir=None, recursive=True, excludes=[]):
    """
    Mirroring compilable sources filepaths to their targets.

    Args:
        sourcedir (str): Directory path to scan.

    Keyword Arguments:
        targetdir (str): Directory path for targets; if given, returned
            paths will be absolute (see Returns).
        recursive (bool): Switch to enable recursive finding (if True).
            Default to True.
        excludes (list): A list of excluding patterns (glob patterns).
            Patterns are matched against the relative filepath (from its
            sourcedir).

    Returns:
        list: A list of pairs ``(source, target)``. Where ``target`` is the
        ``source`` path but renamed with ``.css`` extension. Relative
        directory from source dir is left unchanged but if given, returned
        paths will be absolute (using ``sourcedir`` for sources and
        ``targetdir`` for targets).
    """
    sources = self.compilable_sources(
        sourcedir,
        absolute=False,
        recursive=recursive,
        excludes=excludes
    )
    maplist = []
    for filepath in sources:
        src = filepath
        dst = self.get_destination(src, targetdir=targetdir)
        # In absolute mode
        if targetdir:
            src = os.path.join(sourcedir, src)
        maplist.append((src, dst))
    return maplist
def function[mirror_sources, parameter[self, sourcedir, targetdir, recursive, excludes]]: constant[ Mirroring compilable sources filepaths to their targets. Args: sourcedir (str): Directory path to scan. Keyword Arguments: absolute (bool): Returned paths will be absolute using ``sourcedir`` argument (if True), else return relative paths. recursive (bool): Switch to enabled recursive finding (if True). Default to True. excludes (list): A list of excluding patterns (glob patterns). Patterns are matched against the relative filepath (from its sourcedir). Returns: list: A list of pairs ``(source, target)``. Where ``target`` is the ``source`` path but renamed with ``.css`` extension. Relative directory from source dir is left unchanged but if given, returned paths will be absolute (using ``sourcedir`` for sources and ``targetdir`` for targets). ] variable[sources] assign[=] call[name[self].compilable_sources, parameter[name[sourcedir]]] variable[maplist] assign[=] list[[]] for taget[name[filepath]] in starred[name[sources]] begin[:] variable[src] assign[=] name[filepath] variable[dst] assign[=] call[name[self].get_destination, parameter[name[src]]] if name[targetdir] begin[:] variable[src] assign[=] call[name[os].path.join, parameter[name[sourcedir], name[src]]] call[name[maplist].append, parameter[tuple[[<ast.Name object at 0x7da1b0aa7820>, <ast.Name object at 0x7da1b0aa5660>]]]] return[name[maplist]]
keyword[def] identifier[mirror_sources] ( identifier[self] , identifier[sourcedir] , identifier[targetdir] = keyword[None] , identifier[recursive] = keyword[True] , identifier[excludes] =[]): literal[string] identifier[sources] = identifier[self] . identifier[compilable_sources] ( identifier[sourcedir] , identifier[absolute] = keyword[False] , identifier[recursive] = identifier[recursive] , identifier[excludes] = identifier[excludes] ) identifier[maplist] =[] keyword[for] identifier[filepath] keyword[in] identifier[sources] : identifier[src] = identifier[filepath] identifier[dst] = identifier[self] . identifier[get_destination] ( identifier[src] , identifier[targetdir] = identifier[targetdir] ) keyword[if] identifier[targetdir] : identifier[src] = identifier[os] . identifier[path] . identifier[join] ( identifier[sourcedir] , identifier[src] ) identifier[maplist] . identifier[append] (( identifier[src] , identifier[dst] )) keyword[return] identifier[maplist]
def mirror_sources(self, sourcedir, targetdir=None, recursive=True, excludes=[]): """ Mirroring compilable sources filepaths to their targets. Args: sourcedir (str): Directory path to scan. Keyword Arguments: absolute (bool): Returned paths will be absolute using ``sourcedir`` argument (if True), else return relative paths. recursive (bool): Switch to enabled recursive finding (if True). Default to True. excludes (list): A list of excluding patterns (glob patterns). Patterns are matched against the relative filepath (from its sourcedir). Returns: list: A list of pairs ``(source, target)``. Where ``target`` is the ``source`` path but renamed with ``.css`` extension. Relative directory from source dir is left unchanged but if given, returned paths will be absolute (using ``sourcedir`` for sources and ``targetdir`` for targets). """ sources = self.compilable_sources(sourcedir, absolute=False, recursive=recursive, excludes=excludes) maplist = [] for filepath in sources: src = filepath dst = self.get_destination(src, targetdir=targetdir) # In absolute mode if targetdir: src = os.path.join(sourcedir, src) # depends on [control=['if'], data=[]] maplist.append((src, dst)) # depends on [control=['for'], data=['filepath']] return maplist
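A hedged illustration of the returned pairs (made-up paths; assumes `.scss` sources and a `get_destination` that swaps in the `.css` extension as the docstring describes):

# mirror_sources("scss", targetdir="/out/css") might return, e.g.:
#   [("scss/app.scss", "/out/css/app.css"),
#    ("scss/ui/nav.scss", "/out/css/ui/nav.css")]
# Without targetdir, both sides stay relative: [("app.scss", "app.css"), ...]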
def _get_nearest_indexer(self, target, limit, tolerance):
    """
    Get the indexer for the nearest index labels; requires an index with
    values that can be subtracted from each other (e.g., not strings or
    tuples).
    """
    left_indexer = self.get_indexer(target, 'pad', limit=limit)
    right_indexer = self.get_indexer(target, 'backfill', limit=limit)

    target = np.asarray(target)
    left_distances = abs(self.values[left_indexer] - target)
    right_distances = abs(self.values[right_indexer] - target)

    op = operator.lt if self.is_monotonic_increasing else operator.le
    indexer = np.where(op(left_distances, right_distances) |
                       (right_indexer == -1), left_indexer, right_indexer)
    if tolerance is not None:
        indexer = self._filter_indexer_tolerance(target, indexer, tolerance)
    return indexer
def function[_get_nearest_indexer, parameter[self, target, limit, tolerance]]: constant[ Get the indexer for the nearest index labels; requires an index with values that can be subtracted from each other (e.g., not strings or tuples). ] variable[left_indexer] assign[=] call[name[self].get_indexer, parameter[name[target], constant[pad]]] variable[right_indexer] assign[=] call[name[self].get_indexer, parameter[name[target], constant[backfill]]] variable[target] assign[=] call[name[np].asarray, parameter[name[target]]] variable[left_distances] assign[=] call[name[abs], parameter[binary_operation[call[name[self].values][name[left_indexer]] - name[target]]]] variable[right_distances] assign[=] call[name[abs], parameter[binary_operation[call[name[self].values][name[right_indexer]] - name[target]]]] variable[op] assign[=] <ast.IfExp object at 0x7da1b2347ca0> variable[indexer] assign[=] call[name[np].where, parameter[binary_operation[call[name[op], parameter[name[left_distances], name[right_distances]]] <ast.BitOr object at 0x7da2590d6aa0> compare[name[right_indexer] equal[==] <ast.UnaryOp object at 0x7da18fe92d70>]], name[left_indexer], name[right_indexer]]] if compare[name[tolerance] is_not constant[None]] begin[:] variable[indexer] assign[=] call[name[self]._filter_indexer_tolerance, parameter[name[target], name[indexer], name[tolerance]]] return[name[indexer]]
keyword[def] identifier[_get_nearest_indexer] ( identifier[self] , identifier[target] , identifier[limit] , identifier[tolerance] ): literal[string] identifier[left_indexer] = identifier[self] . identifier[get_indexer] ( identifier[target] , literal[string] , identifier[limit] = identifier[limit] ) identifier[right_indexer] = identifier[self] . identifier[get_indexer] ( identifier[target] , literal[string] , identifier[limit] = identifier[limit] ) identifier[target] = identifier[np] . identifier[asarray] ( identifier[target] ) identifier[left_distances] = identifier[abs] ( identifier[self] . identifier[values] [ identifier[left_indexer] ]- identifier[target] ) identifier[right_distances] = identifier[abs] ( identifier[self] . identifier[values] [ identifier[right_indexer] ]- identifier[target] ) identifier[op] = identifier[operator] . identifier[lt] keyword[if] identifier[self] . identifier[is_monotonic_increasing] keyword[else] identifier[operator] . identifier[le] identifier[indexer] = identifier[np] . identifier[where] ( identifier[op] ( identifier[left_distances] , identifier[right_distances] )| ( identifier[right_indexer] ==- literal[int] ), identifier[left_indexer] , identifier[right_indexer] ) keyword[if] identifier[tolerance] keyword[is] keyword[not] keyword[None] : identifier[indexer] = identifier[self] . identifier[_filter_indexer_tolerance] ( identifier[target] , identifier[indexer] , identifier[tolerance] ) keyword[return] identifier[indexer]
def _get_nearest_indexer(self, target, limit, tolerance): """ Get the indexer for the nearest index labels; requires an index with values that can be subtracted from each other (e.g., not strings or tuples). """ left_indexer = self.get_indexer(target, 'pad', limit=limit) right_indexer = self.get_indexer(target, 'backfill', limit=limit) target = np.asarray(target) left_distances = abs(self.values[left_indexer] - target) right_distances = abs(self.values[right_indexer] - target) op = operator.lt if self.is_monotonic_increasing else operator.le indexer = np.where(op(left_distances, right_distances) | (right_indexer == -1), left_indexer, right_indexer) if tolerance is not None: indexer = self._filter_indexer_tolerance(target, indexer, tolerance) # depends on [control=['if'], data=['tolerance']] return indexer
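The selection rule is plain NumPy once the pad/backfill indexers exist; a standalone sketch (not pandas itself) of the tie-breaking, where strict less-than sends ties to the backfill side on a monotonic increasing index:

import numpy as np
import operator

values = np.array([0.0, 10.0])         # index labels
target = np.array([4.0, 5.0, 6.0])     # query points

left = np.array([0, 0, 0])             # 'pad' indexer: last label <= target
right = np.array([1, 1, 1])            # 'backfill' indexer: first label >= target

op = operator.lt                       # monotonic increasing -> ties go right
choose_left = op(np.abs(values[left] - target),
                 np.abs(values[right] - target)) | (right == -1)
print(np.where(choose_left, left, right))  # [0 1 1]: 4 snaps to 0; 5 and 6 snap to 10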
def bel_edges(
    self,
    nanopub: Mapping[str, Any],
    namespace_targets: Mapping[str, List[str]] = {},
    rules: List[str] = [],
    orthologize_target: str = None,
) -> List[Mapping[str, Any]]:
    """Create BEL Edges from BEL nanopub

    Args:
        nanopub (Mapping[str, Any]): bel nanopub
        namespace_targets (Mapping[str, List[str]]): what namespaces to canonicalize
        rules (List[str]): which computed edge rules to process, default is all,
            look at BEL Specification yaml file for computed edge signature keys,
            e.g. degradation, if any rule in list is 'skip', then skip computing
            edges just return primary_edge
        orthologize_target (str): species to convert BEL into, e.g. TAX:10090 for
            mouse, default option does not orthologize

    Returns:
        List[Mapping[str, Any]]: edge list with edge attributes (e.g. context)
    """
    edges = bel.edge.edges.create_edges(
        nanopub,
        self.endpoint,
        namespace_targets=namespace_targets,
        rules=rules,
        orthologize_target=orthologize_target,
    )
    return edges
def function[bel_edges, parameter[self, nanopub, namespace_targets, rules, orthologize_target]]: constant[Create BEL Edges from BEL nanopub Args: nanopub (Mapping[str, Any]): bel nanopub namespace_targets (Mapping[str, List[str]]): what namespaces to canonicalize rules (List[str]): which computed edge rules to process, default is all, look at BEL Specification yaml file for computed edge signature keys, e.g. degradation, if any rule in list is 'skip', then skip computing edges just return primary_edge orthologize_target (str): species to convert BEL into, e.g. TAX:10090 for mouse, default option does not orthologize Returns: List[Mapping[str, Any]]: edge list with edge attributes (e.g. context) ] variable[edges] assign[=] call[name[bel].edge.edges.create_edges, parameter[name[nanopub], name[self].endpoint]] return[name[edges]]
keyword[def] identifier[bel_edges] ( identifier[self] , identifier[nanopub] : identifier[Mapping] [ identifier[str] , identifier[Any] ], identifier[namespace_targets] : identifier[Mapping] [ identifier[str] , identifier[List] [ identifier[str] ]]={}, identifier[rules] : identifier[List] [ identifier[str] ]=[], identifier[orthologize_target] : identifier[str] = keyword[None] , )-> identifier[List] [ identifier[Mapping] [ identifier[str] , identifier[Any] ]]: literal[string] identifier[edges] = identifier[bel] . identifier[edge] . identifier[edges] . identifier[create_edges] ( identifier[nanopub] , identifier[self] . identifier[endpoint] , identifier[namespace_targets] = identifier[namespace_targets] , identifier[rules] = identifier[rules] , identifier[orthologize_target] = identifier[orthologize_target] , ) keyword[return] identifier[edges]
def bel_edges(self, nanopub: Mapping[str, Any], namespace_targets: Mapping[str, List[str]]={}, rules: List[str]=[], orthologize_target: str=None) -> List[Mapping[str, Any]]: """Create BEL Edges from BEL nanopub Args: nanopub (Mapping[str, Any]): bel nanopub namespace_targets (Mapping[str, List[str]]): what namespaces to canonicalize rules (List[str]): which computed edge rules to process, default is all, look at BEL Specification yaml file for computed edge signature keys, e.g. degradation, if any rule in list is 'skip', then skip computing edges just return primary_edge orthologize_target (str): species to convert BEL into, e.g. TAX:10090 for mouse, default option does not orthologize Returns: List[Mapping[str, Any]]: edge list with edge attributes (e.g. context) """ edges = bel.edge.edges.create_edges(nanopub, self.endpoint, namespace_targets=namespace_targets, rules=rules, orthologize_target=orthologize_target) return edges
def start(self):
    """
    Start the GNS3 VM.
    """
    # get a NAT interface number
    nat_interface_number = yield from self._look_for_interface("nat")
    if nat_interface_number < 0:
        raise GNS3VMError("The GNS3 VM: {} must have a NAT interface configured in order to start".format(self.vmname))

    hostonly_interface_number = yield from self._look_for_interface("hostonly")
    if hostonly_interface_number < 0:
        raise GNS3VMError("The GNS3 VM: {} must have a host only interface configured in order to start".format(self.vmname))

    vboxnet = yield from self._look_for_vboxnet(hostonly_interface_number)
    if vboxnet is None:
        raise GNS3VMError("VirtualBox host-only network could not be found for interface {} on GNS3 VM".format(hostonly_interface_number))

    if not (yield from self._check_dhcp_server(vboxnet)):
        raise GNS3VMError("DHCP must be enabled on VirtualBox host-only network: {} for GNS3 VM".format(vboxnet))

    vm_state = yield from self._get_state()
    log.info('"{}" state is {}'.format(self._vmname, vm_state))
    if vm_state == "poweroff":
        yield from self.set_vcpus(self.vcpus)
        yield from self.set_ram(self.ram)

    if vm_state in ("poweroff", "saved"):
        # start the VM if it is not running
        args = [self._vmname]
        if self._headless:
            args.extend(["--type", "headless"])
        yield from self._execute("startvm", args)
    elif vm_state == "paused":
        args = [self._vmname, "resume"]
        yield from self._execute("controlvm", args)

    ip_address = "127.0.0.1"
    try:
        # get a random port on localhost
        with socket.socket() as s:
            s.bind((ip_address, 0))
            api_port = s.getsockname()[1]
    except OSError as e:
        raise GNS3VMError("Error while getting random port: {}".format(e))

    if (yield from self._check_vbox_port_forwarding()):
        # delete the GNS3VM NAT port forwarding rule if it exists
        log.info("Removing GNS3VM NAT port forwarding rule from interface {}".format(nat_interface_number))
        yield from self._execute("controlvm", [self._vmname, "natpf{}".format(nat_interface_number), "delete", "GNS3VM"])

    # add a GNS3VM NAT port forwarding rule to redirect 127.0.0.1 with random port to port 3080 in the VM
    log.info("Adding GNS3VM NAT port forwarding rule with port {} to interface {}".format(api_port, nat_interface_number))
    yield from self._execute("controlvm", [self._vmname, "natpf{}".format(nat_interface_number), "GNS3VM,tcp,{},{},,3080".format(ip_address, api_port)])

    self.ip_address = yield from self._get_ip(hostonly_interface_number, api_port)
    self.port = 3080
    log.info("GNS3 VM has been started with IP {}".format(self.ip_address))
    self.running = True
def function[start, parameter[self]]: constant[ Start the GNS3 VM. ] variable[nat_interface_number] assign[=] <ast.YieldFrom object at 0x7da18f7229b0> if compare[name[nat_interface_number] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da18ede4e80> variable[hostonly_interface_number] assign[=] <ast.YieldFrom object at 0x7da18ede6ef0> if compare[name[hostonly_interface_number] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da18ede6b00> variable[vboxnet] assign[=] <ast.YieldFrom object at 0x7da18ede75e0> if compare[name[vboxnet] is constant[None]] begin[:] <ast.Raise object at 0x7da18f813ac0> if <ast.UnaryOp object at 0x7da18f811600> begin[:] <ast.Raise object at 0x7da18f810bb0> variable[vm_state] assign[=] <ast.YieldFrom object at 0x7da18f810100> call[name[log].info, parameter[call[constant["{}" state is {}].format, parameter[name[self]._vmname, name[vm_state]]]]] if compare[name[vm_state] equal[==] constant[poweroff]] begin[:] <ast.YieldFrom object at 0x7da18f810c10> <ast.YieldFrom object at 0x7da18f810e20> if compare[name[vm_state] in tuple[[<ast.Constant object at 0x7da18f8123b0>, <ast.Constant object at 0x7da18f811c60>]]] begin[:] variable[args] assign[=] list[[<ast.Attribute object at 0x7da18f812110>]] if name[self]._headless begin[:] call[name[args].extend, parameter[list[[<ast.Constant object at 0x7da18f811930>, <ast.Constant object at 0x7da18f813160>]]]] <ast.YieldFrom object at 0x7da18f811810> variable[ip_address] assign[=] constant[127.0.0.1] <ast.Try object at 0x7da18f812ad0> if <ast.YieldFrom object at 0x7da2044c12d0> begin[:] call[name[log].info, parameter[call[constant[Removing GNS3VM NAT port forwarding rule from interface {}].format, parameter[name[nat_interface_number]]]]] <ast.YieldFrom object at 0x7da2044c2800> call[name[log].info, parameter[call[constant[Adding GNS3VM NAT port forwarding rule with port {} to interface {}].format, parameter[name[api_port], name[nat_interface_number]]]]] <ast.YieldFrom object at 0x7da2044c29b0> name[self].ip_address assign[=] <ast.YieldFrom object at 0x7da2044c0250> name[self].port assign[=] constant[3080] call[name[log].info, parameter[call[constant[GNS3 VM has been started with IP {}].format, parameter[name[self].ip_address]]]] name[self].running assign[=] constant[True]
keyword[def] identifier[start] ( identifier[self] ): literal[string] identifier[nat_interface_number] = keyword[yield] keyword[from] identifier[self] . identifier[_look_for_interface] ( literal[string] ) keyword[if] identifier[nat_interface_number] < literal[int] : keyword[raise] identifier[GNS3VMError] ( literal[string] . identifier[format] ( identifier[self] . identifier[vmname] )) identifier[hostonly_interface_number] = keyword[yield] keyword[from] identifier[self] . identifier[_look_for_interface] ( literal[string] ) keyword[if] identifier[hostonly_interface_number] < literal[int] : keyword[raise] identifier[GNS3VMError] ( literal[string] . identifier[format] ( identifier[self] . identifier[vmname] )) identifier[vboxnet] = keyword[yield] keyword[from] identifier[self] . identifier[_look_for_vboxnet] ( identifier[hostonly_interface_number] ) keyword[if] identifier[vboxnet] keyword[is] keyword[None] : keyword[raise] identifier[GNS3VMError] ( literal[string] . identifier[format] ( identifier[hostonly_interface_number] )) keyword[if] keyword[not] ( keyword[yield] keyword[from] identifier[self] . identifier[_check_dhcp_server] ( identifier[vboxnet] )): keyword[raise] identifier[GNS3VMError] ( literal[string] . identifier[format] ( identifier[vboxnet] )) identifier[vm_state] = keyword[yield] keyword[from] identifier[self] . identifier[_get_state] () identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[_vmname] , identifier[vm_state] )) keyword[if] identifier[vm_state] == literal[string] : keyword[yield] keyword[from] identifier[self] . identifier[set_vcpus] ( identifier[self] . identifier[vcpus] ) keyword[yield] keyword[from] identifier[self] . identifier[set_ram] ( identifier[self] . identifier[ram] ) keyword[if] identifier[vm_state] keyword[in] ( literal[string] , literal[string] ): identifier[args] =[ identifier[self] . identifier[_vmname] ] keyword[if] identifier[self] . identifier[_headless] : identifier[args] . identifier[extend] ([ literal[string] , literal[string] ]) keyword[yield] keyword[from] identifier[self] . identifier[_execute] ( literal[string] , identifier[args] ) keyword[elif] identifier[vm_state] == literal[string] : identifier[args] =[ identifier[self] . identifier[_vmname] , literal[string] ] keyword[yield] keyword[from] identifier[self] . identifier[_execute] ( literal[string] , identifier[args] ) identifier[ip_address] = literal[string] keyword[try] : keyword[with] identifier[socket] . identifier[socket] () keyword[as] identifier[s] : identifier[s] . identifier[bind] (( identifier[ip_address] , literal[int] )) identifier[api_port] = identifier[s] . identifier[getsockname] ()[ literal[int] ] keyword[except] identifier[OSError] keyword[as] identifier[e] : keyword[raise] identifier[GNS3VMError] ( literal[string] . identifier[format] ( identifier[e] )) keyword[if] ( keyword[yield] keyword[from] identifier[self] . identifier[_check_vbox_port_forwarding] ()): identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[nat_interface_number] )) keyword[yield] keyword[from] identifier[self] . identifier[_execute] ( literal[string] ,[ identifier[self] . identifier[_vmname] , literal[string] . identifier[format] ( identifier[nat_interface_number] ), literal[string] , literal[string] ]) identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[api_port] , identifier[nat_interface_number] )) keyword[yield] keyword[from] identifier[self] . identifier[_execute] ( literal[string] ,[ identifier[self] . identifier[_vmname] , literal[string] . identifier[format] ( identifier[nat_interface_number] ), literal[string] . identifier[format] ( identifier[ip_address] , identifier[api_port] )]) identifier[self] . identifier[ip_address] = keyword[yield] keyword[from] identifier[self] . identifier[_get_ip] ( identifier[hostonly_interface_number] , identifier[api_port] ) identifier[self] . identifier[port] = literal[int] identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[ip_address] )) identifier[self] . identifier[running] = keyword[True]
def start(self): """ Start the GNS3 VM. """ # get a NAT interface number nat_interface_number = (yield from self._look_for_interface('nat')) if nat_interface_number < 0: raise GNS3VMError('The GNS3 VM: {} must have a NAT interface configured in order to start'.format(self.vmname)) # depends on [control=['if'], data=[]] hostonly_interface_number = (yield from self._look_for_interface('hostonly')) if hostonly_interface_number < 0: raise GNS3VMError('The GNS3 VM: {} must have a host only interface configured in order to start'.format(self.vmname)) # depends on [control=['if'], data=[]] vboxnet = (yield from self._look_for_vboxnet(hostonly_interface_number)) if vboxnet is None: raise GNS3VMError('VirtualBox host-only network could not be found for interface {} on GNS3 VM'.format(hostonly_interface_number)) # depends on [control=['if'], data=[]] if not (yield from self._check_dhcp_server(vboxnet)): raise GNS3VMError('DHCP must be enabled on VirtualBox host-only network: {} for GNS3 VM'.format(vboxnet)) # depends on [control=['if'], data=[]] vm_state = (yield from self._get_state()) log.info('"{}" state is {}'.format(self._vmname, vm_state)) if vm_state == 'poweroff': yield from self.set_vcpus(self.vcpus) yield from self.set_ram(self.ram) # depends on [control=['if'], data=[]] if vm_state in ('poweroff', 'saved'): # start the VM if it is not running args = [self._vmname] if self._headless: args.extend(['--type', 'headless']) # depends on [control=['if'], data=[]] yield from self._execute('startvm', args) # depends on [control=['if'], data=[]] elif vm_state == 'paused': args = [self._vmname, 'resume'] yield from self._execute('controlvm', args) # depends on [control=['if'], data=[]] ip_address = '127.0.0.1' try: # get a random port on localhost with socket.socket() as s: s.bind((ip_address, 0)) api_port = s.getsockname()[1] # depends on [control=['with'], data=['s']] # depends on [control=['try'], data=[]] except OSError as e: raise GNS3VMError('Error while getting random port: {}'.format(e)) # depends on [control=['except'], data=['e']] if (yield from self._check_vbox_port_forwarding()): # delete the GNS3VM NAT port forwarding rule if it exists log.info('Removing GNS3VM NAT port forwarding rule from interface {}'.format(nat_interface_number)) yield from self._execute('controlvm', [self._vmname, 'natpf{}'.format(nat_interface_number), 'delete', 'GNS3VM']) # depends on [control=['if'], data=[]] # add a GNS3VM NAT port forwarding rule to redirect 127.0.0.1 with random port to port 3080 in the VM log.info('Adding GNS3VM NAT port forwarding rule with port {} to interface {}'.format(api_port, nat_interface_number)) yield from self._execute('controlvm', [self._vmname, 'natpf{}'.format(nat_interface_number), 'GNS3VM,tcp,{},{},,3080'.format(ip_address, api_port)]) self.ip_address = (yield from self._get_ip(hostonly_interface_number, api_port)) self.port = 3080 log.info('GNS3 VM has been started with IP {}'.format(self.ip_address)) self.running = True
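The port-selection step inside start() is easiest to see in isolation; below is a minimal standalone sketch of the same idiom (the helper name pick_free_port is invented for illustration). Binding to port 0 lets the OS pick a free ephemeral port, which is then read back with getsockname().

import socket

def pick_free_port(ip_address="127.0.0.1"):
    # Port 0 asks the kernel for any free port on the given address.
    with socket.socket() as s:
        s.bind((ip_address, 0))
        return s.getsockname()[1]

print(pick_free_port())  # e.g. 49152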
def create(self, index, doc_type, body, id=None, **query_params): """ Adds a typed JSON document in a specific index, making it searchable. Behind the scenes this method calls index(..., op_type='create') `<http://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html>`_ :param index: The name of the index :param doc_type: The type of the document :param body: The document :param id: Document ID :arg consistency: Explicit write consistency setting for the operation, valid choices are: 'one', 'quorum', 'all' :arg op_type: Explicit operation type, default 'index', valid choices are: 'index', 'create' :arg parent: ID of the parent document :arg refresh: Refresh the index after performing the operation :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type, valid choices are: 'internal', 'external', 'external_gte', 'force' """ query_params['op_type'] = 'create' result = yield self.index(index, doc_type, body, id=id, params=query_params) returnValue(result)
def function[create, parameter[self, index, doc_type, body, id]]: constant[ Adds a typed JSON document in a specific index, making it searchable. Behind the scenes this method calls index(..., op_type='create') `<http://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html>`_ :param index: The name of the index :param doc_type: The type of the document :param body: The document :param id: Document ID :arg consistency: Explicit write consistency setting for the operation, valid choices are: 'one', 'quorum', 'all' :arg op_type: Explicit operation type, default 'index', valid choices are: 'index', 'create' :arg parent: ID of the parent document :arg refresh: Refresh the index after performing the operation :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type, valid choices are: 'internal', 'external', 'external_gte', 'force' ] call[name[query_params]][constant[op_type]] assign[=] constant[create] variable[result] assign[=] <ast.Yield object at 0x7da18c4cd390> call[name[returnValue], parameter[name[result]]]
keyword[def] identifier[create] ( identifier[self] , identifier[index] , identifier[doc_type] , identifier[body] , identifier[id] = keyword[None] ,** identifier[query_params] ): literal[string] identifier[query_params] [ literal[string] ]= literal[string] identifier[result] = keyword[yield] identifier[self] . identifier[index] ( identifier[index] , identifier[doc_type] , identifier[body] , identifier[id] = identifier[id] , identifier[params] = identifier[query_params] ) identifier[returnValue] ( identifier[result] )
def create(self, index, doc_type, body, id=None, **query_params): """ Adds a typed JSON document in a specific index, making it searchable. Behind the scenes this method calls index(..., op_type='create') `<http://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html>`_ :param index: The name of the index :param doc_type: The type of the document :param body: The document :param id: Document ID :arg consistency: Explicit write consistency setting for the operation, valid choices are: 'one', 'quorum', 'all' :arg op_type: Explicit operation type, default 'index', valid choices are: 'index', 'create' :arg parent: ID of the parent document :arg refresh: Refresh the index after performing the operation :arg routing: Specific routing value :arg timeout: Explicit operation timeout :arg timestamp: Explicit timestamp for the document :arg ttl: Expiration time for the document :arg version: Explicit version number for concurrency control :arg version_type: Specific version type, valid choices are: 'internal', 'external', 'external_gte', 'force' """ query_params['op_type'] = 'create' result = (yield self.index(index, doc_type, body, id=id, params=query_params)) returnValue(result)
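A hypothetical call to create() above, assuming the method lives on a Twisted-style client object (the index, type, and document values are invented for illustration):

from twisted.internet import defer

@defer.inlineCallbacks
def add_user(es):
    # op_type='create' is forced internally, so this errors out if a
    # document with id="1" already exists in the index.
    result = yield es.create("users", "user", {"name": "alice"}, id="1")
    defer.returnValue(result)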
def parse_fields(attributes): """Parse model fields.""" return tuple(field.bind_name(name) for name, field in six.iteritems(attributes) if isinstance(field, fields.Field))
def function[parse_fields, parameter[attributes]]: constant[Parse model fields.] return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da2054a6440>]]]
keyword[def] identifier[parse_fields] ( identifier[attributes] ): literal[string] keyword[return] identifier[tuple] ( identifier[field] . identifier[bind_name] ( identifier[name] ) keyword[for] identifier[name] , identifier[field] keyword[in] identifier[six] . identifier[iteritems] ( identifier[attributes] ) keyword[if] identifier[isinstance] ( identifier[field] , identifier[fields] . identifier[Field] ))
def parse_fields(attributes): """Parse model fields.""" return tuple((field.bind_name(name) for (name, field) in six.iteritems(attributes) if isinstance(field, fields.Field)))
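A self-contained sketch of the contract parse_fields() relies on; the Field class below is a stand-in for the project's fields.Field, purely for illustration:

class Field:  # stand-in for fields.Field
    def bind_name(self, name):
        self.name = name
        return self

attributes = {"title": Field(), "pages": Field(), "not_a_field": 42}
# Non-Field attributes (here the int 42) are filtered out, exactly as
# parse_fields() does with isinstance(field, fields.Field).
bound = tuple(f.bind_name(n) for n, f in attributes.items()
              if isinstance(f, Field))
print(sorted(f.name for f in bound))  # ['pages', 'title']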
def quote_names(db, names): """psycopg2 doesn't know how to quote identifier names, so we ask the server""" c = db.cursor() c.execute("SELECT pg_catalog.quote_ident(n) FROM pg_catalog.unnest(%s::text[]) n", [list(names)]) return [name for (name,) in c]
def function[quote_names, parameter[db, names]]: constant[psycopg2 doesn't know how to quote identifier names, so we ask the server] variable[c] assign[=] call[name[db].cursor, parameter[]] call[name[c].execute, parameter[constant[SELECT pg_catalog.quote_ident(n) FROM pg_catalog.unnest(%s::text[]) n], list[[<ast.Call object at 0x7da1b14075e0>]]]] return[<ast.ListComp object at 0x7da1b1407b80>]
keyword[def] identifier[quote_names] ( identifier[db] , identifier[names] ): literal[string] identifier[c] = identifier[db] . identifier[cursor] () identifier[c] . identifier[execute] ( literal[string] ,[ identifier[list] ( identifier[names] )]) keyword[return] [ identifier[name] keyword[for] ( identifier[name] ,) keyword[in] identifier[c] ]
def quote_names(db, names): """psycopg2 doesn't know how to quote identifier names, so we ask the server""" c = db.cursor() c.execute('SELECT pg_catalog.quote_ident(n) FROM pg_catalog.unnest(%s::text[]) n', [list(names)]) return [name for (name,) in c]
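Assuming an open psycopg2 connection, quote_names() can be exercised as follows (the DSN and identifiers are illustrative; the expected output follows quote_ident's documented behaviour):

import psycopg2

db = psycopg2.connect("dbname=test")  # illustrative connection string
print(quote_names(db, ["plain", "Mixed Case", 'say "hi"']))
# expected: ['plain', '"Mixed Case"', '"say ""hi"""']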
def parse_gptl(file_path, var_list):
    """
    Read a GPTL timing file and extract some data.

    Args:
        file_path: the path to the GPTL timing file
        var_list: a list of strings to look for in the file

    Returns:
        A dict containing key-value pairs of the variables and the times associated with them
    """
    timing_result = dict()
    if os.path.isfile(file_path):
        with open(file_path, 'r') as f:
            for var in var_list:
                for line in f:
                    if var in line:
                        timing_result[var] = float(line.split()[4])/int(line.split()[2])
                        break
    return timing_result
def function[parse_gptl, parameter[file_path, var_list]]: constant[ Read a GPTL timing file and extract some data. Args: file_path: the path to the GPTL timing file var_list: a list of strings to look for in the file Returns: A dict containing key-value pairs of the variables and the times associated with them ] variable[timing_result] assign[=] call[name[dict], parameter[]] if call[name[os].path.isfile, parameter[name[file_path]]] begin[:] with call[name[open], parameter[name[file_path], constant[r]]] begin[:] for taget[name[var]] in starred[name[var_list]] begin[:] for taget[name[line]] in starred[name[f]] begin[:] if compare[name[var] in name[line]] begin[:] call[name[timing_result]][name[var]] assign[=] binary_operation[call[name[float], parameter[call[call[name[line].split, parameter[]]][constant[4]]]] / call[name[int], parameter[call[call[name[line].split, parameter[]]][constant[2]]]]] break return[name[timing_result]]
keyword[def] identifier[parse_gptl] ( identifier[file_path] , identifier[var_list] ): literal[string] identifier[timing_result] = identifier[dict] () keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[file_path] ): keyword[with] identifier[open] ( identifier[file_path] , literal[string] ) keyword[as] identifier[f] : keyword[for] identifier[var] keyword[in] identifier[var_list] : keyword[for] identifier[line] keyword[in] identifier[f] : keyword[if] identifier[var] keyword[in] identifier[line] : identifier[timing_result] [ identifier[var] ]= identifier[float] ( identifier[line] . identifier[split] ()[ literal[int] ])/ identifier[int] ( identifier[line] . identifier[split] ()[ literal[int] ]) keyword[break] keyword[return] identifier[timing_result]
def parse_gptl(file_path, var_list): """ Read a GPTL timing file and extract some data. Args: file_path: the path to the GPTL timing file var_list: a list of strings to look for in the file Returns: A dict containing key-value pairs of the variables and the times associated with them """ timing_result = dict() if os.path.isfile(file_path): with open(file_path, 'r') as f: for var in var_list: for line in f: if var in line: timing_result[var] = float(line.split()[4]) / int(line.split()[2]) break # depends on [control=['if'], data=['var', 'line']] # depends on [control=['for'], data=['line']] # depends on [control=['for'], data=['var']] # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] return timing_result
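The value stored per variable by parse_gptl() is column 4 (total wall-clock time) divided by column 2 (call count), i.e. an average time per call. A one-line worked example (the column layout is an assumption about GPTL output):

line = "run_model 1 10 - 5.0"   # name, ?, call count, ?, total seconds
avg = float(line.split()[4]) / int(line.split()[2])
print(avg)  # 0.5 seconds per call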
def get_frames(self): "Define an iterator that will return frames at the given blocksize" nb_frames = self.input_totalframes // self.output_blocksize if self.input_totalframes % self.output_blocksize == 0: nb_frames -= 1 # Last frame must send eod=True for index in xrange(0, nb_frames * self.output_blocksize, self.output_blocksize): yield (self.samples[index:index + self.output_blocksize], False) yield (self.samples[nb_frames * self.output_blocksize:], True)
def function[get_frames, parameter[self]]: constant[Define an iterator that will return frames at the given blocksize] variable[nb_frames] assign[=] binary_operation[name[self].input_totalframes <ast.FloorDiv object at 0x7da2590d6bc0> name[self].output_blocksize] if compare[binary_operation[name[self].input_totalframes <ast.Mod object at 0x7da2590d6920> name[self].output_blocksize] equal[==] constant[0]] begin[:] <ast.AugAssign object at 0x7da1b11a8730> for taget[name[index]] in starred[call[name[xrange], parameter[constant[0], binary_operation[name[nb_frames] * name[self].output_blocksize], name[self].output_blocksize]]] begin[:] <ast.Yield object at 0x7da1b11abdf0> <ast.Yield object at 0x7da1b11ab6d0>
keyword[def] identifier[get_frames] ( identifier[self] ): literal[string] identifier[nb_frames] = identifier[self] . identifier[input_totalframes] // identifier[self] . identifier[output_blocksize] keyword[if] identifier[self] . identifier[input_totalframes] % identifier[self] . identifier[output_blocksize] == literal[int] : identifier[nb_frames] -= literal[int] keyword[for] identifier[index] keyword[in] identifier[xrange] ( literal[int] , identifier[nb_frames] * identifier[self] . identifier[output_blocksize] , identifier[self] . identifier[output_blocksize] ): keyword[yield] ( identifier[self] . identifier[samples] [ identifier[index] : identifier[index] + identifier[self] . identifier[output_blocksize] ], keyword[False] ) keyword[yield] ( identifier[self] . identifier[samples] [ identifier[nb_frames] * identifier[self] . identifier[output_blocksize] :], keyword[True] )
def get_frames(self): """Define an iterator that will return frames at the given blocksize""" nb_frames = self.input_totalframes // self.output_blocksize if self.input_totalframes % self.output_blocksize == 0: nb_frames -= 1 # Last frame must send eod=True # depends on [control=['if'], data=[]] for index in xrange(0, nb_frames * self.output_blocksize, self.output_blocksize): yield (self.samples[index:index + self.output_blocksize], False) # depends on [control=['for'], data=['index']] yield (self.samples[nb_frames * self.output_blocksize:], True)
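A quick numeric check of the blocking logic in get_frames(), with a plain list standing in for the sample buffer (10 samples, blocksize 4):

samples, blocksize = list(range(10)), 4
nb_frames = len(samples) // blocksize          # 2
if len(samples) % blocksize == 0:
    nb_frames -= 1                             # not taken here (10 % 4 == 2)
frames = [(samples[i:i + blocksize], False)
          for i in range(0, nb_frames * blocksize, blocksize)]
frames.append((samples[nb_frames * blocksize:], True))  # remainder, eod=True
print(frames)
# [([0, 1, 2, 3], False), ([4, 5, 6, 7], False), ([8, 9], True)]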
def _iterate_managers(connection, skip): """Iterate over instantiated managers.""" for idx, name, manager_cls in _iterate_manage_classes(skip): if name in skip: continue try: manager = manager_cls(connection=connection) except TypeError as e: click.secho(f'Could not instantiate {name}: {e}', fg='red') else: yield idx, name, manager
def function[_iterate_managers, parameter[connection, skip]]: constant[Iterate over instantiated managers.] for taget[tuple[[<ast.Name object at 0x7da1b00fa200>, <ast.Name object at 0x7da1b00f93c0>, <ast.Name object at 0x7da1b00f8b50>]]] in starred[call[name[_iterate_manage_classes], parameter[name[skip]]]] begin[:] if compare[name[name] in name[skip]] begin[:] continue <ast.Try object at 0x7da1b00fb4c0>
keyword[def] identifier[_iterate_managers] ( identifier[connection] , identifier[skip] ): literal[string] keyword[for] identifier[idx] , identifier[name] , identifier[manager_cls] keyword[in] identifier[_iterate_manage_classes] ( identifier[skip] ): keyword[if] identifier[name] keyword[in] identifier[skip] : keyword[continue] keyword[try] : identifier[manager] = identifier[manager_cls] ( identifier[connection] = identifier[connection] ) keyword[except] identifier[TypeError] keyword[as] identifier[e] : identifier[click] . identifier[secho] ( literal[string] , identifier[fg] = literal[string] ) keyword[else] : keyword[yield] identifier[idx] , identifier[name] , identifier[manager]
def _iterate_managers(connection, skip): """Iterate over instantiated managers.""" for (idx, name, manager_cls) in _iterate_manage_classes(skip): if name in skip: continue # depends on [control=['if'], data=[]] try: manager = manager_cls(connection=connection) # depends on [control=['try'], data=[]] except TypeError as e: click.secho(f'Could not instantiate {name}: {e}', fg='red') # depends on [control=['except'], data=['e']] else: yield (idx, name, manager) # depends on [control=['for'], data=[]]
def from_json(cls, data, result=None): """ Create new Node element from JSON data :param data: Element data from JSON :type data: Dict :param result: The result this element belongs to :type result: overpy.Result :return: New instance of Node :rtype: overpy.Node :raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match. """ if data.get("type") != cls._type_value: raise exception.ElementDataWrongType( type_expected=cls._type_value, type_provided=data.get("type") ) tags = data.get("tags", {}) node_id = data.get("id") lat = data.get("lat") lon = data.get("lon") attributes = {} ignore = ["type", "id", "lat", "lon", "tags"] for n, v in data.items(): if n in ignore: continue attributes[n] = v return cls(node_id=node_id, lat=lat, lon=lon, tags=tags, attributes=attributes, result=result)
def function[from_json, parameter[cls, data, result]]: constant[ Create new Node element from JSON data :param data: Element data from JSON :type data: Dict :param result: The result this element belongs to :type result: overpy.Result :return: New instance of Node :rtype: overpy.Node :raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match. ] if compare[call[name[data].get, parameter[constant[type]]] not_equal[!=] name[cls]._type_value] begin[:] <ast.Raise object at 0x7da1b042e140> variable[tags] assign[=] call[name[data].get, parameter[constant[tags], dictionary[[], []]]] variable[node_id] assign[=] call[name[data].get, parameter[constant[id]]] variable[lat] assign[=] call[name[data].get, parameter[constant[lat]]] variable[lon] assign[=] call[name[data].get, parameter[constant[lon]]] variable[attributes] assign[=] dictionary[[], []] variable[ignore] assign[=] list[[<ast.Constant object at 0x7da1b042ee90>, <ast.Constant object at 0x7da1b042d330>, <ast.Constant object at 0x7da1b042e170>, <ast.Constant object at 0x7da1b042e0b0>, <ast.Constant object at 0x7da1b042ccd0>]] for taget[tuple[[<ast.Name object at 0x7da1b042e020>, <ast.Name object at 0x7da1b042ebc0>]]] in starred[call[name[data].items, parameter[]]] begin[:] if compare[name[n] in name[ignore]] begin[:] continue call[name[attributes]][name[n]] assign[=] name[v] return[call[name[cls], parameter[]]]
keyword[def] identifier[from_json] ( identifier[cls] , identifier[data] , identifier[result] = keyword[None] ): literal[string] keyword[if] identifier[data] . identifier[get] ( literal[string] )!= identifier[cls] . identifier[_type_value] : keyword[raise] identifier[exception] . identifier[ElementDataWrongType] ( identifier[type_expected] = identifier[cls] . identifier[_type_value] , identifier[type_provided] = identifier[data] . identifier[get] ( literal[string] ) ) identifier[tags] = identifier[data] . identifier[get] ( literal[string] ,{}) identifier[node_id] = identifier[data] . identifier[get] ( literal[string] ) identifier[lat] = identifier[data] . identifier[get] ( literal[string] ) identifier[lon] = identifier[data] . identifier[get] ( literal[string] ) identifier[attributes] ={} identifier[ignore] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[n] , identifier[v] keyword[in] identifier[data] . identifier[items] (): keyword[if] identifier[n] keyword[in] identifier[ignore] : keyword[continue] identifier[attributes] [ identifier[n] ]= identifier[v] keyword[return] identifier[cls] ( identifier[node_id] = identifier[node_id] , identifier[lat] = identifier[lat] , identifier[lon] = identifier[lon] , identifier[tags] = identifier[tags] , identifier[attributes] = identifier[attributes] , identifier[result] = identifier[result] )
def from_json(cls, data, result=None): """ Create new Node element from JSON data :param data: Element data from JSON :type data: Dict :param result: The result this element belongs to :type result: overpy.Result :return: New instance of Node :rtype: overpy.Node :raises overpy.exception.ElementDataWrongType: If type value of the passed JSON data does not match. """ if data.get('type') != cls._type_value: raise exception.ElementDataWrongType(type_expected=cls._type_value, type_provided=data.get('type')) # depends on [control=['if'], data=[]] tags = data.get('tags', {}) node_id = data.get('id') lat = data.get('lat') lon = data.get('lon') attributes = {} ignore = ['type', 'id', 'lat', 'lon', 'tags'] for (n, v) in data.items(): if n in ignore: continue # depends on [control=['if'], data=[]] attributes[n] = v # depends on [control=['for'], data=[]] return cls(node_id=node_id, lat=lat, lon=lon, tags=tags, attributes=attributes, result=result)
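A hedged usage sketch for from_json() with hand-written input of the expected shape (assuming it is exposed as a classmethod on a Node class, as in overpy; all values are invented):

data = {
    "type": "node",              # must match cls._type_value
    "id": 42,
    "lat": 50.0,
    "lon": 7.0,
    "tags": {"amenity": "cafe"},
    "version": 3,                # not in `ignore`, so it lands in attributes
}
node = Node.from_json(data)
# node.attributes would then be {'version': 3}, assuming the constructor
# stores the keyword arguments as-is.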
def add_predicate(self, predicate_obj): """ Adds a predicate to the semantic layer @type predicate_obj: L{Cpredicate} @param predicate_obj: the predicate object """ if self.srl_layer is None: self.srl_layer = Csrl() self.root.append(self.srl_layer.get_node()) self.srl_layer.add_predicate(predicate_obj)
def function[add_predicate, parameter[self, predicate_obj]]: constant[ Adds a predicate to the semantic layer @type predicate_obj: L{Cpredicate} @param predicate_obj: the predicate object ] if compare[name[self].srl_layer is constant[None]] begin[:] name[self].srl_layer assign[=] call[name[Csrl], parameter[]] call[name[self].root.append, parameter[call[name[self].srl_layer.get_node, parameter[]]]] call[name[self].srl_layer.add_predicate, parameter[name[predicate_obj]]]
keyword[def] identifier[add_predicate] ( identifier[self] , identifier[predicate_obj] ): literal[string] keyword[if] identifier[self] . identifier[srl_layer] keyword[is] keyword[None] : identifier[self] . identifier[srl_layer] = identifier[Csrl] () identifier[self] . identifier[root] . identifier[append] ( identifier[self] . identifier[srl_layer] . identifier[get_node] ()) identifier[self] . identifier[srl_layer] . identifier[add_predicate] ( identifier[predicate_obj] )
def add_predicate(self, predicate_obj): """ Adds a predicate to the semantic layer @type predicate_obj: L{Cpredicate} @param predicate_obj: the predicate object """ if self.srl_layer is None: self.srl_layer = Csrl() self.root.append(self.srl_layer.get_node()) # depends on [control=['if'], data=[]] self.srl_layer.add_predicate(predicate_obj)
def battlecry_requires_target(self): """ True if the play action of the card requires a target """ if self.has_combo and self.controller.combo: if PlayReq.REQ_TARGET_FOR_COMBO in self.requirements: return True for req in TARGETING_PREREQUISITES: if req in self.requirements: return True return False
def function[battlecry_requires_target, parameter[self]]: constant[ True if the play action of the card requires a target ] if <ast.BoolOp object at 0x7da18ede4160> begin[:] if compare[name[PlayReq].REQ_TARGET_FOR_COMBO in name[self].requirements] begin[:] return[constant[True]] for taget[name[req]] in starred[name[TARGETING_PREREQUISITES]] begin[:] if compare[name[req] in name[self].requirements] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[battlecry_requires_target] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[has_combo] keyword[and] identifier[self] . identifier[controller] . identifier[combo] : keyword[if] identifier[PlayReq] . identifier[REQ_TARGET_FOR_COMBO] keyword[in] identifier[self] . identifier[requirements] : keyword[return] keyword[True] keyword[for] identifier[req] keyword[in] identifier[TARGETING_PREREQUISITES] : keyword[if] identifier[req] keyword[in] identifier[self] . identifier[requirements] : keyword[return] keyword[True] keyword[return] keyword[False]
def battlecry_requires_target(self): """ True if the play action of the card requires a target """ if self.has_combo and self.controller.combo: if PlayReq.REQ_TARGET_FOR_COMBO in self.requirements: return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] for req in TARGETING_PREREQUISITES: if req in self.requirements: return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['req']] return False
def dbg_print(self, indent=0): """ Print out debugging information. """ lst = [] more_data = False for i, addr in enumerate(self.mem.keys()): lst.append(addr) if i >= 20: more_data = True break for addr in sorted(lst): data = self.mem[addr] if isinstance(data, SimMemoryObject): memobj = data print("%s%xh: (%s)[%d]" % (" " * indent, addr, memobj, addr - memobj.base)) else: print("%s%xh: <default data>" % (" " * indent, addr)) if more_data: print("%s..." % (" " * indent))
def function[dbg_print, parameter[self, indent]]: constant[ Print out debugging information. ] variable[lst] assign[=] list[[]] variable[more_data] assign[=] constant[False] for taget[tuple[[<ast.Name object at 0x7da18eb55f30>, <ast.Name object at 0x7da18eb57970>]]] in starred[call[name[enumerate], parameter[call[name[self].mem.keys, parameter[]]]]] begin[:] call[name[lst].append, parameter[name[addr]]] if compare[name[i] greater_or_equal[>=] constant[20]] begin[:] variable[more_data] assign[=] constant[True] break for taget[name[addr]] in starred[call[name[sorted], parameter[name[lst]]]] begin[:] variable[data] assign[=] call[name[self].mem][name[addr]] if call[name[isinstance], parameter[name[data], name[SimMemoryObject]]] begin[:] variable[memobj] assign[=] name[data] call[name[print], parameter[binary_operation[constant[%s%xh: (%s)[%d]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da18bc72bf0>, <ast.Name object at 0x7da18bc733d0>, <ast.Name object at 0x7da18bc737f0>, <ast.BinOp object at 0x7da18bc70f40>]]]]] if name[more_data] begin[:] call[name[print], parameter[binary_operation[constant[%s...] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[ ] * name[indent]]]]]
keyword[def] identifier[dbg_print] ( identifier[self] , identifier[indent] = literal[int] ): literal[string] identifier[lst] =[] identifier[more_data] = keyword[False] keyword[for] identifier[i] , identifier[addr] keyword[in] identifier[enumerate] ( identifier[self] . identifier[mem] . identifier[keys] ()): identifier[lst] . identifier[append] ( identifier[addr] ) keyword[if] identifier[i] >= literal[int] : identifier[more_data] = keyword[True] keyword[break] keyword[for] identifier[addr] keyword[in] identifier[sorted] ( identifier[lst] ): identifier[data] = identifier[self] . identifier[mem] [ identifier[addr] ] keyword[if] identifier[isinstance] ( identifier[data] , identifier[SimMemoryObject] ): identifier[memobj] = identifier[data] identifier[print] ( literal[string] %( literal[string] * identifier[indent] , identifier[addr] , identifier[memobj] , identifier[addr] - identifier[memobj] . identifier[base] )) keyword[else] : identifier[print] ( literal[string] %( literal[string] * identifier[indent] , identifier[addr] )) keyword[if] identifier[more_data] : identifier[print] ( literal[string] %( literal[string] * identifier[indent] ))
def dbg_print(self, indent=0): """ Print out debugging information. """ lst = [] more_data = False for (i, addr) in enumerate(self.mem.keys()): lst.append(addr) if i >= 20: more_data = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] for addr in sorted(lst): data = self.mem[addr] if isinstance(data, SimMemoryObject): memobj = data print('%s%xh: (%s)[%d]' % (' ' * indent, addr, memobj, addr - memobj.base)) # depends on [control=['if'], data=[]] else: print('%s%xh: <default data>' % (' ' * indent, addr)) # depends on [control=['for'], data=['addr']] if more_data: print('%s...' % (' ' * indent)) # depends on [control=['if'], data=[]]
def cmd_move(db=None): """Rename a database within a server. When used with --force, an existing database with the same name as DEST is replaced, the original is renamed out of place in the form DEST_old_YYYYMMDD (unless --no-backup is specified). """ if db is None: db = connect() pg_move_extended(db, args.src, args.dest)
def function[cmd_move, parameter[db]]: constant[Rename a database within a server. When used with --force, an existing database with the same name as DEST is replaced, the original is renamed out of place in the form DEST_old_YYYYMMDD (unless --no-backup is specified). ] if compare[name[db] is constant[None]] begin[:] variable[db] assign[=] call[name[connect], parameter[]] call[name[pg_move_extended], parameter[name[db], name[args].src, name[args].dest]]
keyword[def] identifier[cmd_move] ( identifier[db] = keyword[None] ): literal[string] keyword[if] identifier[db] keyword[is] keyword[None] : identifier[db] = identifier[connect] () identifier[pg_move_extended] ( identifier[db] , identifier[args] . identifier[src] , identifier[args] . identifier[dest] )
def cmd_move(db=None): """Rename a database within a server. When used with --force, an existing database with the same name as DEST is replaced, the original is renamed out of place in the form DEST_old_YYYYMMDD (unless --no-backup is specified). """ if db is None: db = connect() # depends on [control=['if'], data=['db']] pg_move_extended(db, args.src, args.dest)
def compare(a, b):
    """Compares two timestamps.

    ``a`` and ``b`` must be the same type. In addition to normal
    representations of timestamps that order naturally, they can be rfc3339
    formatted strings.

    Args:
      a (string|object): a timestamp
      b (string|object): another timestamp

    Returns:
      int: -1 if a < b, 0 if a == b or 1 if a > b

    Raises:
      ValueError: if a or b are not the same type
      ValueError: if a or b are strings but not in valid rfc3339 format

    """
    a_is_text = isinstance(a, basestring)
    b_is_text = isinstance(b, basestring)
    if type(a) != type(b) and not (a_is_text and b_is_text):
        _logger.error(u'Cannot compare %s to %s, types differ %s!=%s',
                      a, b, type(a), type(b))
        raise ValueError(u'cannot compare inputs of differing types')
    if a_is_text:
        a = from_rfc3339(a, with_nanos=True)
        b = from_rfc3339(b, with_nanos=True)
    if a < b:
        return -1
    elif a > b:
        return 1
    else:
        return 0
def function[compare, parameter[a, b]]: constant[Compares two timestamps. ``a`` and ``b`` must be the same type. In addition to normal representations of timestamps that order naturally, they can be rfc3339 formatted strings. Args: a (string|object): a timestamp b (string|object): another timestamp Returns: int: -1 if a < b, 0 if a == b or 1 if a > b Raises: ValueError: if a or b are not the same type ValueError: if a or b are strings but not in valid rfc3339 format ] variable[a_is_text] assign[=] call[name[isinstance], parameter[name[a], name[basestring]]] variable[b_is_text] assign[=] call[name[isinstance], parameter[name[b], name[basestring]]] if <ast.BoolOp object at 0x7da18f09eb00> begin[:] call[name[_logger].error, parameter[constant[Cannot compare %s to %s, types differ %s!=%s], name[a], name[b], call[name[type], parameter[name[a]]], call[name[type], parameter[name[b]]]]] <ast.Raise object at 0x7da18f58e530> if name[a_is_text] begin[:] variable[a] assign[=] call[name[from_rfc3339], parameter[name[a]]] variable[b] assign[=] call[name[from_rfc3339], parameter[name[b]]] if compare[name[a] less[<] name[b]] begin[:] return[<ast.UnaryOp object at 0x7da1b0471f60>]
keyword[def] identifier[compare] ( identifier[a] , identifier[b] ): literal[string] identifier[a_is_text] = identifier[isinstance] ( identifier[a] , identifier[basestring] ) identifier[b_is_text] = identifier[isinstance] ( identifier[b] , identifier[basestring] ) keyword[if] identifier[type] ( identifier[a] )!= identifier[type] ( identifier[b] ) keyword[and] keyword[not] ( identifier[a_is_text] keyword[and] identifier[b_is_text] ): identifier[_logger] . identifier[error] ( literal[string] , identifier[a] , identifier[b] , identifier[type] ( identifier[a] ), identifier[type] ( identifier[b] )) keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[a_is_text] : identifier[a] = identifier[from_rfc3339] ( identifier[a] , identifier[with_nanos] = keyword[True] ) identifier[b] = identifier[from_rfc3339] ( identifier[b] , identifier[with_nanos] = keyword[True] ) keyword[if] identifier[a] < identifier[b] : keyword[return] - literal[int] keyword[elif] identifier[a] > identifier[b] : keyword[return] literal[int] keyword[else] : keyword[return] literal[int]
def compare(a, b):
    """Compares two timestamps.

    ``a`` and ``b`` must be the same type. In addition to normal
    representations of timestamps that order naturally, they can be rfc3339
    formatted strings.

    Args:
      a (string|object): a timestamp
      b (string|object): another timestamp

    Returns:
      int: -1 if a < b, 0 if a == b or 1 if a > b

    Raises:
      ValueError: if a or b are not the same type
      ValueError: if a or b are strings but not in valid rfc3339 format

    """
    a_is_text = isinstance(a, basestring)
    b_is_text = isinstance(b, basestring)
    if type(a) != type(b) and (not (a_is_text and b_is_text)):
        _logger.error(u'Cannot compare %s to %s, types differ %s!=%s', a, b, type(a), type(b))
        raise ValueError(u'cannot compare inputs of differing types') # depends on [control=['if'], data=[]]
    if a_is_text:
        a = from_rfc3339(a, with_nanos=True)
        b = from_rfc3339(b, with_nanos=True) # depends on [control=['if'], data=[]]
    if a < b:
        return -1 # depends on [control=['if'], data=[]]
    elif a > b:
        return 1 # depends on [control=['if'], data=[]]
    else:
        return 0
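A few illustrative calls to compare() (timestamps are arbitrary rfc3339 strings; from_rfc3339 is assumed to be available in the same module):

print(compare("2019-01-01T00:00:00Z", "2019-06-01T00:00:00Z"))  # -1
print(compare("2019-06-01T00:00:00Z", "2019-06-01T00:00:00Z"))  # 0
try:
    compare("2019-01-01T00:00:00Z", 1546300800)  # str vs int
except ValueError as exc:
    print(exc)  # cannot compare inputs of differing types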
def call_servo(examples, serving_bundle): """Send an RPC request to the Servomatic prediction service. Args: examples: A list of examples that matches the model spec. serving_bundle: A `ServingBundle` object that contains the information to make the serving request. Returns: A ClassificationResponse or RegressionResponse proto. """ parsed_url = urlparse('http://' + serving_bundle.inference_address) channel = implementations.insecure_channel(parsed_url.hostname, parsed_url.port) stub = prediction_service_pb2.beta_create_PredictionService_stub(channel) if serving_bundle.use_predict: request = predict_pb2.PredictRequest() elif serving_bundle.model_type == 'classification': request = classification_pb2.ClassificationRequest() else: request = regression_pb2.RegressionRequest() request.model_spec.name = serving_bundle.model_name if serving_bundle.model_version is not None: request.model_spec.version.value = serving_bundle.model_version if serving_bundle.signature is not None: request.model_spec.signature_name = serving_bundle.signature if serving_bundle.use_predict: # tf.compat.v1 API used here to convert tf.example into proto. This # utility file is bundled in the witwidget pip package which has a dep # on TensorFlow. request.inputs[serving_bundle.predict_input_tensor].CopyFrom( tf.compat.v1.make_tensor_proto( values=[ex.SerializeToString() for ex in examples], dtype=types_pb2.DT_STRING)) else: request.input.example_list.examples.extend(examples) if serving_bundle.use_predict: return common_utils.convert_predict_response( stub.Predict(request, 30.0), serving_bundle) # 30 secs timeout elif serving_bundle.model_type == 'classification': return stub.Classify(request, 30.0) # 30 secs timeout else: return stub.Regress(request, 30.0)
def function[call_servo, parameter[examples, serving_bundle]]: constant[Send an RPC request to the Servomatic prediction service. Args: examples: A list of examples that matches the model spec. serving_bundle: A `ServingBundle` object that contains the information to make the serving request. Returns: A ClassificationResponse or RegressionResponse proto. ] variable[parsed_url] assign[=] call[name[urlparse], parameter[binary_operation[constant[http://] + name[serving_bundle].inference_address]]] variable[channel] assign[=] call[name[implementations].insecure_channel, parameter[name[parsed_url].hostname, name[parsed_url].port]] variable[stub] assign[=] call[name[prediction_service_pb2].beta_create_PredictionService_stub, parameter[name[channel]]] if name[serving_bundle].use_predict begin[:] variable[request] assign[=] call[name[predict_pb2].PredictRequest, parameter[]] name[request].model_spec.name assign[=] name[serving_bundle].model_name if compare[name[serving_bundle].model_version is_not constant[None]] begin[:] name[request].model_spec.version.value assign[=] name[serving_bundle].model_version if compare[name[serving_bundle].signature is_not constant[None]] begin[:] name[request].model_spec.signature_name assign[=] name[serving_bundle].signature if name[serving_bundle].use_predict begin[:] call[call[name[request].inputs][name[serving_bundle].predict_input_tensor].CopyFrom, parameter[call[name[tf].compat.v1.make_tensor_proto, parameter[]]]] if name[serving_bundle].use_predict begin[:] return[call[name[common_utils].convert_predict_response, parameter[call[name[stub].Predict, parameter[name[request], constant[30.0]]], name[serving_bundle]]]]
keyword[def] identifier[call_servo] ( identifier[examples] , identifier[serving_bundle] ): literal[string] identifier[parsed_url] = identifier[urlparse] ( literal[string] + identifier[serving_bundle] . identifier[inference_address] ) identifier[channel] = identifier[implementations] . identifier[insecure_channel] ( identifier[parsed_url] . identifier[hostname] , identifier[parsed_url] . identifier[port] ) identifier[stub] = identifier[prediction_service_pb2] . identifier[beta_create_PredictionService_stub] ( identifier[channel] ) keyword[if] identifier[serving_bundle] . identifier[use_predict] : identifier[request] = identifier[predict_pb2] . identifier[PredictRequest] () keyword[elif] identifier[serving_bundle] . identifier[model_type] == literal[string] : identifier[request] = identifier[classification_pb2] . identifier[ClassificationRequest] () keyword[else] : identifier[request] = identifier[regression_pb2] . identifier[RegressionRequest] () identifier[request] . identifier[model_spec] . identifier[name] = identifier[serving_bundle] . identifier[model_name] keyword[if] identifier[serving_bundle] . identifier[model_version] keyword[is] keyword[not] keyword[None] : identifier[request] . identifier[model_spec] . identifier[version] . identifier[value] = identifier[serving_bundle] . identifier[model_version] keyword[if] identifier[serving_bundle] . identifier[signature] keyword[is] keyword[not] keyword[None] : identifier[request] . identifier[model_spec] . identifier[signature_name] = identifier[serving_bundle] . identifier[signature] keyword[if] identifier[serving_bundle] . identifier[use_predict] : identifier[request] . identifier[inputs] [ identifier[serving_bundle] . identifier[predict_input_tensor] ]. identifier[CopyFrom] ( identifier[tf] . identifier[compat] . identifier[v1] . identifier[make_tensor_proto] ( identifier[values] =[ identifier[ex] . identifier[SerializeToString] () keyword[for] identifier[ex] keyword[in] identifier[examples] ], identifier[dtype] = identifier[types_pb2] . identifier[DT_STRING] )) keyword[else] : identifier[request] . identifier[input] . identifier[example_list] . identifier[examples] . identifier[extend] ( identifier[examples] ) keyword[if] identifier[serving_bundle] . identifier[use_predict] : keyword[return] identifier[common_utils] . identifier[convert_predict_response] ( identifier[stub] . identifier[Predict] ( identifier[request] , literal[int] ), identifier[serving_bundle] ) keyword[elif] identifier[serving_bundle] . identifier[model_type] == literal[string] : keyword[return] identifier[stub] . identifier[Classify] ( identifier[request] , literal[int] ) keyword[else] : keyword[return] identifier[stub] . identifier[Regress] ( identifier[request] , literal[int] )
def call_servo(examples, serving_bundle): """Send an RPC request to the Servomatic prediction service. Args: examples: A list of examples that matches the model spec. serving_bundle: A `ServingBundle` object that contains the information to make the serving request. Returns: A ClassificationResponse or RegressionResponse proto. """ parsed_url = urlparse('http://' + serving_bundle.inference_address) channel = implementations.insecure_channel(parsed_url.hostname, parsed_url.port) stub = prediction_service_pb2.beta_create_PredictionService_stub(channel) if serving_bundle.use_predict: request = predict_pb2.PredictRequest() # depends on [control=['if'], data=[]] elif serving_bundle.model_type == 'classification': request = classification_pb2.ClassificationRequest() # depends on [control=['if'], data=[]] else: request = regression_pb2.RegressionRequest() request.model_spec.name = serving_bundle.model_name if serving_bundle.model_version is not None: request.model_spec.version.value = serving_bundle.model_version # depends on [control=['if'], data=[]] if serving_bundle.signature is not None: request.model_spec.signature_name = serving_bundle.signature # depends on [control=['if'], data=[]] if serving_bundle.use_predict: # tf.compat.v1 API used here to convert tf.example into proto. This # utility file is bundled in the witwidget pip package which has a dep # on TensorFlow. request.inputs[serving_bundle.predict_input_tensor].CopyFrom(tf.compat.v1.make_tensor_proto(values=[ex.SerializeToString() for ex in examples], dtype=types_pb2.DT_STRING)) # depends on [control=['if'], data=[]] else: request.input.example_list.examples.extend(examples) if serving_bundle.use_predict: return common_utils.convert_predict_response(stub.Predict(request, 30.0), serving_bundle) # 30 secs timeout # depends on [control=['if'], data=[]] elif serving_bundle.model_type == 'classification': return stub.Classify(request, 30.0) # 30 secs timeout # depends on [control=['if'], data=[]] else: return stub.Regress(request, 30.0)
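call_servo() reads only a handful of attributes off serving_bundle; a throwaway stand-in makes that contract explicit (a sketch, not the real ServingBundle class):

from collections import namedtuple

FakeBundle = namedtuple("FakeBundle", [
    "inference_address", "model_name", "model_version", "signature",
    "use_predict", "predict_input_tensor", "model_type",
])

bundle = FakeBundle(
    inference_address="localhost:8500",   # host:port of the model server
    model_name="my_model",
    model_version=None,
    signature=None,
    use_predict=False,
    predict_input_tensor=None,
    model_type="classification",          # routes to stub.Classify(...)
)
# call_servo(examples, bundle) would then issue a ClassificationRequest.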
def remove_coreference_layer(self):
        """
        Removes the coreference layer (if it exists) of the object (in memory)
        """
        if self.coreference_layer is not None:
            this_node = self.coreference_layer.get_node()
            self.root.remove(this_node)

        if self.header is not None:
            self.header.remove_lp('coreferences')
def function[remove_coreference_layer, parameter[self]]: constant[ Removes the coreference layer (if it exists) of the object (in memory) ] if compare[name[self].coreference_layer is_not constant[None]] begin[:] variable[this_node] assign[=] call[name[self].coreference_layer.get_node, parameter[]] call[name[self].root.remove, parameter[name[this_node]]] if compare[name[self].header is_not constant[None]] begin[:] call[name[self].header.remove_lp, parameter[constant[coreferences]]]
keyword[def] identifier[remove_coreference_layer] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[coreference_layer] keyword[is] keyword[not] keyword[None] : identifier[this_node] = identifier[self] . identifier[coreference_layer] . identifier[get_node] () identifier[self] . identifier[root] . identifier[remove] ( identifier[this_node] ) keyword[if] identifier[self] . identifier[header] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[header] . identifier[remove_lp] ( literal[string] )
def remove_coreference_layer(self):
    """
    Removes the coreference layer (if it exists) of the object (in memory)
    """
    if self.coreference_layer is not None:
        this_node = self.coreference_layer.get_node()
        self.root.remove(this_node) # depends on [control=['if'], data=[]]
    if self.header is not None:
        self.header.remove_lp('coreferences') # depends on [control=['if'], data=[]]
def _create_container_ships(self, hosts): """ :param hosts: :return: """ container_ships = {} if hosts: if 'default' not in hosts: default_container_ship = self._create_container_ship(None) container_ships['default'] = {default_container_ship.url.geturl(): default_container_ship} for alias, hosts in six.iteritems(hosts): if hosts is None: container_ships[alias] = hosts elif isinstance(hosts, list): container_ships[alias] = {} for host in hosts: if not host or not isinstance(host, dict): raise ValueError("hosts: {0} is required to be a dict.".format(alias)) existing_container_ship = None for container_ship_dict in six.itervalues(container_ships): for address, container_ship in six.iteritems(container_ship_dict): if address == host.get('address') and address not in container_ships[alias]: existing_container_ship = container_ship break if existing_container_ship is None: container_ships[alias][host.get('address')] = self._create_container_ship(host) else: container_ships[alias][host.get('address')] = existing_container_ship else: raise ValueError(logger.error("hosts is required to be a list or None. host: {0}".format(hosts))) else: default_container_ship = self._create_container_ship(None) container_ships['default'] = {default_container_ship.url.geturl(): default_container_ship} return container_ships
def function[_create_container_ships, parameter[self, hosts]]: constant[ :param hosts: :return: ] variable[container_ships] assign[=] dictionary[[], []] if name[hosts] begin[:] if compare[constant[default] <ast.NotIn object at 0x7da2590d7190> name[hosts]] begin[:] variable[default_container_ship] assign[=] call[name[self]._create_container_ship, parameter[constant[None]]] call[name[container_ships]][constant[default]] assign[=] dictionary[[<ast.Call object at 0x7da207f9afe0>], [<ast.Name object at 0x7da207f982b0>]] for taget[tuple[[<ast.Name object at 0x7da207f9a440>, <ast.Name object at 0x7da207f994b0>]]] in starred[call[name[six].iteritems, parameter[name[hosts]]]] begin[:] if compare[name[hosts] is constant[None]] begin[:] call[name[container_ships]][name[alias]] assign[=] name[hosts] return[name[container_ships]]
keyword[def] identifier[_create_container_ships] ( identifier[self] , identifier[hosts] ): literal[string] identifier[container_ships] ={} keyword[if] identifier[hosts] : keyword[if] literal[string] keyword[not] keyword[in] identifier[hosts] : identifier[default_container_ship] = identifier[self] . identifier[_create_container_ship] ( keyword[None] ) identifier[container_ships] [ literal[string] ]={ identifier[default_container_ship] . identifier[url] . identifier[geturl] (): identifier[default_container_ship] } keyword[for] identifier[alias] , identifier[hosts] keyword[in] identifier[six] . identifier[iteritems] ( identifier[hosts] ): keyword[if] identifier[hosts] keyword[is] keyword[None] : identifier[container_ships] [ identifier[alias] ]= identifier[hosts] keyword[elif] identifier[isinstance] ( identifier[hosts] , identifier[list] ): identifier[container_ships] [ identifier[alias] ]={} keyword[for] identifier[host] keyword[in] identifier[hosts] : keyword[if] keyword[not] identifier[host] keyword[or] keyword[not] identifier[isinstance] ( identifier[host] , identifier[dict] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[alias] )) identifier[existing_container_ship] = keyword[None] keyword[for] identifier[container_ship_dict] keyword[in] identifier[six] . identifier[itervalues] ( identifier[container_ships] ): keyword[for] identifier[address] , identifier[container_ship] keyword[in] identifier[six] . identifier[iteritems] ( identifier[container_ship_dict] ): keyword[if] identifier[address] == identifier[host] . identifier[get] ( literal[string] ) keyword[and] identifier[address] keyword[not] keyword[in] identifier[container_ships] [ identifier[alias] ]: identifier[existing_container_ship] = identifier[container_ship] keyword[break] keyword[if] identifier[existing_container_ship] keyword[is] keyword[None] : identifier[container_ships] [ identifier[alias] ][ identifier[host] . identifier[get] ( literal[string] )]= identifier[self] . identifier[_create_container_ship] ( identifier[host] ) keyword[else] : identifier[container_ships] [ identifier[alias] ][ identifier[host] . identifier[get] ( literal[string] )]= identifier[existing_container_ship] keyword[else] : keyword[raise] identifier[ValueError] ( identifier[logger] . identifier[error] ( literal[string] . identifier[format] ( identifier[hosts] ))) keyword[else] : identifier[default_container_ship] = identifier[self] . identifier[_create_container_ship] ( keyword[None] ) identifier[container_ships] [ literal[string] ]={ identifier[default_container_ship] . identifier[url] . identifier[geturl] (): identifier[default_container_ship] } keyword[return] identifier[container_ships]
def _create_container_ships(self, hosts): """ :param hosts: :return: """ container_ships = {} if hosts: if 'default' not in hosts: default_container_ship = self._create_container_ship(None) container_ships['default'] = {default_container_ship.url.geturl(): default_container_ship} # depends on [control=['if'], data=[]] for (alias, hosts) in six.iteritems(hosts): if hosts is None: container_ships[alias] = hosts # depends on [control=['if'], data=['hosts']] elif isinstance(hosts, list): container_ships[alias] = {} for host in hosts: if not host or not isinstance(host, dict): raise ValueError('hosts: {0} is required to be a dict.'.format(alias)) # depends on [control=['if'], data=[]] existing_container_ship = None for container_ship_dict in six.itervalues(container_ships): for (address, container_ship) in six.iteritems(container_ship_dict): if address == host.get('address') and address not in container_ships[alias]: existing_container_ship = container_ship break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['container_ship_dict']] if existing_container_ship is None: container_ships[alias][host.get('address')] = self._create_container_ship(host) # depends on [control=['if'], data=[]] else: container_ships[alias][host.get('address')] = existing_container_ship # depends on [control=['for'], data=['host']] # depends on [control=['if'], data=[]] else: raise ValueError(logger.error('hosts is required to be a list or None. host: {0}'.format(hosts))) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: default_container_ship = self._create_container_ship(None) container_ships['default'] = {default_container_ship.url.geturl(): default_container_ship} return container_ships
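The hosts argument to _create_container_ships() is a mapping from alias to either None or a list of dicts carrying at least an 'address' key; an illustrative shape (addresses invented; note that a shared address is instantiated only once):

hosts = {
    "build":  [{"address": "tcp://10.0.0.5:2375"}],
    "deploy": [{"address": "tcp://10.0.0.5:2375"},   # reused from "build"
               {"address": "tcp://10.0.0.6:2375"}],
    "spare":  None,                                   # alias with no ships
}
# container_ships = self._create_container_ships(hosts)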
def list(self, include_claimed=False, echo=False, marker=None, limit=None):
        """
        Returns a list of messages for this queue. By default only
        unclaimed messages are returned; if you want claimed messages
        included, pass `include_claimed=True`. Also, the requester's own
        messages are not returned by default; if you want them included,
        pass `echo=True`.

        The 'marker' and 'limit' parameters are used to control pagination of
        results. 'Marker' is the ID of the last message returned, while
        'limit' controls the number of messages returned per request
        (default=20).
        """
        return self._message_manager.list(include_claimed=include_claimed,
                echo=echo, marker=marker, limit=limit)
def function[list, parameter[self, include_claimed, echo, marker, limit]]: constant[ Returns a list of messages for this queue. By default only unclaimed messages are returned; if you want claimed messages included, pass `include_claimed=True`. Also, the requester's own messages are not returned by default; if you want them included, pass `echo=True`. The 'marker' and 'limit' parameters are used to control pagination of results. 'Marker' is the ID of the last message returned, while 'limit' controls the number of messages returned per request (default=20). ] return[call[name[self]._message_manager.list, parameter[]]]
keyword[def] identifier[list] ( identifier[self] , identifier[include_claimed] = keyword[False] , identifier[echo] = keyword[False] , identifier[marker] = keyword[None] , identifier[limit] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_message_manager] . identifier[list] ( identifier[include_claimed] = identifier[include_claimed] , identifier[echo] = identifier[echo] , identifier[marker] = identifier[marker] , identifier[limit] = identifier[limit] )
def list(self, include_claimed=False, echo=False, marker=None, limit=None):
    """
    Returns a list of messages for this queue. By default only
    unclaimed messages are returned; if you want claimed messages
    included, pass `include_claimed=True`. Also, the requester's own
    messages are not returned by default; if you want them included,
    pass `echo=True`.

    The 'marker' and 'limit' parameters are used to control pagination of
    results. 'Marker' is the ID of the last message returned, while
    'limit' controls the number of messages returned per request
    (default=20).
    """
    return self._message_manager.list(include_claimed=include_claimed, echo=echo, marker=marker, limit=limit)
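A hypothetical pagination loop built on list() above (the queue object and the message id attribute are assumptions for illustration):

marker = None
while True:
    msgs = queue.list(include_claimed=True, echo=True, marker=marker, limit=50)
    if not msgs:
        break
    for msg in msgs:
        print(msg)
    marker = msgs[-1].id  # assumed: messages expose their ID as `.id`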
def GetFileLines(filename, newline=None, encoding=None): ''' Reads a file and returns its contents as a list of lines. Works for both local and remote files. :param unicode filename: :param None|''|'\n'|'\r'|'\r\n' newline: Controls universal newlines. See 'io.open' newline parameter documentation for more details. :param unicode encoding: File's encoding. If not None, contents obtained from file will be decoded using this `encoding`. :returns list(unicode): The file's lines .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information ''' return GetFileContents( filename, binary=False, encoding=encoding, newline=newline, ).split('\n')
def function[GetFileLines, parameter[filename, newline, encoding]]: constant[ Reads a file and returns its contents as a list of lines. Works for both local and remote files. :param unicode filename: :param None|''|' '|' '|' ' newline: Controls universal newlines. See 'io.open' newline parameter documentation for more details. :param unicode encoding: File's encoding. If not None, contents obtained from file will be decoded using this `encoding`. :returns list(unicode): The file's lines .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information ] return[call[call[name[GetFileContents], parameter[name[filename]]].split, parameter[constant[ ]]]]
keyword[def] identifier[GetFileLines] ( identifier[filename] , identifier[newline] = keyword[None] , identifier[encoding] = keyword[None] ): literal[string] keyword[return] identifier[GetFileContents] ( identifier[filename] , identifier[binary] = keyword[False] , identifier[encoding] = identifier[encoding] , identifier[newline] = identifier[newline] , ). identifier[split] ( literal[string] )
def GetFileLines(filename, newline=None, encoding=None):
    """
    Reads a file and returns its contents as a list of lines.
    Works for both local and remote files.

    :param unicode filename:

    :param None|''|'\n'|'\r'|'\r\n' newline:
        Controls universal newlines. See 'io.open' newline parameter documentation for more details.

    :param unicode encoding:
        File's encoding. If not None, contents obtained from file will be decoded using this `encoding`.

    :returns list(unicode):
        The file's lines

    .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information
    """
    return GetFileContents(filename, binary=False, encoding=encoding, newline=newline).split('\n')
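A hypothetical call to GetFileLines() (the path is illustrative); the lines come back already decoded and split on '\n':

for lineno, line in enumerate(GetFileLines("notes.txt", encoding="utf-8"), 1):
    print(lineno, line)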
def get_session_token(self):
        """
        Use the access token to request a new session token
        """
        # self.logging.info('Getting session token')
        # Rather than testing any previous session tokens to see if they are still valid, simply delete old tokens in
        # preparation of the creation of new ones
        try:
            os.remove(os.path.join(self.file_path, 'session_token'))
        except FileNotFoundError:
            pass
        # Create a new session
        session_request = OAuth1Session(self.consumer_key,
                                        self.consumer_secret,
                                        access_token=self.access_token,
                                        access_token_secret=self.access_secret)
        # Perform a GET request with the appropriate keys and tokens
        r = session_request.get(self.session_token_url)
        # If the status code is '200' (OK), proceed
        if r.status_code == 200:
            # Save the JSON-decoded token secret and token
            self.session_token = r.json()['oauth_token']
            self.session_secret = r.json()['oauth_token_secret']
            # Write the token and secret to file
            self.write_token('session_token', self.session_token, self.session_secret)
        # Any other status than 200 is considered a failure
        else:
            print('Failed:')
            print(r.json()['message'])
def function[get_session_token, parameter[self]]: constant[ Use the accession token to request a new session token ] <ast.Try object at 0x7da1b1eedc60> variable[session_request] assign[=] call[name[OAuth1Session], parameter[name[self].consumer_key, name[self].consumer_secret]] variable[r] assign[=] call[name[session_request].get, parameter[name[self].session_token_url]] if compare[name[r].status_code equal[==] constant[200]] begin[:] name[self].session_token assign[=] call[call[name[r].json, parameter[]]][constant[oauth_token]] name[self].session_secret assign[=] call[call[name[r].json, parameter[]]][constant[oauth_token_secret]] call[name[self].write_token, parameter[constant[session_token], name[self].session_token, name[self].session_secret]]
keyword[def] identifier[get_session_token] ( identifier[self] ): literal[string] keyword[try] : identifier[os] . identifier[remove] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[file_path] , literal[string] )) keyword[except] identifier[FileNotFoundError] : keyword[pass] identifier[session_request] = identifier[OAuth1Session] ( identifier[self] . identifier[consumer_key] , identifier[self] . identifier[consumer_secret] , identifier[access_token] = identifier[self] . identifier[access_token] , identifier[access_token_secret] = identifier[self] . identifier[access_secret] ) identifier[r] = identifier[session_request] . identifier[get] ( identifier[self] . identifier[session_token_url] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : identifier[self] . identifier[session_token] = identifier[r] . identifier[json] ()[ literal[string] ] identifier[self] . identifier[session_secret] = identifier[r] . identifier[json] ()[ literal[string] ] identifier[self] . identifier[write_token] ( literal[string] , identifier[self] . identifier[session_token] , identifier[self] . identifier[session_secret] ) keyword[else] : identifier[print] ( literal[string] ) identifier[print] ( identifier[r] . identifier[json] ()[ literal[string] ])
def get_session_token(self): """ Use the accession token to request a new session token """ # self.logging.info('Getting session token') # Rather than testing any previous session tokens to see if they are still valid, simply delete old tokens in # preparation of the creation of new ones try: os.remove(os.path.join(self.file_path, 'session_token')) # depends on [control=['try'], data=[]] except FileNotFoundError: pass # depends on [control=['except'], data=[]] # Create a new session session_request = OAuth1Session(self.consumer_key, self.consumer_secret, access_token=self.access_token, access_token_secret=self.access_secret) # Perform a GET request with the appropriate keys and tokens r = session_request.get(self.session_token_url) # If the status code is '200' (OK), proceed if r.status_code == 200: # Save the JSON-decoded token secret and token self.session_token = r.json()['oauth_token'] self.session_secret = r.json()['oauth_token_secret'] # Write the token and secret to file self.write_token('session_token', self.session_token, self.session_secret) # depends on [control=['if'], data=[]] else: # Any other status than 200 is considered a failure print('Failed:') print(r.json()['message'])
def reproject_to_grid_coordinates(self, grid_coordinates, interp=gdalconst.GRA_NearestNeighbour): """ Reprojects data in this layer to match that in the GridCoordinates object. """ source_dataset = self.grid_coordinates._as_gdal_dataset() dest_dataset = grid_coordinates._as_gdal_dataset() rb = source_dataset.GetRasterBand(1) rb.SetNoDataValue(NO_DATA_VALUE) rb.WriteArray(np.ma.filled(self.raster_data, NO_DATA_VALUE)) gdal.ReprojectImage(source_dataset, dest_dataset, source_dataset.GetProjection(), dest_dataset.GetProjection(), interp) dest_layer = self.clone_traits() dest_layer.grid_coordinates = grid_coordinates rb = dest_dataset.GetRasterBand(1) dest_layer.raster_data = np.ma.masked_values(rb.ReadAsArray(), NO_DATA_VALUE) return dest_layer
def function[reproject_to_grid_coordinates, parameter[self, grid_coordinates, interp]]: constant[ Reprojects data in this layer to match that in the GridCoordinates object. ] variable[source_dataset] assign[=] call[name[self].grid_coordinates._as_gdal_dataset, parameter[]] variable[dest_dataset] assign[=] call[name[grid_coordinates]._as_gdal_dataset, parameter[]] variable[rb] assign[=] call[name[source_dataset].GetRasterBand, parameter[constant[1]]] call[name[rb].SetNoDataValue, parameter[name[NO_DATA_VALUE]]] call[name[rb].WriteArray, parameter[call[name[np].ma.filled, parameter[name[self].raster_data, name[NO_DATA_VALUE]]]]] call[name[gdal].ReprojectImage, parameter[name[source_dataset], name[dest_dataset], call[name[source_dataset].GetProjection, parameter[]], call[name[dest_dataset].GetProjection, parameter[]], name[interp]]] variable[dest_layer] assign[=] call[name[self].clone_traits, parameter[]] name[dest_layer].grid_coordinates assign[=] name[grid_coordinates] variable[rb] assign[=] call[name[dest_dataset].GetRasterBand, parameter[constant[1]]] name[dest_layer].raster_data assign[=] call[name[np].ma.masked_values, parameter[call[name[rb].ReadAsArray, parameter[]], name[NO_DATA_VALUE]]] return[name[dest_layer]]
keyword[def] identifier[reproject_to_grid_coordinates] ( identifier[self] , identifier[grid_coordinates] , identifier[interp] = identifier[gdalconst] . identifier[GRA_NearestNeighbour] ): literal[string] identifier[source_dataset] = identifier[self] . identifier[grid_coordinates] . identifier[_as_gdal_dataset] () identifier[dest_dataset] = identifier[grid_coordinates] . identifier[_as_gdal_dataset] () identifier[rb] = identifier[source_dataset] . identifier[GetRasterBand] ( literal[int] ) identifier[rb] . identifier[SetNoDataValue] ( identifier[NO_DATA_VALUE] ) identifier[rb] . identifier[WriteArray] ( identifier[np] . identifier[ma] . identifier[filled] ( identifier[self] . identifier[raster_data] , identifier[NO_DATA_VALUE] )) identifier[gdal] . identifier[ReprojectImage] ( identifier[source_dataset] , identifier[dest_dataset] , identifier[source_dataset] . identifier[GetProjection] (), identifier[dest_dataset] . identifier[GetProjection] (), identifier[interp] ) identifier[dest_layer] = identifier[self] . identifier[clone_traits] () identifier[dest_layer] . identifier[grid_coordinates] = identifier[grid_coordinates] identifier[rb] = identifier[dest_dataset] . identifier[GetRasterBand] ( literal[int] ) identifier[dest_layer] . identifier[raster_data] = identifier[np] . identifier[ma] . identifier[masked_values] ( identifier[rb] . identifier[ReadAsArray] (), identifier[NO_DATA_VALUE] ) keyword[return] identifier[dest_layer]
def reproject_to_grid_coordinates(self, grid_coordinates, interp=gdalconst.GRA_NearestNeighbour): """ Reprojects data in this layer to match that in the GridCoordinates object. """ source_dataset = self.grid_coordinates._as_gdal_dataset() dest_dataset = grid_coordinates._as_gdal_dataset() rb = source_dataset.GetRasterBand(1) rb.SetNoDataValue(NO_DATA_VALUE) rb.WriteArray(np.ma.filled(self.raster_data, NO_DATA_VALUE)) gdal.ReprojectImage(source_dataset, dest_dataset, source_dataset.GetProjection(), dest_dataset.GetProjection(), interp) dest_layer = self.clone_traits() dest_layer.grid_coordinates = grid_coordinates rb = dest_dataset.GetRasterBand(1) dest_layer.raster_data = np.ma.masked_values(rb.ReadAsArray(), NO_DATA_VALUE) return dest_layer
def get_data(img_path): """get the (1, 3, h, w) np.array data for the supplied image Args: img_path (string): the input image path Returns: np.array: image data in a (1, 3, h, w) shape """ mean = np.array([123.68, 116.779, 103.939]) # (R,G,B) img = Image.open(img_path) img = np.array(img, dtype=np.float32) reshaped_mean = mean.reshape(1, 1, 3) img = img - reshaped_mean img = np.swapaxes(img, 0, 2) img = np.swapaxes(img, 1, 2) img = np.expand_dims(img, axis=0) return img
def function[get_data, parameter[img_path]]: constant[get the (1, 3, h, w) np.array data for the supplied image Args: img_path (string): the input image path Returns: np.array: image data in a (1, 3, h, w) shape ] variable[mean] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da1b2066950>, <ast.Constant object at 0x7da1b2064af0>, <ast.Constant object at 0x7da1b2064550>]]]] variable[img] assign[=] call[name[Image].open, parameter[name[img_path]]] variable[img] assign[=] call[name[np].array, parameter[name[img]]] variable[reshaped_mean] assign[=] call[name[mean].reshape, parameter[constant[1], constant[1], constant[3]]] variable[img] assign[=] binary_operation[name[img] - name[reshaped_mean]] variable[img] assign[=] call[name[np].swapaxes, parameter[name[img], constant[0], constant[2]]] variable[img] assign[=] call[name[np].swapaxes, parameter[name[img], constant[1], constant[2]]] variable[img] assign[=] call[name[np].expand_dims, parameter[name[img]]] return[name[img]]
keyword[def] identifier[get_data] ( identifier[img_path] ): literal[string] identifier[mean] = identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] ]) identifier[img] = identifier[Image] . identifier[open] ( identifier[img_path] ) identifier[img] = identifier[np] . identifier[array] ( identifier[img] , identifier[dtype] = identifier[np] . identifier[float32] ) identifier[reshaped_mean] = identifier[mean] . identifier[reshape] ( literal[int] , literal[int] , literal[int] ) identifier[img] = identifier[img] - identifier[reshaped_mean] identifier[img] = identifier[np] . identifier[swapaxes] ( identifier[img] , literal[int] , literal[int] ) identifier[img] = identifier[np] . identifier[swapaxes] ( identifier[img] , literal[int] , literal[int] ) identifier[img] = identifier[np] . identifier[expand_dims] ( identifier[img] , identifier[axis] = literal[int] ) keyword[return] identifier[img]
def get_data(img_path): """get the (1, 3, h, w) np.array data for the supplied image Args: img_path (string): the input image path Returns: np.array: image data in a (1, 3, h, w) shape """ mean = np.array([123.68, 116.779, 103.939]) # (R,G,B) img = Image.open(img_path) img = np.array(img, dtype=np.float32) reshaped_mean = mean.reshape(1, 1, 3) img = img - reshaped_mean img = np.swapaxes(img, 0, 2) img = np.swapaxes(img, 1, 2) img = np.expand_dims(img, axis=0) return img
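The preprocessing above boils down to mean subtraction followed by an HWC-to-CHW transpose and a batch dimension. A small shape walkthrough on a synthetic array (no PIL or image file needed):

import numpy as np

img = np.zeros((224, 224, 3), dtype=np.float32)             # (h, w, 3) RGB
img -= np.array([123.68, 116.779, 103.939]).reshape(1, 1, 3)
img = np.swapaxes(img, 0, 2)                                # (3, w, h)
img = np.swapaxes(img, 1, 2)                                # (3, h, w)
img = np.expand_dims(img, axis=0)                           # (1, 3, h, w)
assert img.shape == (1, 3, 224, 224)

The two swapaxes calls together are equivalent to a single transpose(2, 0, 1).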
def expand_rmf_matrix(rmf): """Expand an RMF matrix stored in compressed form. *rmf* An RMF object as might be returned by ``sherpa.astro.ui.get_rmf()``. Returns: A non-sparse RMF matrix. The Response Matrix Function (RMF) of an X-ray telescope like Chandra can be stored in a sparse format as defined in `OGIP Calibration Memo CAL/GEN/92-002 <https://heasarc.gsfc.nasa.gov/docs/heasarc/caldb/docs/memos/cal_gen_92_002/cal_gen_92_002.html>`_. For visualization and analysis purposes, it can be useful to de-sparsify the matrices stored in this way. This function does that, returning a two-dimensional Numpy array. """ n_chan = rmf.e_min.size n_energy = rmf.n_grp.size expanded = np.zeros((n_energy, n_chan)) mtx_ofs = 0 grp_ofs = 0 for i in range(n_energy): for j in range(rmf.n_grp[i]): f = rmf.f_chan[grp_ofs] n = rmf.n_chan[grp_ofs] expanded[i,f:f+n] = rmf.matrix[mtx_ofs:mtx_ofs+n] mtx_ofs += n grp_ofs += 1 return expanded
def function[expand_rmf_matrix, parameter[rmf]]: constant[Expand an RMF matrix stored in compressed form. *rmf* An RMF object as might be returned by ``sherpa.astro.ui.get_rmf()``. Returns: A non-sparse RMF matrix. The Response Matrix Function (RMF) of an X-ray telescope like Chandra can be stored in a sparse format as defined in `OGIP Calibration Memo CAL/GEN/92-002 <https://heasarc.gsfc.nasa.gov/docs/heasarc/caldb/docs/memos/cal_gen_92_002/cal_gen_92_002.html>`_. For visualization and analysis purposes, it can be useful to de-sparsify the matrices stored in this way. This function does that, returning a two-dimensional Numpy array. ] variable[n_chan] assign[=] name[rmf].e_min.size variable[n_energy] assign[=] name[rmf].n_grp.size variable[expanded] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b27aafe0>, <ast.Name object at 0x7da1b27ab850>]]]] variable[mtx_ofs] assign[=] constant[0] variable[grp_ofs] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[name[n_energy]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[call[name[rmf].n_grp][name[i]]]]] begin[:] variable[f] assign[=] call[name[rmf].f_chan][name[grp_ofs]] variable[n] assign[=] call[name[rmf].n_chan][name[grp_ofs]] call[name[expanded]][tuple[[<ast.Name object at 0x7da1b26b6fe0>, <ast.Slice object at 0x7da1b26b6d40>]]] assign[=] call[name[rmf].matrix][<ast.Slice object at 0x7da1b26b67d0>] <ast.AugAssign object at 0x7da1b26b69b0> <ast.AugAssign object at 0x7da1b27a63e0> return[name[expanded]]
keyword[def] identifier[expand_rmf_matrix] ( identifier[rmf] ): literal[string] identifier[n_chan] = identifier[rmf] . identifier[e_min] . identifier[size] identifier[n_energy] = identifier[rmf] . identifier[n_grp] . identifier[size] identifier[expanded] = identifier[np] . identifier[zeros] (( identifier[n_energy] , identifier[n_chan] )) identifier[mtx_ofs] = literal[int] identifier[grp_ofs] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_energy] ): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[rmf] . identifier[n_grp] [ identifier[i] ]): identifier[f] = identifier[rmf] . identifier[f_chan] [ identifier[grp_ofs] ] identifier[n] = identifier[rmf] . identifier[n_chan] [ identifier[grp_ofs] ] identifier[expanded] [ identifier[i] , identifier[f] : identifier[f] + identifier[n] ]= identifier[rmf] . identifier[matrix] [ identifier[mtx_ofs] : identifier[mtx_ofs] + identifier[n] ] identifier[mtx_ofs] += identifier[n] identifier[grp_ofs] += literal[int] keyword[return] identifier[expanded]
def expand_rmf_matrix(rmf): """Expand an RMF matrix stored in compressed form. *rmf* An RMF object as might be returned by ``sherpa.astro.ui.get_rmf()``. Returns: A non-sparse RMF matrix. The Response Matrix Function (RMF) of an X-ray telescope like Chandra can be stored in a sparse format as defined in `OGIP Calibration Memo CAL/GEN/92-002 <https://heasarc.gsfc.nasa.gov/docs/heasarc/caldb/docs/memos/cal_gen_92_002/cal_gen_92_002.html>`_. For visualization and analysis purposes, it can be useful to de-sparsify the matrices stored in this way. This function does that, returning a two-dimensional Numpy array. """ n_chan = rmf.e_min.size n_energy = rmf.n_grp.size expanded = np.zeros((n_energy, n_chan)) mtx_ofs = 0 grp_ofs = 0 for i in range(n_energy): for j in range(rmf.n_grp[i]): f = rmf.f_chan[grp_ofs] n = rmf.n_chan[grp_ofs] expanded[i, f:f + n] = rmf.matrix[mtx_ofs:mtx_ofs + n] mtx_ofs += n grp_ofs += 1 # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['i']] return expanded
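To see the unpacking concretely, here is a tiny synthetic RMF built with SimpleNamespace: two energy bins over four channels, one channel group per bin. Note that real OGIP files may store 1-based f_chan values; this toy uses 0-based indices to match the code as written.

import numpy as np
from types import SimpleNamespace

rmf = SimpleNamespace(
    e_min=np.zeros(4),                       # only .size is used (n_chan = 4)
    n_grp=np.array([1, 1]),                  # one channel group per energy row
    f_chan=np.array([0, 2]),                 # first channel of each group
    n_chan=np.array([2, 2]),                 # channels in each group
    matrix=np.array([0.7, 0.3, 0.4, 0.6]),   # concatenated group values
)

print(expand_rmf_matrix(rmf))
# [[0.7 0.3 0.  0. ]
#  [0.  0.  0.4 0.6]]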
def script_to_address(script, vbyte=0): ''' Like script_to_address but supports altcoins Copied 2015-10-02 from https://github.com/mflaxman/pybitcointools/blob/faf56c53148989ea390238c3c4541a6ae1d601f5/bitcoin/transaction.py#L224-L236 ''' if re.match('^[0-9a-fA-F]*$', script): script = binascii.unhexlify(script) if script[:3] == b'\x76\xa9\x14' and script[-2:] == b'\x88\xac' and len(script) == 25: return bin_to_b58check(script[3:-2], vbyte) # pubkey hash addresses else: if vbyte in [111, 196]: # Testnet scripthash_byte = 196 else: scripthash_byte = vbyte # BIP0016 scripthash addresses return bin_to_b58check(script[2:-1], scripthash_byte)
def function[script_to_address, parameter[script, vbyte]]: constant[ Like script_to_address but supports altcoins Copied 2015-10-02 from https://github.com/mflaxman/pybitcointools/blob/faf56c53148989ea390238c3c4541a6ae1d601f5/bitcoin/transaction.py#L224-L236 ] if call[name[re].match, parameter[constant[^[0-9a-fA-F]*$], name[script]]] begin[:] variable[script] assign[=] call[name[binascii].unhexlify, parameter[name[script]]] if <ast.BoolOp object at 0x7da1b07f6bc0> begin[:] return[call[name[bin_to_b58check], parameter[call[name[script]][<ast.Slice object at 0x7da1b07f62f0>], name[vbyte]]]]
keyword[def] identifier[script_to_address] ( identifier[script] , identifier[vbyte] = literal[int] ): literal[string] keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[script] ): identifier[script] = identifier[binascii] . identifier[unhexlify] ( identifier[script] ) keyword[if] identifier[script] [: literal[int] ]== literal[string] keyword[and] identifier[script] [- literal[int] :]== literal[string] keyword[and] identifier[len] ( identifier[script] )== literal[int] : keyword[return] identifier[bin_to_b58check] ( identifier[script] [ literal[int] :- literal[int] ], identifier[vbyte] ) keyword[else] : keyword[if] identifier[vbyte] keyword[in] [ literal[int] , literal[int] ]: identifier[scripthash_byte] = literal[int] keyword[else] : identifier[scripthash_byte] = identifier[vbyte] keyword[return] identifier[bin_to_b58check] ( identifier[script] [ literal[int] :- literal[int] ], identifier[scripthash_byte] )
def script_to_address(script, vbyte=0): """ Like script_to_address but supports altcoins Copied 2015-10-02 from https://github.com/mflaxman/pybitcointools/blob/faf56c53148989ea390238c3c4541a6ae1d601f5/bitcoin/transaction.py#L224-L236 """ if re.match('^[0-9a-fA-F]*$', script): script = binascii.unhexlify(script) # depends on [control=['if'], data=[]] if script[:3] == b'v\xa9\x14' and script[-2:] == b'\x88\xac' and (len(script) == 25): return bin_to_b58check(script[3:-2], vbyte) # pubkey hash addresses # depends on [control=['if'], data=[]] else: if vbyte in [111, 196]: # Testnet scripthash_byte = 196 # depends on [control=['if'], data=[]] else: scripthash_byte = vbyte # BIP0016 scripthash addresses return bin_to_b58check(script[2:-1], scripthash_byte)
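A usage sketch, assuming bin_to_b58check is available from the same pybitcointools-style module; the hex string is a standard P2PKH locking script (25 bytes once unhexlified, so it takes the pubkey-hash branch):

# OP_DUP OP_HASH160 <20-byte hash> OP_EQUALVERIFY OP_CHECKSIG
p2pkh = '76a914' + '00' * 20 + '88ac'

print(script_to_address(p2pkh, vbyte=0))    # mainnet address for that hash
print(script_to_address(p2pkh, vbyte=111))  # testnet counterpart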
def version(self): """Returns the device's version. The device's version is returned as a string of the format: M.mr where ``M`` is major number, ``m`` is minor number, and ``r`` is revision character. Args: self (JLink): the ``JLink`` instance Returns: Device version string. """ version = int(self._dll.JLINKARM_GetDLLVersion()) major = version / 10000 minor = (version / 100) % 100 rev = version % 100 rev = '' if rev == 0 else chr(rev + ord('a') - 1) return '%d.%02d%s' % (major, minor, rev)
def function[version, parameter[self]]: constant[Returns the device's version. The device's version is returned as a string of the format: M.mr where ``M`` is major number, ``m`` is minor number, and ``r`` is revision character. Args: self (JLink): the ``JLink`` instance Returns: Device version string. ] variable[version] assign[=] call[name[int], parameter[call[name[self]._dll.JLINKARM_GetDLLVersion, parameter[]]]] variable[major] assign[=] binary_operation[name[version] / constant[10000]] variable[minor] assign[=] binary_operation[binary_operation[name[version] / constant[100]] <ast.Mod object at 0x7da2590d6920> constant[100]] variable[rev] assign[=] binary_operation[name[version] <ast.Mod object at 0x7da2590d6920> constant[100]] variable[rev] assign[=] <ast.IfExp object at 0x7da1b17dc340> return[binary_operation[constant[%d.%02d%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b17dda20>, <ast.Name object at 0x7da1b17ddf90>, <ast.Name object at 0x7da1b17de800>]]]]
keyword[def] identifier[version] ( identifier[self] ): literal[string] identifier[version] = identifier[int] ( identifier[self] . identifier[_dll] . identifier[JLINKARM_GetDLLVersion] ()) identifier[major] = identifier[version] / literal[int] identifier[minor] =( identifier[version] / literal[int] )% literal[int] identifier[rev] = identifier[version] % literal[int] identifier[rev] = literal[string] keyword[if] identifier[rev] == literal[int] keyword[else] identifier[chr] ( identifier[rev] + identifier[ord] ( literal[string] )- literal[int] ) keyword[return] literal[string] %( identifier[major] , identifier[minor] , identifier[rev] )
def version(self): """Returns the device's version. The device's version is returned as a string of the format: M.mr where ``M`` is major number, ``m`` is minor number, and ``r`` is revision character. Args: self (JLink): the ``JLink`` instance Returns: Device version string. """ version = int(self._dll.JLINKARM_GetDLLVersion()) major = version / 10000 minor = version / 100 % 100 rev = version % 100 rev = '' if rev == 0 else chr(rev + ord('a') - 1) return '%d.%02d%s' % (major, minor, rev)
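A worked decoding of the arithmetic above: a raw DLL value of 63412 yields major 6, minor 34, and revision code 12, which maps to the character 'l'. The sketch uses floor division `//`; the original `/` produces floats under Python 3, which `%d` then truncates to the same result.

version = 63412
major = version // 10000                            # 6
minor = (version // 100) % 100                      # 34
rev = version % 100                                 # 12
rev = '' if rev == 0 else chr(rev + ord('a') - 1)   # 'l'
print('%d.%02d%s' % (major, minor, rev))            # 6.34l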
def _fetch_objects(self, key, value): """Fetch Multiple linked objects""" return self.to_cls.query.filter(**{key: value})
def function[_fetch_objects, parameter[self, key, value]]: constant[Fetch Multiple linked objects] return[call[name[self].to_cls.query.filter, parameter[]]]
keyword[def] identifier[_fetch_objects] ( identifier[self] , identifier[key] , identifier[value] ): literal[string] keyword[return] identifier[self] . identifier[to_cls] . identifier[query] . identifier[filter] (**{ identifier[key] : identifier[value] })
def _fetch_objects(self, key, value): """Fetch Multiple linked objects""" return self.to_cls.query.filter(**{key: value})
def tempo_account_add_account(self, data=None):
    """
    Creates an Account; adding a new Account requires the Manage Accounts permission.
    :param data: the account fields; a string will be converted to JSON
    :return:
    """
    url = 'rest/tempo-accounts/1/account/'
    if data is None:
        return """Please, provide data e.g.
                   {name: "12312312321",
                    key: "1231231232",
                    lead: {name: "myusername"},
                   }
                   detail info: http://developer.tempo.io/doc/accounts/api/rest/latest/#-700314780
                """
    return self.post(url, data=data)
def function[tempo_account_add_account, parameter[self, data]]: constant[ Creates Account, adding new Account requires the Manage Accounts Permission. :param data: String then it will convert to json :return: ] variable[url] assign[=] constant[rest/tempo-accounts/1/account/] if compare[name[data] is constant[None]] begin[:] return[constant[Please, provide data e.g. {name: "12312312321", key: "1231231232", lead: {name: "myusername"}, } detail info: http://developer.tempo.io/doc/accounts/api/rest/latest/#-700314780 ]] return[call[name[self].post, parameter[name[url]]]]
keyword[def] identifier[tempo_account_add_account] ( identifier[self] , identifier[data] = keyword[None] ): literal[string] identifier[url] = literal[string] keyword[if] identifier[data] keyword[is] keyword[None] : keyword[return] literal[string] keyword[return] identifier[self] . identifier[post] ( identifier[url] , identifier[data] = identifier[data] )
def tempo_account_add_account(self, data=None): """ Creates Account, adding new Account requires the Manage Accounts Permission. :param data: String then it will convert to json :return: """ url = 'rest/tempo-accounts/1/account/' if data is None: return 'Please, provide data e.g.\n {name: "12312312321",\n key: "1231231232",\n lead: {name: "myusername"},\n }\n detail info: http://developer.tempo.io/doc/accounts/api/rest/latest/#-700314780\n ' # depends on [control=['if'], data=[]] return self.post(url, data=data)
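A hypothetical call matching the payload shape given in the docstring; `jira` stands in for whatever client instance this method lives on, and the field values are illustrative only:

jira.tempo_account_add_account(data={
    'name': 'Internal R&D',            # illustrative values only
    'key': 'RND001',
    'lead': {'name': 'myusername'},
})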
def create_objective(self, objective_form=None): """Creates a new Objective. arg: objectiveForm (osid.learning.ObjectiveForm): the form for this Objective return: (osid.learning.Objective) - the new Objective raise: IllegalState - objectiveForm already used in a create transaction raise: InvalidArgument - one or more of the form elements is invalid raise: NullArgument - objectiveForm is null raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - objectiveForm did not originate from get_objective_form_for_create() compliance: mandatory - This method must be implemented. """ if objective_form is None: raise NullArgument() if not isinstance(objective_form, abc_learning_objects.ObjectiveForm): raise InvalidArgument('argument type is not an ObjectiveForm') if objective_form.is_for_update(): raise InvalidArgument('form is for update only, not create') try: if self._forms[objective_form.get_id().get_identifier()] == CREATED: raise IllegalState('form already used in a create transaction') except KeyError: raise Unsupported('form did not originate from this session') if not objective_form.is_valid(): raise InvalidArgument('one or more of the form elements is invalid') url_path = construct_url('objectives', bank_id=self._catalog_idstr) try: result = self._post_request(url_path, objective_form._my_map) except Exception: raise # OperationFailed self._forms[objective_form.get_id().get_identifier()] = CREATED return objects.Objective(result)
def function[create_objective, parameter[self, objective_form]]: constant[Creates a new Objective. arg: objectiveForm (osid.learning.ObjectiveForm): the form for this Objective return: (osid.learning.Objective) - the new Objective raise: IllegalState - objectiveForm already used in a create transaction raise: InvalidArgument - one or more of the form elements is invalid raise: NullArgument - objectiveForm is null raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - objectiveForm did not originate from get_objective_form_for_create() compliance: mandatory - This method must be implemented. ] if compare[name[objective_form] is constant[None]] begin[:] <ast.Raise object at 0x7da204622aa0> if <ast.UnaryOp object at 0x7da2046228f0> begin[:] <ast.Raise object at 0x7da204623f40> if call[name[objective_form].is_for_update, parameter[]] begin[:] <ast.Raise object at 0x7da204622ef0> <ast.Try object at 0x7da2046220b0> if <ast.UnaryOp object at 0x7da204620a90> begin[:] <ast.Raise object at 0x7da204622e60> variable[url_path] assign[=] call[name[construct_url], parameter[constant[objectives]]] <ast.Try object at 0x7da204622140> call[name[self]._forms][call[call[name[objective_form].get_id, parameter[]].get_identifier, parameter[]]] assign[=] name[CREATED] return[call[name[objects].Objective, parameter[name[result]]]]
keyword[def] identifier[create_objective] ( identifier[self] , identifier[objective_form] = keyword[None] ): literal[string] keyword[if] identifier[objective_form] keyword[is] keyword[None] : keyword[raise] identifier[NullArgument] () keyword[if] keyword[not] identifier[isinstance] ( identifier[objective_form] , identifier[abc_learning_objects] . identifier[ObjectiveForm] ): keyword[raise] identifier[InvalidArgument] ( literal[string] ) keyword[if] identifier[objective_form] . identifier[is_for_update] (): keyword[raise] identifier[InvalidArgument] ( literal[string] ) keyword[try] : keyword[if] identifier[self] . identifier[_forms] [ identifier[objective_form] . identifier[get_id] (). identifier[get_identifier] ()]== identifier[CREATED] : keyword[raise] identifier[IllegalState] ( literal[string] ) keyword[except] identifier[KeyError] : keyword[raise] identifier[Unsupported] ( literal[string] ) keyword[if] keyword[not] identifier[objective_form] . identifier[is_valid] (): keyword[raise] identifier[InvalidArgument] ( literal[string] ) identifier[url_path] = identifier[construct_url] ( literal[string] , identifier[bank_id] = identifier[self] . identifier[_catalog_idstr] ) keyword[try] : identifier[result] = identifier[self] . identifier[_post_request] ( identifier[url_path] , identifier[objective_form] . identifier[_my_map] ) keyword[except] identifier[Exception] : keyword[raise] identifier[self] . identifier[_forms] [ identifier[objective_form] . identifier[get_id] (). identifier[get_identifier] ()]= identifier[CREATED] keyword[return] identifier[objects] . identifier[Objective] ( identifier[result] )
def create_objective(self, objective_form=None): """Creates a new Objective. arg: objectiveForm (osid.learning.ObjectiveForm): the form for this Objective return: (osid.learning.Objective) - the new Objective raise: IllegalState - objectiveForm already used in a create transaction raise: InvalidArgument - one or more of the form elements is invalid raise: NullArgument - objectiveForm is null raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - objectiveForm did not originate from get_objective_form_for_create() compliance: mandatory - This method must be implemented. """ if objective_form is None: raise NullArgument() # depends on [control=['if'], data=[]] if not isinstance(objective_form, abc_learning_objects.ObjectiveForm): raise InvalidArgument('argument type is not an ObjectiveForm') # depends on [control=['if'], data=[]] if objective_form.is_for_update(): raise InvalidArgument('form is for update only, not create') # depends on [control=['if'], data=[]] try: if self._forms[objective_form.get_id().get_identifier()] == CREATED: raise IllegalState('form already used in a create transaction') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except KeyError: raise Unsupported('form did not originate from this session') # depends on [control=['except'], data=[]] if not objective_form.is_valid(): raise InvalidArgument('one or more of the form elements is invalid') # depends on [control=['if'], data=[]] url_path = construct_url('objectives', bank_id=self._catalog_idstr) try: result = self._post_request(url_path, objective_form._my_map) # depends on [control=['try'], data=[]] except Exception: raise # OperationFailed # depends on [control=['except'], data=[]] self._forms[objective_form.get_id().get_identifier()] = CREATED return objects.Objective(result)
def naturalize_thing(self, string): """ Make a naturalized version of a general string, not a person's name. e.g., title of a book, a band's name, etc. string -- a lowercase string. """ # Things we want to move to the back of the string: articles = [ 'a', 'an', 'the', 'un', 'une', 'le', 'la', 'les', "l'", "l’", 'ein', 'eine', 'der', 'die', 'das', 'una', 'el', 'los', 'las', ] sort_string = string parts = string.split(' ') if len(parts) > 1 and parts[0] in articles: if parts[0] != parts[1]: # Don't do this if the name is 'The The' or 'La La Land'. # Makes 'long blondes, the': sort_string = '{}, {}'.format(' '.join(parts[1:]), parts[0]) sort_string = self._naturalize_numbers(sort_string) return sort_string
def function[naturalize_thing, parameter[self, string]]: constant[ Make a naturalized version of a general string, not a person's name. e.g., title of a book, a band's name, etc. string -- a lowercase string. ] variable[articles] assign[=] list[[<ast.Constant object at 0x7da20e9b2bc0>, <ast.Constant object at 0x7da20e9b0220>, <ast.Constant object at 0x7da20e9b05e0>, <ast.Constant object at 0x7da20e9b1ba0>, <ast.Constant object at 0x7da20e9b2d70>, <ast.Constant object at 0x7da20e9b2200>, <ast.Constant object at 0x7da20e9b2b30>, <ast.Constant object at 0x7da20e9b35b0>, <ast.Constant object at 0x7da20e9b13c0>, <ast.Constant object at 0x7da20e9b07f0>, <ast.Constant object at 0x7da20e9b26e0>, <ast.Constant object at 0x7da20e9b2710>, <ast.Constant object at 0x7da20e9b2440>, <ast.Constant object at 0x7da20e9b00a0>, <ast.Constant object at 0x7da20e9b3b50>, <ast.Constant object at 0x7da20e9b3820>, <ast.Constant object at 0x7da20e9b2c80>, <ast.Constant object at 0x7da20e9b06a0>, <ast.Constant object at 0x7da20e9b39d0>]] variable[sort_string] assign[=] name[string] variable[parts] assign[=] call[name[string].split, parameter[constant[ ]]] if <ast.BoolOp object at 0x7da20e9b1ae0> begin[:] if compare[call[name[parts]][constant[0]] not_equal[!=] call[name[parts]][constant[1]]] begin[:] variable[sort_string] assign[=] call[constant[{}, {}].format, parameter[call[constant[ ].join, parameter[call[name[parts]][<ast.Slice object at 0x7da20e956b60>]]], call[name[parts]][constant[0]]]] variable[sort_string] assign[=] call[name[self]._naturalize_numbers, parameter[name[sort_string]]] return[name[sort_string]]
keyword[def] identifier[naturalize_thing] ( identifier[self] , identifier[string] ): literal[string] identifier[articles] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , ] identifier[sort_string] = identifier[string] identifier[parts] = identifier[string] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[parts] )> literal[int] keyword[and] identifier[parts] [ literal[int] ] keyword[in] identifier[articles] : keyword[if] identifier[parts] [ literal[int] ]!= identifier[parts] [ literal[int] ]: identifier[sort_string] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[parts] [ literal[int] :]), identifier[parts] [ literal[int] ]) identifier[sort_string] = identifier[self] . identifier[_naturalize_numbers] ( identifier[sort_string] ) keyword[return] identifier[sort_string]
def naturalize_thing(self, string): """ Make a naturalized version of a general string, not a person's name. e.g., title of a book, a band's name, etc. string -- a lowercase string. """ # Things we want to move to the back of the string: articles = ['a', 'an', 'the', 'un', 'une', 'le', 'la', 'les', "l'", 'l’', 'ein', 'eine', 'der', 'die', 'das', 'una', 'el', 'los', 'las'] sort_string = string parts = string.split(' ') if len(parts) > 1 and parts[0] in articles: if parts[0] != parts[1]: # Don't do this if the name is 'The The' or 'La La Land'. # Makes 'long blondes, the': sort_string = '{}, {}'.format(' '.join(parts[1:]), parts[0]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] sort_string = self._naturalize_numbers(sort_string) return sort_string
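A small illustration of the article handling, using a stub class so the method can run on its own; the real _naturalize_numbers additionally zero-pads digit runs, which these inputs do not exercise:

class _Demo:
    naturalize_thing = naturalize_thing   # reuse the method defined above

    def _naturalize_numbers(self, s):
        return s                          # stub: no digits to rewrite here

d = _Demo()
print(d.naturalize_thing('the long blondes'))  # 'long blondes, the'
print(d.naturalize_thing('la la land'))        # unchanged: repeated article
print(d.naturalize_thing('big star'))          # unchanged: no leading article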
def switches(self): """ List of all switches currently registered. """ results = [ switch for name, switch in self.storage.iteritems() if name.startswith(self.__joined_namespace) ] return results
def function[switches, parameter[self]]: constant[ List of all switches currently registered. ] variable[results] assign[=] <ast.ListComp object at 0x7da18f09d150> return[name[results]]
keyword[def] identifier[switches] ( identifier[self] ): literal[string] identifier[results] =[ identifier[switch] keyword[for] identifier[name] , identifier[switch] keyword[in] identifier[self] . identifier[storage] . identifier[iteritems] () keyword[if] identifier[name] . identifier[startswith] ( identifier[self] . identifier[__joined_namespace] ) ] keyword[return] identifier[results]
def switches(self): """ List of all switches currently registered. """ results = [switch for (name, switch) in self.storage.iteritems() if name.startswith(self.__joined_namespace)] return results
def provider(cls, note, provider=None, name=False): """Register a provider, either a Provider class or a generator. Provider class:: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.provider('hello') class HelloProvider(Provider): def get(self, name=None): if name is None: name = 'world' return 'Hello, {}!'.format(name) Simple generator:: @Injector.provider('answer') def answer(): yield 42 If a generator supports get with a name argument:: @Injector.provider('spam', name=True) def spam(): count_str = yield 'spam' while True: count_str = yield 'spam' * int(count_str) Registration can be a decorator or a direct method call:: Injector.provider('hello', HelloProvider) """ def decorator(provider): if inspect.isgeneratorfunction(provider): # Automatically adapt generator functions provider = cls.generator_provider.bind( provider, support_name=name) return decorator(provider) cls.register(note, provider) return provider if provider is not None: decorator(provider) else: return decorator
def function[provider, parameter[cls, note, provider, name]]: constant[Register a provider, either a Provider class or a generator. Provider class:: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.provider('hello') class HelloProvider(Provider): def get(self, name=None): if name is None: name = 'world' return 'Hello, {}!'.format(name) Simple generator:: @Injector.provider('answer') def answer(): yield 42 If a generator supports get with a name argument:: @Injector.provider('spam', name=True) def spam(): count_str = yield 'spam' while True: count_str = yield 'spam' * int(count_str) Registration can be a decorator or a direct method call:: Injector.provider('hello', HelloProvider) ] def function[decorator, parameter[provider]]: if call[name[inspect].isgeneratorfunction, parameter[name[provider]]] begin[:] variable[provider] assign[=] call[name[cls].generator_provider.bind, parameter[name[provider]]] return[call[name[decorator], parameter[name[provider]]]] call[name[cls].register, parameter[name[note], name[provider]]] return[name[provider]] if compare[name[provider] is_not constant[None]] begin[:] call[name[decorator], parameter[name[provider]]]
keyword[def] identifier[provider] ( identifier[cls] , identifier[note] , identifier[provider] = keyword[None] , identifier[name] = keyword[False] ): literal[string] keyword[def] identifier[decorator] ( identifier[provider] ): keyword[if] identifier[inspect] . identifier[isgeneratorfunction] ( identifier[provider] ): identifier[provider] = identifier[cls] . identifier[generator_provider] . identifier[bind] ( identifier[provider] , identifier[support_name] = identifier[name] ) keyword[return] identifier[decorator] ( identifier[provider] ) identifier[cls] . identifier[register] ( identifier[note] , identifier[provider] ) keyword[return] identifier[provider] keyword[if] identifier[provider] keyword[is] keyword[not] keyword[None] : identifier[decorator] ( identifier[provider] ) keyword[else] : keyword[return] identifier[decorator]
def provider(cls, note, provider=None, name=False): """Register a provider, either a Provider class or a generator. Provider class:: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.provider('hello') class HelloProvider(Provider): def get(self, name=None): if name is None: name = 'world' return 'Hello, {}!'.format(name) Simple generator:: @Injector.provider('answer') def answer(): yield 42 If a generator supports get with a name argument:: @Injector.provider('spam', name=True) def spam(): count_str = yield 'spam' while True: count_str = yield 'spam' * int(count_str) Registration can be a decorator or a direct method call:: Injector.provider('hello', HelloProvider) """ def decorator(provider): if inspect.isgeneratorfunction(provider): # Automatically adapt generator functions provider = cls.generator_provider.bind(provider, support_name=name) return decorator(provider) # depends on [control=['if'], data=[]] cls.register(note, provider) return provider if provider is not None: decorator(provider) # depends on [control=['if'], data=['provider']] else: return decorator
def remove_all_annotations_from_tier(self, id_tier, clean=True): """remove all annotations from a tier :param str id_tier: Name of the tier. :raises KeyError: If the tier is non existent. """ for aid in self.tiers[id_tier][0]: del(self.annotations[aid]) for aid in self.tiers[id_tier][1]: del(self.annotations[aid]) self.tiers[id_tier][0].clear() self.tiers[id_tier][1].clear() if clean: self.clean_time_slots()
def function[remove_all_annotations_from_tier, parameter[self, id_tier, clean]]: constant[remove all annotations from a tier :param str id_tier: Name of the tier. :raises KeyError: If the tier is non existent. ] for taget[name[aid]] in starred[call[call[name[self].tiers][name[id_tier]]][constant[0]]] begin[:] <ast.Delete object at 0x7da1b0216fb0> for taget[name[aid]] in starred[call[call[name[self].tiers][name[id_tier]]][constant[1]]] begin[:] <ast.Delete object at 0x7da1b0215de0> call[call[call[name[self].tiers][name[id_tier]]][constant[0]].clear, parameter[]] call[call[call[name[self].tiers][name[id_tier]]][constant[1]].clear, parameter[]] if name[clean] begin[:] call[name[self].clean_time_slots, parameter[]]
keyword[def] identifier[remove_all_annotations_from_tier] ( identifier[self] , identifier[id_tier] , identifier[clean] = keyword[True] ): literal[string] keyword[for] identifier[aid] keyword[in] identifier[self] . identifier[tiers] [ identifier[id_tier] ][ literal[int] ]: keyword[del] ( identifier[self] . identifier[annotations] [ identifier[aid] ]) keyword[for] identifier[aid] keyword[in] identifier[self] . identifier[tiers] [ identifier[id_tier] ][ literal[int] ]: keyword[del] ( identifier[self] . identifier[annotations] [ identifier[aid] ]) identifier[self] . identifier[tiers] [ identifier[id_tier] ][ literal[int] ]. identifier[clear] () identifier[self] . identifier[tiers] [ identifier[id_tier] ][ literal[int] ]. identifier[clear] () keyword[if] identifier[clean] : identifier[self] . identifier[clean_time_slots] ()
def remove_all_annotations_from_tier(self, id_tier, clean=True): """remove all annotations from a tier :param str id_tier: Name of the tier. :raises KeyError: If the tier is non existent. """ for aid in self.tiers[id_tier][0]: del self.annotations[aid] # depends on [control=['for'], data=['aid']] for aid in self.tiers[id_tier][1]: del self.annotations[aid] # depends on [control=['for'], data=['aid']] self.tiers[id_tier][0].clear() self.tiers[id_tier][1].clear() if clean: self.clean_time_slots() # depends on [control=['if'], data=[]]
def minutes_for_sessions_in_range(self, start_session_label, end_session_label): """ Returns all the minutes for all the sessions from the given start session label to the given end session label, inclusive. Parameters ---------- start_session_label: pd.Timestamp The label of the first session in the range. end_session_label: pd.Timestamp The label of the last session in the range. Returns ------- pd.DatetimeIndex The minutes in the desired range. """ first_minute, _ = self.open_and_close_for_session(start_session_label) _, last_minute = self.open_and_close_for_session(end_session_label) return self.minutes_in_range(first_minute, last_minute)
def function[minutes_for_sessions_in_range, parameter[self, start_session_label, end_session_label]]: constant[ Returns all the minutes for all the sessions from the given start session label to the given end session label, inclusive. Parameters ---------- start_session_label: pd.Timestamp The label of the first session in the range. end_session_label: pd.Timestamp The label of the last session in the range. Returns ------- pd.DatetimeIndex The minutes in the desired range. ] <ast.Tuple object at 0x7da18fe90af0> assign[=] call[name[self].open_and_close_for_session, parameter[name[start_session_label]]] <ast.Tuple object at 0x7da18fe90250> assign[=] call[name[self].open_and_close_for_session, parameter[name[end_session_label]]] return[call[name[self].minutes_in_range, parameter[name[first_minute], name[last_minute]]]]
keyword[def] identifier[minutes_for_sessions_in_range] ( identifier[self] , identifier[start_session_label] , identifier[end_session_label] ): literal[string] identifier[first_minute] , identifier[_] = identifier[self] . identifier[open_and_close_for_session] ( identifier[start_session_label] ) identifier[_] , identifier[last_minute] = identifier[self] . identifier[open_and_close_for_session] ( identifier[end_session_label] ) keyword[return] identifier[self] . identifier[minutes_in_range] ( identifier[first_minute] , identifier[last_minute] )
def minutes_for_sessions_in_range(self, start_session_label, end_session_label): """ Returns all the minutes for all the sessions from the given start session label to the given end session label, inclusive. Parameters ---------- start_session_label: pd.Timestamp The label of the first session in the range. end_session_label: pd.Timestamp The label of the last session in the range. Returns ------- pd.DatetimeIndex The minutes in the desired range. """ (first_minute, _) = self.open_and_close_for_session(start_session_label) (_, last_minute) = self.open_and_close_for_session(end_session_label) return self.minutes_in_range(first_minute, last_minute)
def send_frame(self, cmd, headers=None, body=''): """ Encode and send a stomp frame through the underlying transport: :param str cmd: the protocol command :param dict headers: a map of headers to include in the frame :param body: the content of the message """ if cmd != CMD_CONNECT: if headers is None: headers = {} self._escape_headers(headers) frame = utils.Frame(cmd, headers, body) self.transport.transmit(frame)
def function[send_frame, parameter[self, cmd, headers, body]]: constant[ Encode and send a stomp frame through the underlying transport: :param str cmd: the protocol command :param dict headers: a map of headers to include in the frame :param body: the content of the message ] if compare[name[cmd] not_equal[!=] name[CMD_CONNECT]] begin[:] if compare[name[headers] is constant[None]] begin[:] variable[headers] assign[=] dictionary[[], []] call[name[self]._escape_headers, parameter[name[headers]]] variable[frame] assign[=] call[name[utils].Frame, parameter[name[cmd], name[headers], name[body]]] call[name[self].transport.transmit, parameter[name[frame]]]
keyword[def] identifier[send_frame] ( identifier[self] , identifier[cmd] , identifier[headers] = keyword[None] , identifier[body] = literal[string] ): literal[string] keyword[if] identifier[cmd] != identifier[CMD_CONNECT] : keyword[if] identifier[headers] keyword[is] keyword[None] : identifier[headers] ={} identifier[self] . identifier[_escape_headers] ( identifier[headers] ) identifier[frame] = identifier[utils] . identifier[Frame] ( identifier[cmd] , identifier[headers] , identifier[body] ) identifier[self] . identifier[transport] . identifier[transmit] ( identifier[frame] )
def send_frame(self, cmd, headers=None, body=''): """ Encode and send a stomp frame through the underlying transport: :param str cmd: the protocol command :param dict headers: a map of headers to include in the frame :param body: the content of the message """ if cmd != CMD_CONNECT: if headers is None: headers = {} # depends on [control=['if'], data=['headers']] self._escape_headers(headers) # depends on [control=['if'], data=[]] frame = utils.Frame(cmd, headers, body) self.transport.transmit(frame)
def execute(self, command, args):
    """ Event firing and exception conversion around command execution.

    Common exceptions are run through our exception handler for
    pretty-printing or debugging and then converted to SystemExit so the
    interpreter will exit without further ado (or be caught if
    interactive). """
    self.fire_event('precmd', command, args)
    try:
        try:
            result = command.run_wrap(args)
        except BaseException as e:
            self.fire_event('postcmd', command, args, exc=e)
            raise e
        else:
            self.fire_event('postcmd', command, args, result=result)
            return result
    except BrokenPipeError as e:
        _vprinterr('<dim><red>...broken pipe...</red></dim>')
        raise SystemExit(1) from e
    except KeyboardInterrupt as e:
        _vprinterr('<dim><red>...interrupted...</red></dim>')
        raise SystemExit(1) from e
    except SystemExit as e:
        if e.args and not isinstance(e.args[0], int):
            _vprinterr("<red>%s</red>" % e)
            raise SystemExit(1) from e
        raise e
    except Exception as e:
        self.handle_command_error(command, args, e)
        raise SystemExit(1) from e
def function[execute, parameter[self, command, args]]: constant[ Event firing and exception conversion around command execution. Common exceptions are run through our exception handler for pretty-printing or debugging and then converted to SystemExit so the interpretor will exit without further ado (or be caught if interactive). ] call[name[self].fire_event, parameter[constant[precmd], name[command], name[args]]] <ast.Try object at 0x7da1b162a560>
keyword[def] identifier[execute] ( identifier[self] , identifier[command] , identifier[args] ): literal[string] identifier[self] . identifier[fire_event] ( literal[string] , identifier[command] , identifier[args] ) keyword[try] : keyword[try] : identifier[result] = identifier[command] . identifier[run_wrap] ( identifier[args] ) keyword[except] identifier[BaseException] keyword[as] identifier[e] : identifier[self] . identifier[fire_event] ( literal[string] , identifier[command] , identifier[args] , identifier[exc] = identifier[e] ) keyword[raise] identifier[e] keyword[else] : identifier[self] . identifier[fire_event] ( literal[string] , identifier[command] , identifier[args] , identifier[result] = identifier[result] ) keyword[return] identifier[result] keyword[except] identifier[BrokenPipeError] keyword[as] identifier[e] : identifier[_vprinterr] ( literal[string] ) keyword[raise] identifier[SystemExit] ( literal[int] ) keyword[from] identifier[e] keyword[except] identifier[KeyboardInterrupt] keyword[as] identifier[e] : identifier[_vprinterr] ( literal[string] ) keyword[raise] identifier[SystemExit] ( literal[int] ) keyword[from] identifier[e] keyword[except] identifier[SystemExit] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[args] keyword[and] keyword[not] identifier[isinstance] ( identifier[e] . identifier[args] [ literal[int] ], identifier[int] ): identifier[_vprinterr] ( literal[string] % identifier[e] ) keyword[raise] identifier[SystemExit] ( literal[int] ) keyword[from] identifier[e] keyword[raise] identifier[e] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[handle_command_error] ( identifier[command] , identifier[args] , identifier[e] ) keyword[raise] identifier[SystemExit] ( literal[int] ) keyword[from] identifier[e]
def execute(self, command, args): """ Event firing and exception conversion around command execution. Common exceptions are run through our exception handler for pretty-printing or debugging and then converted to SystemExit so the interpretor will exit without further ado (or be caught if interactive). """ self.fire_event('precmd', command, args) try: try: result = command.run_wrap(args) # depends on [control=['try'], data=[]] except BaseException as e: self.fire_event('postcmd', command, args, exc=e) raise e # depends on [control=['except'], data=['e']] else: self.fire_event('postcmd', command, args, result=result) return result # depends on [control=['try'], data=[]] except BrokenPipeError as e: _vprinterr('<dim><red>...broken pipe...</red></dim>') raise SystemExit(1) from e # depends on [control=['except'], data=['e']] except KeyboardInterrupt as e: _vprinterr('<dim><red>...interrupted...</red></dim>') raise SystemExit(1) from e # depends on [control=['except'], data=['e']] except SystemExit as e: if e.args and (not isinstance(e.args[0], int)): _vprinterr('<red>%s</red>' % e) raise SystemExit(1) from e # depends on [control=['if'], data=[]] raise e # depends on [control=['except'], data=['e']] except Exception as e: self.handle_command_error(command, args, e) raise SystemExit(1) from e # depends on [control=['except'], data=['e']]
def getAsKmlGrid(self, session, path=None, documentName=None, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=1.0, noDataValue=None):
    """
    Retrieve the raster as a KML document with each cell of the raster represented as a vector polygon. The result is a vector grid of raster cells. Cells with the no data value are excluded.

    Args:
        session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session object bound to PostGIS enabled database.
        path (str, optional): Path to file where KML file will be written. Defaults to None.
        documentName (str, optional): Name of the KML document. This will be the name that appears in the legend. Defaults to the raster's filename.
        colorRamp (:mod:`mapkit.ColorRampGenerator.ColorRampEnum` or dict, optional): Use ColorRampEnum to select a default color ramp or a dictionary with keys 'colors' and 'interpolatedPoints' to specify a custom color ramp. The 'colors' key must be a list of RGB integer tuples (e.g.: (255, 0, 0)) and the 'interpolatedPoints' must be an integer representing the number of points to interpolate between each color given in the colors list.
        alpha (float, optional): Set transparency of visualization. Value between 0.0 and 1.0 where 1.0 is 100% opaque and 0.0 is 100% transparent. Defaults to 1.0.
        noDataValue (float, optional): The value to treat as no data when generating visualizations of rasters. Defaults to the raster's default no data value.

    Returns:
        str: KML string
    """
    if type(self.raster) != type(None):
        # Set Document Name
        if documentName is None:
            try:
                documentName = self.filename
            except AttributeError:
                documentName = 'default'

        # Set no data value to default
        if noDataValue is None:
            noDataValue = self.defaultNoDataValue

        # Make sure the raster field is valid
        converter = RasterConverter(sqlAlchemyEngineOrSession=session)

        # Configure color ramp
        if isinstance(colorRamp, dict):
            converter.setCustomColorRamp(colorRamp['colors'], colorRamp['interpolatedPoints'])
        else:
            converter.setDefaultColorRamp(colorRamp)

        kmlString = converter.getAsKmlGrid(tableName=self.tableName,
                                           rasterId=self.id,
                                           rasterIdFieldName='id',
                                           rasterFieldName=self.rasterColumnName,
                                           documentName=documentName,
                                           alpha=alpha,
                                           noDataValue=noDataValue,
                                           discreet=self.discreet)

        if path:
            with open(path, 'w') as f:
                f.write(kmlString)

        return kmlString
def function[getAsKmlGrid, parameter[self, session, path, documentName, colorRamp, alpha, noDataValue]]: constant[ Retrieve the raster as a KML document with each cell of the raster represented as a vector polygon. The result is a vector grid of raster cells. Cells with the no data value are excluded. Args: session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session object bound to PostGIS enabled database. path (str, optional): Path to file where KML file will be written. Defaults to None. documentName (str, optional): Name of the KML document. This will be the name that appears in the legend. Defaults to 'Stream Network'. colorRamp (:mod:`mapkit.ColorRampGenerator.ColorRampEnum` or dict, optional): Use ColorRampEnum to select a default color ramp or a dictionary with keys 'colors' and 'interpolatedPoints' to specify a custom color ramp. The 'colors' key must be a list of RGB integer tuples (e.g.: (255, 0, 0)) and the 'interpolatedPoints' must be an integer representing the number of points to interpolate between each color given in the colors list. alpha (float, optional): Set transparency of visualization. Value between 0.0 and 1.0 where 1.0 is 100% opaque and 0.0 is 100% transparent. Defaults to 1.0. noDataValue (float, optional): The value to treat as no data when generating visualizations of rasters. Defaults to 0.0. Returns: str: KML string ] if compare[call[name[type], parameter[name[self].raster]] not_equal[!=] call[name[type], parameter[constant[None]]]] begin[:] if compare[name[documentName] is constant[None]] begin[:] <ast.Try object at 0x7da207f01540> if compare[name[noDataValue] is constant[None]] begin[:] variable[noDataValue] assign[=] name[self].defaultNoDataValue variable[converter] assign[=] call[name[RasterConverter], parameter[]] if call[name[isinstance], parameter[name[colorRamp], name[dict]]] begin[:] call[name[converter].setCustomColorRamp, parameter[call[name[colorRamp]][constant[colors]], call[name[colorRamp]][constant[interpolatedPoints]]]] variable[kmlString] assign[=] call[name[converter].getAsKmlGrid, parameter[]] if name[path] begin[:] with call[name[open], parameter[name[path], constant[w]]] begin[:] call[name[f].write, parameter[name[kmlString]]] return[name[kmlString]]
keyword[def] identifier[getAsKmlGrid] ( identifier[self] , identifier[session] , identifier[path] = keyword[None] , identifier[documentName] = keyword[None] , identifier[colorRamp] = identifier[ColorRampEnum] . identifier[COLOR_RAMP_HUE] , identifier[alpha] = literal[int] , identifier[noDataValue] = keyword[None] ): literal[string] keyword[if] identifier[type] ( identifier[self] . identifier[raster] )!= identifier[type] ( keyword[None] ): keyword[if] identifier[documentName] keyword[is] keyword[None] : keyword[try] : identifier[documentName] = identifier[self] . identifier[filename] keyword[except] identifier[AttributeError] : identifier[documentName] = literal[string] keyword[if] identifier[noDataValue] keyword[is] keyword[None] : identifier[noDataValue] = identifier[self] . identifier[defaultNoDataValue] identifier[converter] = identifier[RasterConverter] ( identifier[sqlAlchemyEngineOrSession] = identifier[session] ) keyword[if] identifier[isinstance] ( identifier[colorRamp] , identifier[dict] ): identifier[converter] . identifier[setCustomColorRamp] ( identifier[colorRamp] [ literal[string] ], identifier[colorRamp] [ literal[string] ]) keyword[else] : identifier[converter] . identifier[setDefaultColorRamp] ( identifier[colorRamp] ) identifier[kmlString] = identifier[converter] . identifier[getAsKmlGrid] ( identifier[tableName] = identifier[self] . identifier[tableName] , identifier[rasterId] = identifier[self] . identifier[id] , identifier[rasterIdFieldName] = literal[string] , identifier[rasterFieldName] = identifier[self] . identifier[rasterColumnName] , identifier[documentName] = identifier[documentName] , identifier[alpha] = identifier[alpha] , identifier[noDataValue] = identifier[noDataValue] , identifier[discreet] = identifier[self] . identifier[discreet] ) keyword[if] identifier[path] : keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[kmlString] ) keyword[return] identifier[kmlString]
def getAsKmlGrid(self, session, path=None, documentName=None, colorRamp=ColorRampEnum.COLOR_RAMP_HUE, alpha=1.0, noDataValue=None): """ Retrieve the raster as a KML document with each cell of the raster represented as a vector polygon. The result is a vector grid of raster cells. Cells with the no data value are excluded. Args: session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy session object bound to PostGIS enabled database. path (str, optional): Path to file where KML file will be written. Defaults to None. documentName (str, optional): Name of the KML document. This will be the name that appears in the legend. Defaults to 'Stream Network'. colorRamp (:mod:`mapkit.ColorRampGenerator.ColorRampEnum` or dict, optional): Use ColorRampEnum to select a default color ramp or a dictionary with keys 'colors' and 'interpolatedPoints' to specify a custom color ramp. The 'colors' key must be a list of RGB integer tuples (e.g.: (255, 0, 0)) and the 'interpolatedPoints' must be an integer representing the number of points to interpolate between each color given in the colors list. alpha (float, optional): Set transparency of visualization. Value between 0.0 and 1.0 where 1.0 is 100% opaque and 0.0 is 100% transparent. Defaults to 1.0. noDataValue (float, optional): The value to treat as no data when generating visualizations of rasters. Defaults to 0.0. Returns: str: KML string """ if type(self.raster) != type(None): # Set Document Name if documentName is None: try: documentName = self.filename # depends on [control=['try'], data=[]] except AttributeError: documentName = 'default' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['documentName']] # Set no data value to default if noDataValue is None: noDataValue = self.defaultNoDataValue # depends on [control=['if'], data=['noDataValue']] # Make sure the raster field is valid converter = RasterConverter(sqlAlchemyEngineOrSession=session) # Configure color ramp if isinstance(colorRamp, dict): converter.setCustomColorRamp(colorRamp['colors'], colorRamp['interpolatedPoints']) # depends on [control=['if'], data=[]] else: converter.setDefaultColorRamp(colorRamp) kmlString = converter.getAsKmlGrid(tableName=self.tableName, rasterId=self.id, rasterIdFieldName='id', rasterFieldName=self.rasterColumnName, documentName=documentName, alpha=alpha, noDataValue=noDataValue, discreet=self.discreet) if path: with open(path, 'w') as f: f.write(kmlString) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] return kmlString # depends on [control=['if'], data=[]]
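A hypothetical usage sketch for the method above; raster_record stands in for an instance of the mapped raster class and session for a SQLAlchemy session bound to a PostGIS-enabled database (both names are assumptions, not part of the source).
# Hypothetical names: raster_record and session are assumed to exist already.
kml_string = raster_record.getAsKmlGrid(session,
                                        path='/tmp/raster_grid.kml',
                                        documentName='Elevation Grid',
                                        alpha=0.8)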
def compute_distance(a, b): ''' Computes a modified Levenshtein distance between two strings, comparing the lowercase versions of each string and accounting for QWERTY distance. Arguments: - a (str) String to compare to 'b' - b (str) String to compare to 'a' Returns: - (int) Number representing closeness of 'a' and 'b' (lower is better) ''' # check simple cases first if not a: return len(b) if not b: return len(a) if a == b or str.lower(a) == str.lower(b): return 0 # lowercase each string a = str.lower(a) b = str.lower(b) # create empty vectors to store costs vector_1 = [-1] * (len(b) + 1) vector_2 = [-1] * (len(b) + 1) # set default values for i in range(len(vector_1)): vector_1[i] = i # compute distance for i in range(len(a)): vector_2[0] = i + 1 for j in range(len(b)): penalty = 0 if a[i] == b[j] else compute_qwerty_distance(a[i], b[j]) vector_2[j + 1] = min(vector_2[j] + 1, vector_1[j + 1] + 1, vector_1[j] + penalty) for j in range(len(vector_1)): vector_1[j] = vector_2[j] return vector_2[len(b)]
def function[compute_distance, parameter[a, b]]: constant[ Computes a modified Levenshtein distance between two strings, comparing the lowercase versions of each string and accounting for QWERTY distance. Arguments: - a (str) String to compare to 'b' - b (str) String to compare to 'a' Returns: - (int) Number representing closeness of 'a' and 'b' (lower is better) ] if <ast.UnaryOp object at 0x7da204620a60> begin[:] return[call[name[len], parameter[name[b]]]] if <ast.UnaryOp object at 0x7da204622e00> begin[:] return[call[name[len], parameter[name[a]]]] if <ast.BoolOp object at 0x7da204620220> begin[:] return[constant[0]] variable[a] assign[=] call[name[str].lower, parameter[name[a]]] variable[b] assign[=] call[name[str].lower, parameter[name[b]]] variable[vector_1] assign[=] binary_operation[list[[<ast.UnaryOp object at 0x7da204622530>]] * binary_operation[call[name[len], parameter[name[b]]] + constant[1]]] variable[vector_2] assign[=] binary_operation[list[[<ast.UnaryOp object at 0x7da2046223e0>]] * binary_operation[call[name[len], parameter[name[b]]] + constant[1]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[vector_1]]]]]] begin[:] call[name[vector_1]][name[i]] assign[=] name[i] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[a]]]]]] begin[:] call[name[vector_2]][constant[0]] assign[=] binary_operation[name[i] + constant[1]] for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[b]]]]]] begin[:] variable[penalty] assign[=] <ast.IfExp object at 0x7da18dc98670> call[name[vector_2]][binary_operation[name[j] + constant[1]]] assign[=] call[name[min], parameter[binary_operation[call[name[vector_2]][name[j]] + constant[1]], binary_operation[call[name[vector_1]][binary_operation[name[j] + constant[1]]] + constant[1]], binary_operation[call[name[vector_1]][name[j]] + name[penalty]]]] for taget[name[j]] in starred[call[name[range], parameter[call[name[len], parameter[name[vector_1]]]]]] begin[:] call[name[vector_1]][name[j]] assign[=] call[name[vector_2]][name[j]] return[call[name[vector_2]][call[name[len], parameter[name[b]]]]]
keyword[def] identifier[compute_distance] ( identifier[a] , identifier[b] ): literal[string] keyword[if] keyword[not] identifier[a] : keyword[return] identifier[len] ( identifier[b] ) keyword[if] keyword[not] identifier[b] : keyword[return] identifier[len] ( identifier[a] ) keyword[if] identifier[a] == identifier[b] keyword[or] identifier[str] . identifier[lower] ( identifier[a] )== identifier[str] . identifier[lower] ( identifier[b] ): keyword[return] literal[int] identifier[a] = identifier[str] . identifier[lower] ( identifier[a] ) identifier[b] = identifier[str] . identifier[lower] ( identifier[b] ) identifier[vector_1] =[- literal[int] ]*( identifier[len] ( identifier[b] )+ literal[int] ) identifier[vector_2] =[- literal[int] ]*( identifier[len] ( identifier[b] )+ literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[vector_1] )): identifier[vector_1] [ identifier[i] ]= identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[a] )): identifier[vector_2] [ literal[int] ]= identifier[i] + literal[int] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[b] )): identifier[penalty] = literal[int] keyword[if] identifier[a] [ identifier[i] ]== identifier[b] [ identifier[j] ] keyword[else] identifier[compute_qwerty_distance] ( identifier[a] [ identifier[i] ], identifier[b] [ identifier[j] ]) identifier[vector_2] [ identifier[j] + literal[int] ]= identifier[min] ( identifier[vector_2] [ identifier[j] ]+ literal[int] , identifier[vector_1] [ identifier[j] + literal[int] ]+ literal[int] , identifier[vector_1] [ identifier[j] ]+ identifier[penalty] ) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[vector_1] )): identifier[vector_1] [ identifier[j] ]= identifier[vector_2] [ identifier[j] ] keyword[return] identifier[vector_2] [ identifier[len] ( identifier[b] )]
def compute_distance(a, b): """ Computes a modified Levenshtein distance between two strings, comparing the lowercase versions of each string and accounting for QWERTY distance. Arguments: - a (str) String to compare to 'b' - b (str) String to compare to 'a' Returns: - (int) Number representing closeness of 'a' and 'b' (lower is better) """ # check simple cases first if not a: return len(b) # depends on [control=['if'], data=[]] if not b: return len(a) # depends on [control=['if'], data=[]] if a == b or str.lower(a) == str.lower(b): return 0 # depends on [control=['if'], data=[]] # lowercase each string a = str.lower(a) b = str.lower(b) # create empty vectors to store costs vector_1 = [-1] * (len(b) + 1) vector_2 = [-1] * (len(b) + 1) # set default values for i in range(len(vector_1)): vector_1[i] = i # depends on [control=['for'], data=['i']] # compute distance for i in range(len(a)): vector_2[0] = i + 1 for j in range(len(b)): penalty = 0 if a[i] == b[j] else compute_qwerty_distance(a[i], b[j]) vector_2[j + 1] = min(vector_2[j] + 1, vector_1[j + 1] + 1, vector_1[j] + penalty) # depends on [control=['for'], data=['j']] for j in range(len(vector_1)): vector_1[j] = vector_2[j] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] return vector_2[len(b)]
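Illustrative calls, assuming compute_qwerty_distance (referenced above but defined elsewhere in the module) returns non-negative penalties:
compute_distance('', 'abc')           # 3: empty input costs len(b)
compute_distance('Kitten', 'kitten')  # 0: comparison is case-insensitive
compute_distance('cat', 'cart')       # 1: one insertion, matched letters cost 0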
def get(self, key, prompt_default='', prompt_help=''):
        """
        Return the value for key from the environment or keyring.

        The keyring value is resolved from a local namespace or a global one.
        """
        # Pass the caller's prompt_help through instead of discarding it
        value = super(DjSecret, self).get(key, prompt_default, prompt_help=prompt_help)
        if not value and self.raise_on_none:
            error_msg = "The %s setting is undefined in the environment and djset %s" % (key, self._glob)
            raise self.raise_on_none(error_msg)
        return value
def function[get, parameter[self, key, prompt_default, prompt_help]]: constant[ Return the value for key from the environment or keyring. The keyring value is resolved from a local namespace or a global one. ] variable[value] assign[=] call[call[name[super], parameter[name[DjSecret], name[self]]].get, parameter[name[key], name[prompt_default]]] if <ast.BoolOp object at 0x7da18ede6d10> begin[:] variable[error_msg] assign[=] binary_operation[constant[The %s setting is undefined in the environment and djset %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18ede4a60>, <ast.Attribute object at 0x7da18ede7d60>]]] <ast.Raise object at 0x7da18ede5b10> return[name[value]]
keyword[def] identifier[get] ( identifier[self] , identifier[key] , identifier[prompt_default] = literal[string] , identifier[prompt_help] = literal[string] ): literal[string] identifier[value] = identifier[super] ( identifier[DjSecret] , identifier[self] ). identifier[get] ( identifier[key] , identifier[prompt_default] , identifier[prompt_help] = literal[string] ) keyword[if] keyword[not] identifier[value] keyword[and] identifier[self] . identifier[raise_on_none] : identifier[error_msg] = literal[string] %( identifier[key] , identifier[self] . identifier[_glob] ) keyword[raise] identifier[self] . identifier[raise_on_none] ( identifier[error_msg] ) keyword[return] identifier[value]
def get(self, key, prompt_default='', prompt_help=''):
    """
    Return the value for key from the environment or keyring.

    The keyring value is resolved from a local namespace or a global one.
    """
    # Pass the caller's prompt_help through instead of discarding it
    value = super(DjSecret, self).get(key, prompt_default, prompt_help=prompt_help)
    if not value and self.raise_on_none:
        error_msg = 'The %s setting is undefined in the environment and djset %s' % (key, self._glob)
        raise self.raise_on_none(error_msg) # depends on [control=['if'], data=[]]
    return value
def resize_image_to_fit_width(image, dest_w):
    """
    Resize an image to fit the passed-in width, keeping the aspect ratio the same

    :param image: PIL.Image
    :param dest_w: The desired width
    """
    scale_factor = dest_w / image.size[0]
    dest_h = image.size[1] * scale_factor
    scaled_image = image.resize((int(dest_w), int(dest_h)), PIL.Image.ANTIALIAS)
    return scaled_image
def function[resize_image_to_fit_width, parameter[image, dest_w]]: constant[ Resize and image to fit the passed in width, keeping the aspect ratio the same :param image: PIL.Image :param dest_w: The desired width ] variable[scale_factor] assign[=] binary_operation[name[dest_w] / call[name[image].size][constant[0]]] variable[dest_h] assign[=] binary_operation[call[name[image].size][constant[1]] * name[scale_factor]] variable[scaled_image] assign[=] call[name[image].resize, parameter[tuple[[<ast.Call object at 0x7da204621690>, <ast.Call object at 0x7da2046216c0>]], name[PIL].Image.ANTIALIAS]] return[name[scaled_image]]
keyword[def] identifier[resize_image_to_fit_width] ( identifier[image] , identifier[dest_w] ): literal[string] identifier[scale_factor] = identifier[dest_w] / identifier[image] . identifier[size] [ literal[int] ] identifier[dest_h] = identifier[image] . identifier[size] [ literal[int] ]* identifier[scale_factor] identifier[scaled_image] = identifier[image] . identifier[resize] (( identifier[int] ( identifier[dest_w] ), identifier[int] ( identifier[dest_h] )), identifier[PIL] . identifier[Image] . identifier[ANTIALIAS] ) keyword[return] identifier[scaled_image]
def resize_image_to_fit_width(image, dest_w): """ Resize an image to fit the passed-in width, keeping the aspect ratio the same :param image: PIL.Image :param dest_w: The desired width """ scale_factor = dest_w / image.size[0] dest_h = image.size[1] * scale_factor scaled_image = image.resize((int(dest_w), int(dest_h)), PIL.Image.ANTIALIAS) return scaled_image
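A minimal usage sketch (note that PIL.Image.ANTIALIAS is the pre-Pillow-10 name for the Lanczos filter):
import PIL.Image
img = PIL.Image.new('RGB', (800, 600))
thumb = resize_image_to_fit_width(img, 400)
print(thumb.size)  # (400, 300) -- aspect ratio preserved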
def sftp( task: Task, src: str, dst: str, action: str, dry_run: Optional[bool] = None ) -> Result: """ Transfer files from/to the device using sftp protocol Example:: nornir.run(files.sftp, action="put", src="README.md", dst="/tmp/README.md") Arguments: dry_run: Whether to apply changes or not src: source file dst: destination action: ``put``, ``get``. Returns: Result object with the following attributes set: * changed (``bool``): * files_changed (``list``): list of files that changed """ dry_run = task.is_dry_run(dry_run) actions = {"put": put, "get": get} client = task.host.get_connection("paramiko", task.nornir.config) scp_client = SCPClient(client.get_transport()) sftp_client = paramiko.SFTPClient.from_transport(client.get_transport()) files_changed = actions[action](task, scp_client, sftp_client, src, dst, dry_run) return Result( host=task.host, changed=bool(files_changed), files_changed=files_changed )
def function[sftp, parameter[task, src, dst, action, dry_run]]: constant[ Transfer files from/to the device using sftp protocol Example:: nornir.run(files.sftp, action="put", src="README.md", dst="/tmp/README.md") Arguments: dry_run: Whether to apply changes or not src: source file dst: destination action: ``put``, ``get``. Returns: Result object with the following attributes set: * changed (``bool``): * files_changed (``list``): list of files that changed ] variable[dry_run] assign[=] call[name[task].is_dry_run, parameter[name[dry_run]]] variable[actions] assign[=] dictionary[[<ast.Constant object at 0x7da1b1ce6440>, <ast.Constant object at 0x7da1b1ce53c0>], [<ast.Name object at 0x7da1b1ce4eb0>, <ast.Name object at 0x7da1b1ce77c0>]] variable[client] assign[=] call[name[task].host.get_connection, parameter[constant[paramiko], name[task].nornir.config]] variable[scp_client] assign[=] call[name[SCPClient], parameter[call[name[client].get_transport, parameter[]]]] variable[sftp_client] assign[=] call[name[paramiko].SFTPClient.from_transport, parameter[call[name[client].get_transport, parameter[]]]] variable[files_changed] assign[=] call[call[name[actions]][name[action]], parameter[name[task], name[scp_client], name[sftp_client], name[src], name[dst], name[dry_run]]] return[call[name[Result], parameter[]]]
keyword[def] identifier[sftp] ( identifier[task] : identifier[Task] , identifier[src] : identifier[str] , identifier[dst] : identifier[str] , identifier[action] : identifier[str] , identifier[dry_run] : identifier[Optional] [ identifier[bool] ]= keyword[None] )-> identifier[Result] : literal[string] identifier[dry_run] = identifier[task] . identifier[is_dry_run] ( identifier[dry_run] ) identifier[actions] ={ literal[string] : identifier[put] , literal[string] : identifier[get] } identifier[client] = identifier[task] . identifier[host] . identifier[get_connection] ( literal[string] , identifier[task] . identifier[nornir] . identifier[config] ) identifier[scp_client] = identifier[SCPClient] ( identifier[client] . identifier[get_transport] ()) identifier[sftp_client] = identifier[paramiko] . identifier[SFTPClient] . identifier[from_transport] ( identifier[client] . identifier[get_transport] ()) identifier[files_changed] = identifier[actions] [ identifier[action] ]( identifier[task] , identifier[scp_client] , identifier[sftp_client] , identifier[src] , identifier[dst] , identifier[dry_run] ) keyword[return] identifier[Result] ( identifier[host] = identifier[task] . identifier[host] , identifier[changed] = identifier[bool] ( identifier[files_changed] ), identifier[files_changed] = identifier[files_changed] )
def sftp(task: Task, src: str, dst: str, action: str, dry_run: Optional[bool]=None) -> Result: """ Transfer files from/to the device using sftp protocol Example:: nornir.run(files.sftp, action="put", src="README.md", dst="/tmp/README.md") Arguments: dry_run: Whether to apply changes or not src: source file dst: destination action: ``put``, ``get``. Returns: Result object with the following attributes set: * changed (``bool``): * files_changed (``list``): list of files that changed """ dry_run = task.is_dry_run(dry_run) actions = {'put': put, 'get': get} client = task.host.get_connection('paramiko', task.nornir.config) scp_client = SCPClient(client.get_transport()) sftp_client = paramiko.SFTPClient.from_transport(client.get_transport()) files_changed = actions[action](task, scp_client, sftp_client, src, dst, dry_run) return Result(host=task.host, changed=bool(files_changed), files_changed=files_changed)
def get_backspace_count(self, buffer): """ Given the input buffer, calculate how many backspaces are needed to erase the text that triggered this folder. """ if TriggerMode.ABBREVIATION in self.modes and self.backspace: if self._should_trigger_abbreviation(buffer): abbr = self._get_trigger_abbreviation(buffer) stringBefore, typedAbbr, stringAfter = self._partition_input(buffer, abbr) return len(abbr) + len(stringAfter) if self.parent is not None: return self.parent.get_backspace_count(buffer) return 0
def function[get_backspace_count, parameter[self, buffer]]: constant[ Given the input buffer, calculate how many backspaces are needed to erase the text that triggered this folder. ] if <ast.BoolOp object at 0x7da18eb578b0> begin[:] if call[name[self]._should_trigger_abbreviation, parameter[name[buffer]]] begin[:] variable[abbr] assign[=] call[name[self]._get_trigger_abbreviation, parameter[name[buffer]]] <ast.Tuple object at 0x7da20e9b0e80> assign[=] call[name[self]._partition_input, parameter[name[buffer], name[abbr]]] return[binary_operation[call[name[len], parameter[name[abbr]]] + call[name[len], parameter[name[stringAfter]]]]] if compare[name[self].parent is_not constant[None]] begin[:] return[call[name[self].parent.get_backspace_count, parameter[name[buffer]]]] return[constant[0]]
keyword[def] identifier[get_backspace_count] ( identifier[self] , identifier[buffer] ): literal[string] keyword[if] identifier[TriggerMode] . identifier[ABBREVIATION] keyword[in] identifier[self] . identifier[modes] keyword[and] identifier[self] . identifier[backspace] : keyword[if] identifier[self] . identifier[_should_trigger_abbreviation] ( identifier[buffer] ): identifier[abbr] = identifier[self] . identifier[_get_trigger_abbreviation] ( identifier[buffer] ) identifier[stringBefore] , identifier[typedAbbr] , identifier[stringAfter] = identifier[self] . identifier[_partition_input] ( identifier[buffer] , identifier[abbr] ) keyword[return] identifier[len] ( identifier[abbr] )+ identifier[len] ( identifier[stringAfter] ) keyword[if] identifier[self] . identifier[parent] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[parent] . identifier[get_backspace_count] ( identifier[buffer] ) keyword[return] literal[int]
def get_backspace_count(self, buffer): """ Given the input buffer, calculate how many backspaces are needed to erase the text that triggered this folder. """ if TriggerMode.ABBREVIATION in self.modes and self.backspace: if self._should_trigger_abbreviation(buffer): abbr = self._get_trigger_abbreviation(buffer) (stringBefore, typedAbbr, stringAfter) = self._partition_input(buffer, abbr) return len(abbr) + len(stringAfter) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.parent is not None: return self.parent.get_backspace_count(buffer) # depends on [control=['if'], data=[]] return 0
def ssh_compute_add_host_and_key(public_key, hostname, private_address,
                                 application_name, user=None):
    """Add a compute node's ssh details to the local cache.

    Collect various hostname variations and add the corresponding host keys to
    the local known hosts file. Finally, add the supplied public key to the
    authorized_key file.

    :param public_key: Public key.
    :type public_key: str
    :param hostname: Hostname to collect host keys from.
    :type hostname: str
    :param private_address: Corresponding private address for hostname
    :type private_address: str
    :param application_name: Name of application eg nova-compute-something
    :type application_name: str
    :param user: The user that the ssh asserts are for.
    :type user: str
    """
    # If remote compute node hands us a hostname, ensure we have a
    # known hosts entry for its IP, hostname and FQDN.
    hosts = [private_address]

    if not is_ipv6(private_address):
        if hostname:
            hosts.append(hostname)

        if is_ip(private_address):
            hn = get_hostname(private_address)
            if hn:
                hosts.append(hn)
                short = hn.split('.')[0]
                if ns_query(short):
                    hosts.append(short)
        else:
            hosts.append(get_host_ip(private_address))
            short = private_address.split('.')[0]
            if ns_query(short):
                hosts.append(short)

    for host in list(set(hosts)):
        add_known_host(host, application_name, user)

    if not ssh_authorized_key_exists(public_key, application_name, user):
        log('Saving SSH authorized key for compute host at %s.' %
            private_address)
        add_authorized_key(public_key, application_name, user)
def function[ssh_compute_add_host_and_key, parameter[public_key, hostname, private_address, application_name, user]]: constant[Add a compute nodes ssh details to local cache. Collect various hostname variations and add the corresponding host keys to the local known hosts file. Finally, add the supplied public key to the authorized_key file. :param public_key: Public key. :type public_key: str :param hostname: Hostname to collect host keys from. :type hostname: str :param private_address:aCorresponding private address for hostname :type private_address: str :param application_name: Name of application eg nova-compute-something :type application_name: str :param user: The user that the ssh asserts are for. :type user: str ] variable[hosts] assign[=] list[[<ast.Name object at 0x7da1b121acb0>]] if <ast.UnaryOp object at 0x7da1b1219210> begin[:] if name[hostname] begin[:] call[name[hosts].append, parameter[name[hostname]]] if call[name[is_ip], parameter[name[private_address]]] begin[:] variable[hn] assign[=] call[name[get_hostname], parameter[name[private_address]]] if name[hn] begin[:] call[name[hosts].append, parameter[name[hn]]] variable[short] assign[=] call[call[name[hn].split, parameter[constant[.]]]][constant[0]] if call[name[ns_query], parameter[name[short]]] begin[:] call[name[hosts].append, parameter[name[short]]] for taget[name[host]] in starred[call[name[list], parameter[call[name[set], parameter[name[hosts]]]]]] begin[:] call[name[add_known_host], parameter[name[host], name[application_name], name[user]]] if <ast.UnaryOp object at 0x7da1b12186d0> begin[:] call[name[log], parameter[binary_operation[constant[Saving SSH authorized key for compute host at %s.] <ast.Mod object at 0x7da2590d6920> name[private_address]]]] call[name[add_authorized_key], parameter[name[public_key], name[application_name], name[user]]]
keyword[def] identifier[ssh_compute_add_host_and_key] ( identifier[public_key] , identifier[hostname] , identifier[private_address] , identifier[application_name] , identifier[user] = keyword[None] ): literal[string] identifier[hosts] =[ identifier[private_address] ] keyword[if] keyword[not] identifier[is_ipv6] ( identifier[private_address] ): keyword[if] identifier[hostname] : identifier[hosts] . identifier[append] ( identifier[hostname] ) keyword[if] identifier[is_ip] ( identifier[private_address] ): identifier[hn] = identifier[get_hostname] ( identifier[private_address] ) keyword[if] identifier[hn] : identifier[hosts] . identifier[append] ( identifier[hn] ) identifier[short] = identifier[hn] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] identifier[ns_query] ( identifier[short] ): identifier[hosts] . identifier[append] ( identifier[short] ) keyword[else] : identifier[hosts] . identifier[append] ( identifier[get_host_ip] ( identifier[private_address] )) identifier[short] = identifier[private_address] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] identifier[ns_query] ( identifier[short] ): identifier[hosts] . identifier[append] ( identifier[short] ) keyword[for] identifier[host] keyword[in] identifier[list] ( identifier[set] ( identifier[hosts] )): identifier[add_known_host] ( identifier[host] , identifier[application_name] , identifier[user] ) keyword[if] keyword[not] identifier[ssh_authorized_key_exists] ( identifier[public_key] , identifier[application_name] , identifier[user] ): identifier[log] ( literal[string] % identifier[private_address] ) identifier[add_authorized_key] ( identifier[public_key] , identifier[application_name] , identifier[user] )
def ssh_compute_add_host_and_key(public_key, hostname, private_address, application_name, user=None): """Add a compute nodes ssh details to local cache. Collect various hostname variations and add the corresponding host keys to the local known hosts file. Finally, add the supplied public key to the authorized_key file. :param public_key: Public key. :type public_key: str :param hostname: Hostname to collect host keys from. :type hostname: str :param private_address:aCorresponding private address for hostname :type private_address: str :param application_name: Name of application eg nova-compute-something :type application_name: str :param user: The user that the ssh asserts are for. :type user: str """ # If remote compute node hands us a hostname, ensure we have a # known hosts entry for its IP, hostname and FQDN. hosts = [private_address] if not is_ipv6(private_address): if hostname: hosts.append(hostname) # depends on [control=['if'], data=[]] if is_ip(private_address): hn = get_hostname(private_address) if hn: hosts.append(hn) short = hn.split('.')[0] if ns_query(short): hosts.append(short) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: hosts.append(get_host_ip(private_address)) short = private_address.split('.')[0] if ns_query(short): hosts.append(short) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] for host in list(set(hosts)): add_known_host(host, application_name, user) # depends on [control=['for'], data=['host']] if not ssh_authorized_key_exists(public_key, application_name, user): log('Saving SSH authorized key for compute host at %s.' % private_address) add_authorized_key(public_key, application_name, user) # depends on [control=['if'], data=[]]
def generateSplines(self):
        """Fit a list of smoothing splines of self.dependentVar against
        self.independentVar, using the spline parameters configured on this
        instance, and store the result in self.splines.
        """
        self.splines = returnSplineList(self.dependentVar, self.independentVar,
                                        subsetPercentage=self.splineSubsetPercentage,
                                        cycles=self.splineCycles,
                                        minKnotPoints=self.splineMinKnotPoins,
                                        initialKnots=self.splineInitialKnots,
                                        splineOrder=self.splineOrder,
                                        terminalExpansion=self.splineTerminalExpansion
                                        )
def function[generateSplines, parameter[self]]: constant[#TODO: docstring ] variable[_] assign[=] call[name[returnSplineList], parameter[name[self].dependentVar, name[self].independentVar]] name[self].splines assign[=] name[_]
keyword[def] identifier[generateSplines] ( identifier[self] ): literal[string] identifier[_] = identifier[returnSplineList] ( identifier[self] . identifier[dependentVar] , identifier[self] . identifier[independentVar] , identifier[subsetPercentage] = identifier[self] . identifier[splineSubsetPercentage] , identifier[cycles] = identifier[self] . identifier[splineCycles] , identifier[minKnotPoints] = identifier[self] . identifier[splineMinKnotPoins] , identifier[initialKnots] = identifier[self] . identifier[splineInitialKnots] , identifier[splineOrder] = identifier[self] . identifier[splineOrder] , identifier[terminalExpansion] = identifier[self] . identifier[splineTerminalExpansion] ) identifier[self] . identifier[splines] = identifier[_]
def generateSplines(self): """#TODO: docstring """ _ = returnSplineList(self.dependentVar, self.independentVar, subsetPercentage=self.splineSubsetPercentage, cycles=self.splineCycles, minKnotPoints=self.splineMinKnotPoins, initialKnots=self.splineInitialKnots, splineOrder=self.splineOrder, terminalExpansion=self.splineTerminalExpansion) self.splines = _
def autocomplete(query, country=None, hurricanes=False, cities=True, timeout=5):
    """Make an autocomplete API request

    This can be used to find cities and/or hurricanes by name

    :param string query: city or hurricane name to search for
    :param string country: restrict search to a specific country. Must be a
        two letter country code
    :param boolean hurricanes: whether to search for hurricanes or not
    :param boolean cities: whether to search for cities or not
    :param integer timeout: timeout of the api request
    :returns: result of the autocomplete API request
    :rtype: dict
    """
    data = {}
    data['query'] = quote(query)
    data['country'] = country or ''
    data['hurricanes'] = 1 if hurricanes else 0
    data['cities'] = 1 if cities else 0
    data['format'] = 'JSON'

    r = requests.get(AUTOCOMPLETE_URL.format(**data), timeout=timeout)
    results = json.loads(r.content)['RESULTS']

    return results
def function[autocomplete, parameter[query, country, hurricanes, cities, timeout]]: constant[Make an autocomplete API request This can be used to find cities and/or hurricanes by name :param string query: city :param string country: restrict search to a specific country. Must be a two letter country code :param boolean hurricanes: whether to search for hurricanes or not :param boolean cities: whether to search for cities or not :param integer timeout: timeout of the api request :returns: result of the autocomplete API request :rtype: dict ] variable[data] assign[=] dictionary[[], []] call[name[data]][constant[query]] assign[=] call[name[quote], parameter[name[query]]] call[name[data]][constant[country]] assign[=] <ast.BoolOp object at 0x7da1affd6f50> call[name[data]][constant[hurricanes]] assign[=] <ast.IfExp object at 0x7da1affd6dd0> call[name[data]][constant[cities]] assign[=] <ast.IfExp object at 0x7da1affd69e0> call[name[data]][constant[format]] assign[=] constant[JSON] variable[r] assign[=] call[name[requests].get, parameter[call[name[AUTOCOMPLETE_URL].format, parameter[]]]] variable[results] assign[=] call[call[name[json].loads, parameter[name[r].content]]][constant[RESULTS]] return[name[results]]
keyword[def] identifier[autocomplete] ( identifier[query] , identifier[country] = keyword[None] , identifier[hurricanes] = keyword[False] , identifier[cities] = keyword[True] , identifier[timeout] = literal[int] ): literal[string] identifier[data] ={} identifier[data] [ literal[string] ]= identifier[quote] ( identifier[query] ) identifier[data] [ literal[string] ]= identifier[country] keyword[or] literal[string] identifier[data] [ literal[string] ]= literal[int] keyword[if] identifier[hurricanes] keyword[else] literal[int] identifier[data] [ literal[string] ]= literal[int] keyword[if] identifier[cities] keyword[else] literal[int] identifier[data] [ literal[string] ]= literal[string] identifier[r] = identifier[requests] . identifier[get] ( identifier[AUTOCOMPLETE_URL] . identifier[format] (** identifier[data] ), identifier[timeout] = identifier[timeout] ) identifier[results] = identifier[json] . identifier[loads] ( identifier[r] . identifier[content] )[ literal[string] ] keyword[return] identifier[results]
def autocomplete(query, country=None, hurricanes=False, cities=True, timeout=5): """Make an autocomplete API request This can be used to find cities and/or hurricanes by name :param string query: city :param string country: restrict search to a specific country. Must be a two letter country code :param boolean hurricanes: whether to search for hurricanes or not :param boolean cities: whether to search for cities or not :param integer timeout: timeout of the api request :returns: result of the autocomplete API request :rtype: dict """ data = {} data['query'] = quote(query) data['country'] = country or '' data['hurricanes'] = 1 if hurricanes else 0 data['cities'] = 1 if cities else 0 data['format'] = 'JSON' r = requests.get(AUTOCOMPLETE_URL.format(**data), timeout=timeout) results = json.loads(r.content)['RESULTS'] return results
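A hedged usage example; AUTOCOMPLETE_URL is assumed to point at the Weather Underground autocomplete endpoint defined elsewhere in this module, and each result is assumed to be a dict carrying a 'name' key.
results = autocomplete('San Franc', country='US')
for r in results:
    print(r.get('name'))  # e.g. matching city names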
def scrape_all_files(self): """ Generator that yields one by one the return value for self.read_dcm for each file within this set """ try: for dcmf in self.items: yield self.read_dcm(dcmf) except IOError as ioe: raise IOError('Error reading DICOM file: {}.'.format(dcmf)) from ioe
def function[scrape_all_files, parameter[self]]: constant[ Generator that yields one by one the return value for self.read_dcm for each file within this set ] <ast.Try object at 0x7da1afef8550>
keyword[def] identifier[scrape_all_files] ( identifier[self] ): literal[string] keyword[try] : keyword[for] identifier[dcmf] keyword[in] identifier[self] . identifier[items] : keyword[yield] identifier[self] . identifier[read_dcm] ( identifier[dcmf] ) keyword[except] identifier[IOError] keyword[as] identifier[ioe] : keyword[raise] identifier[IOError] ( literal[string] . identifier[format] ( identifier[dcmf] )) keyword[from] identifier[ioe]
def scrape_all_files(self): """ Generator that yields one by one the return value for self.read_dcm for each file within this set """ try: for dcmf in self.items: yield self.read_dcm(dcmf) # depends on [control=['for'], data=['dcmf']] # depends on [control=['try'], data=[]] except IOError as ioe: raise IOError('Error reading DICOM file: {}.'.format(dcmf)) from ioe # depends on [control=['except'], data=['ioe']]
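Sketch of consuming the generator above; dicom_set stands in for an instance of the containing class and process() is a placeholder for whatever handles each record.
for dcm_data in dicom_set.scrape_all_files():  # dicom_set is a stand-in instance
    process(dcm_data)                          # process() is a placeholder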
def update(context, resource, **kwargs): """Update a specific resource""" etag = kwargs.pop('etag') id = kwargs.pop('id') data = utils.sanitize_kwargs(**kwargs) uri = '%s/%s/%s' % (context.dci_cs_api, resource, id) r = context.session.put(uri, timeout=HTTP_TIMEOUT, headers={'If-match': etag}, json=data) return r
def function[update, parameter[context, resource]]: constant[Update a specific resource] variable[etag] assign[=] call[name[kwargs].pop, parameter[constant[etag]]] variable[id] assign[=] call[name[kwargs].pop, parameter[constant[id]]] variable[data] assign[=] call[name[utils].sanitize_kwargs, parameter[]] variable[uri] assign[=] binary_operation[constant[%s/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b24e19f0>, <ast.Name object at 0x7da1b24e2500>, <ast.Name object at 0x7da1b24e2380>]]] variable[r] assign[=] call[name[context].session.put, parameter[name[uri]]] return[name[r]]
keyword[def] identifier[update] ( identifier[context] , identifier[resource] ,** identifier[kwargs] ): literal[string] identifier[etag] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[id] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[data] = identifier[utils] . identifier[sanitize_kwargs] (** identifier[kwargs] ) identifier[uri] = literal[string] %( identifier[context] . identifier[dci_cs_api] , identifier[resource] , identifier[id] ) identifier[r] = identifier[context] . identifier[session] . identifier[put] ( identifier[uri] , identifier[timeout] = identifier[HTTP_TIMEOUT] , identifier[headers] ={ literal[string] : identifier[etag] }, identifier[json] = identifier[data] ) keyword[return] identifier[r]
def update(context, resource, **kwargs): """Update a specific resource""" etag = kwargs.pop('etag') id = kwargs.pop('id') data = utils.sanitize_kwargs(**kwargs) uri = '%s/%s/%s' % (context.dci_cs_api, resource, id) r = context.session.put(uri, timeout=HTTP_TIMEOUT, headers={'If-match': etag}, json=data) return r
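A sketch only: context is assumed to be an authenticated DCI context whose session and dci_cs_api attributes are configured, and the id/etag values are placeholders.
r = update(context, 'jobs', id='abc123', etag='some-etag', status='success')
print(r.status_code)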
def from_kwargs(cls, **kwargs): """Creates a new instance of self from the given keyword arguments. Each argument will correspond to a field in the returned array, with the name of the field given by the keyword, and the value(s) whatever the keyword was set to. Each keyword may be set to a single value or a list of values. The number of values that each argument is set to must be the same; this will be the size of the returned array. Examples -------- Create an array with fields 'mass1' and 'mass2': >>> a = FieldArray.from_kwargs(mass1=[1.1, 3.], mass2=[2., 3.]) >>> a.fieldnames ('mass1', 'mass2') >>> a.mass1, a.mass2 (array([ 1.1, 3. ]), array([ 2., 3.])) Create an array with only a single element in it: >>> a = FieldArray.from_kwargs(mass1=1.1, mass2=2.) >>> a.mass1, a.mass2 (array([ 1.1]), array([ 2.])) """ arrays = [] names = [] for p,vals in kwargs.items(): if not isinstance(vals, numpy.ndarray): if not isinstance(vals, list): vals = [vals] vals = numpy.array(vals) arrays.append(vals) names.append(p) return cls.from_arrays(arrays, names=names)
def function[from_kwargs, parameter[cls]]: constant[Creates a new instance of self from the given keyword arguments. Each argument will correspond to a field in the returned array, with the name of the field given by the keyword, and the value(s) whatever the keyword was set to. Each keyword may be set to a single value or a list of values. The number of values that each argument is set to must be the same; this will be the size of the returned array. Examples -------- Create an array with fields 'mass1' and 'mass2': >>> a = FieldArray.from_kwargs(mass1=[1.1, 3.], mass2=[2., 3.]) >>> a.fieldnames ('mass1', 'mass2') >>> a.mass1, a.mass2 (array([ 1.1, 3. ]), array([ 2., 3.])) Create an array with only a single element in it: >>> a = FieldArray.from_kwargs(mass1=1.1, mass2=2.) >>> a.mass1, a.mass2 (array([ 1.1]), array([ 2.])) ] variable[arrays] assign[=] list[[]] variable[names] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da2041db4c0>, <ast.Name object at 0x7da2041dbe80>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da2041da170> begin[:] if <ast.UnaryOp object at 0x7da2041d9c60> begin[:] variable[vals] assign[=] list[[<ast.Name object at 0x7da2041db6a0>]] variable[vals] assign[=] call[name[numpy].array, parameter[name[vals]]] call[name[arrays].append, parameter[name[vals]]] call[name[names].append, parameter[name[p]]] return[call[name[cls].from_arrays, parameter[name[arrays]]]]
keyword[def] identifier[from_kwargs] ( identifier[cls] ,** identifier[kwargs] ): literal[string] identifier[arrays] =[] identifier[names] =[] keyword[for] identifier[p] , identifier[vals] keyword[in] identifier[kwargs] . identifier[items] (): keyword[if] keyword[not] identifier[isinstance] ( identifier[vals] , identifier[numpy] . identifier[ndarray] ): keyword[if] keyword[not] identifier[isinstance] ( identifier[vals] , identifier[list] ): identifier[vals] =[ identifier[vals] ] identifier[vals] = identifier[numpy] . identifier[array] ( identifier[vals] ) identifier[arrays] . identifier[append] ( identifier[vals] ) identifier[names] . identifier[append] ( identifier[p] ) keyword[return] identifier[cls] . identifier[from_arrays] ( identifier[arrays] , identifier[names] = identifier[names] )
def from_kwargs(cls, **kwargs): """Creates a new instance of self from the given keyword arguments. Each argument will correspond to a field in the returned array, with the name of the field given by the keyword, and the value(s) whatever the keyword was set to. Each keyword may be set to a single value or a list of values. The number of values that each argument is set to must be the same; this will be the size of the returned array. Examples -------- Create an array with fields 'mass1' and 'mass2': >>> a = FieldArray.from_kwargs(mass1=[1.1, 3.], mass2=[2., 3.]) >>> a.fieldnames ('mass1', 'mass2') >>> a.mass1, a.mass2 (array([ 1.1, 3. ]), array([ 2., 3.])) Create an array with only a single element in it: >>> a = FieldArray.from_kwargs(mass1=1.1, mass2=2.) >>> a.mass1, a.mass2 (array([ 1.1]), array([ 2.])) """ arrays = [] names = [] for (p, vals) in kwargs.items(): if not isinstance(vals, numpy.ndarray): if not isinstance(vals, list): vals = [vals] # depends on [control=['if'], data=[]] vals = numpy.array(vals) # depends on [control=['if'], data=[]] arrays.append(vals) names.append(p) # depends on [control=['for'], data=[]] return cls.from_arrays(arrays, names=names)
def cinterpolate(p, axis_values, pixelgrid):
    """
    Interpolates in a grid prepared by create_pixeltypegrid().

    Does a similar thing as :py:func:`interpolate`, but does everything in C.

    p is an array of parameter arrays.
    Careful, the shape of input :envvar:`p` and output is the transpose of
    :py:func:`interpolate`.

    @param p: Ninterpolate X Npar array
    @type p: array
    @param axis_values: grid axis values, as prepared by create_pixeltypegrid()
    @param pixelgrid: pixel grid, as prepared by create_pixeltypegrid()
    @return: Ninterpolate X Ndata array
    @rtype: array
    """
    res = libphoebe.interp(p, axis_values, pixelgrid)

    return res
def function[cinterpolate, parameter[p, axis_values, pixelgrid]]: constant[ Interpolates in a grid prepared by create_pixeltypegrid(). Does a similar thing as :py:func:`interpolate`, but does everything in C. p is an array of parameter arrays. Careful, the shape of input :envvar:`p` and output is the transpose of :py:func:`interpolate`. @param p: Ninterpolate X Npar array @type p: array @return: Ninterpolate X Ndata array @rtype: array ] variable[res] assign[=] call[name[libphoebe].interp, parameter[name[p], name[axis_values], name[pixelgrid]]] return[name[res]]
keyword[def] identifier[cinterpolate] ( identifier[p] , identifier[axis_values] , identifier[pixelgrid] ): literal[string] identifier[res] = identifier[libphoebe] . identifier[interp] ( identifier[p] , identifier[axis_values] , identifier[pixelgrid] ) keyword[return] identifier[res]
def cinterpolate(p, axis_values, pixelgrid): """ Interpolates in a grid prepared by create_pixeltypegrid(). Does a similar thing as :py:func:`interpolate`, but does everything in C. p is an array of parameter arrays. Careful, the shape of input :envvar:`p` and output is the transpose of :py:func:`interpolate`. @param p: Ninterpolate X Npar array @type p: array @return: Ninterpolate X Ndata array @rtype: array """ res = libphoebe.interp(p, axis_values, pixelgrid) return res
def ip_to_host(ip): ''' Returns the hostname of a given IP ''' try: hostname, aliaslist, ipaddrlist = socket.gethostbyaddr(ip) except Exception as exc: log.debug('salt.utils.network.ip_to_host(%r) failed: %s', ip, exc) hostname = None return hostname
def function[ip_to_host, parameter[ip]]: constant[ Returns the hostname of a given IP ] <ast.Try object at 0x7da1b1f7b460> return[name[hostname]]
keyword[def] identifier[ip_to_host] ( identifier[ip] ): literal[string] keyword[try] : identifier[hostname] , identifier[aliaslist] , identifier[ipaddrlist] = identifier[socket] . identifier[gethostbyaddr] ( identifier[ip] ) keyword[except] identifier[Exception] keyword[as] identifier[exc] : identifier[log] . identifier[debug] ( literal[string] , identifier[ip] , identifier[exc] ) identifier[hostname] = keyword[None] keyword[return] identifier[hostname]
def ip_to_host(ip): """ Returns the hostname of a given IP """ try: (hostname, aliaslist, ipaddrlist) = socket.gethostbyaddr(ip) # depends on [control=['try'], data=[]] except Exception as exc: log.debug('salt.utils.network.ip_to_host(%r) failed: %s', ip, exc) hostname = None # depends on [control=['except'], data=['exc']] return hostname
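For example (the result depends on the local resolver configuration):
print(ip_to_host('127.0.0.1'))  # typically 'localhost'; None if the lookup fails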
def msg2repr(msg, processor, **config): """ Return a human-readable or "natural language" representation of a dict-like fedmsg message. Think of this as the 'top-most level' function in this module. """ fmt = u"{title} -- {subtitle} {link}" title = msg2title(msg, **config) subtitle = processor.subtitle(msg, **config) link = processor.link(msg, **config) or '' return fmt.format(**locals())
def function[msg2repr, parameter[msg, processor]]: constant[ Return a human-readable or "natural language" representation of a dict-like fedmsg message. Think of this as the 'top-most level' function in this module. ] variable[fmt] assign[=] constant[{title} -- {subtitle} {link}] variable[title] assign[=] call[name[msg2title], parameter[name[msg]]] variable[subtitle] assign[=] call[name[processor].subtitle, parameter[name[msg]]] variable[link] assign[=] <ast.BoolOp object at 0x7da1b04f42e0> return[call[name[fmt].format, parameter[]]]
keyword[def] identifier[msg2repr] ( identifier[msg] , identifier[processor] ,** identifier[config] ): literal[string] identifier[fmt] = literal[string] identifier[title] = identifier[msg2title] ( identifier[msg] ,** identifier[config] ) identifier[subtitle] = identifier[processor] . identifier[subtitle] ( identifier[msg] ,** identifier[config] ) identifier[link] = identifier[processor] . identifier[link] ( identifier[msg] ,** identifier[config] ) keyword[or] literal[string] keyword[return] identifier[fmt] . identifier[format] (** identifier[locals] ())
def msg2repr(msg, processor, **config): """ Return a human-readable or "natural language" representation of a dict-like fedmsg message. Think of this as the 'top-most level' function in this module. """ fmt = u'{title} -- {subtitle} {link}' title = msg2title(msg, **config) subtitle = processor.subtitle(msg, **config) link = processor.link(msg, **config) or '' return fmt.format(**locals())
def fill_sampling(slice_list, N):
    """Given a list of slices, draw N samples such that each slice contributes as much as possible

    Parameters
    --------------------------
    slice_list : list of Slice
        List of slices

    N : int
        Number of samples to draw
    """
    A = [len(s.inliers) for s in slice_list]
    N_max = np.sum(A)
    if N > N_max:
        raise ValueError("Tried to draw {:d} samples from a pool of only {:d} items".format(N, N_max))

    samples_from = np.zeros((len(A),), dtype='int') # Number of samples to draw from each group

    remaining = N
    while remaining > 0:
        remaining_groups = np.flatnonzero(samples_from - np.array(A))
        if remaining < len(remaining_groups):
            np.random.shuffle(remaining_groups)
            for g in remaining_groups[:remaining]:
                samples_from[g] += 1
        else:
            # Give each group the allowed number of samples. Constrain to their max size.
            to_each = max(1, int(remaining / len(remaining_groups)))
            samples_from = np.min(np.vstack((samples_from + to_each, A)), axis=0)

        # Update remaining count
        remaining = int(N - np.sum(samples_from))

    if not remaining == 0:
        raise ValueError("Still {:d} samples left! This is an error in the selection.".format(remaining))

    # Construct index list of selected samples
    samples = []
    for s, a, n in zip(slice_list, A, samples_from):
        if a == n:
            samples.append(np.array(s.inliers)) # all
        elif a == 0:
            samples.append(np.array([], dtype='int')) # none; np.arange([]) would raise TypeError
        else:
            chosen = np.random.choice(s.inliers, n, replace=False)
            samples.append(np.array(chosen))

    return samples
def function[fill_sampling, parameter[slice_list, N]]: constant[Given a list of slices, draw N samples such that each slice contributes as much as possible Parameters -------------------------- slice_list : list of Slice List of slices N : int Number of samples to draw ] variable[A] assign[=] <ast.ListComp object at 0x7da1b0625e40> variable[N_max] assign[=] call[name[np].sum, parameter[name[A]]] if compare[name[N] greater[>] name[N_max]] begin[:] <ast.Raise object at 0x7da20c6aa7d0> variable[samples_from] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Call object at 0x7da20c6a81f0>]]]] variable[remaining] assign[=] name[N] while compare[name[remaining] greater[>] constant[0]] begin[:] variable[remaining_groups] assign[=] call[name[np].flatnonzero, parameter[binary_operation[name[samples_from] - call[name[np].array, parameter[name[A]]]]]] if compare[name[remaining] less[<] call[name[len], parameter[name[remaining_groups]]]] begin[:] call[name[np].random.shuffle, parameter[name[remaining_groups]]] for taget[name[g]] in starred[call[name[remaining_groups]][<ast.Slice object at 0x7da20c6abd90>]] begin[:] <ast.AugAssign object at 0x7da20c6aa920> variable[remaining] assign[=] call[name[int], parameter[binary_operation[name[N] - call[name[np].sum, parameter[name[samples_from]]]]]] if <ast.UnaryOp object at 0x7da20c6a86a0> begin[:] <ast.Raise object at 0x7da20c6a8880> variable[samples] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20c6a9e40>, <ast.Name object at 0x7da20c6a8460>, <ast.Name object at 0x7da20c6aa0b0>]]] in starred[call[name[zip], parameter[name[slice_list], name[A], name[samples_from]]]] begin[:] if compare[name[a] equal[==] name[n]] begin[:] call[name[samples].append, parameter[call[name[np].array, parameter[name[s].inliers]]]] return[name[samples]]
keyword[def] identifier[fill_sampling] ( identifier[slice_list] , identifier[N] ): literal[string] identifier[A] =[ identifier[len] ( identifier[s] . identifier[inliers] ) keyword[for] identifier[s] keyword[in] identifier[slice_list] ] identifier[N_max] = identifier[np] . identifier[sum] ( identifier[A] ) keyword[if] identifier[N] > identifier[N_max] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[N] , identifier[N_max] )) identifier[samples_from] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[A] ),), identifier[dtype] = literal[string] ) identifier[remaining] = identifier[N] keyword[while] identifier[remaining] > literal[int] : identifier[remaining_groups] = identifier[np] . identifier[flatnonzero] ( identifier[samples_from] - identifier[np] . identifier[array] ( identifier[A] )) keyword[if] identifier[remaining] < identifier[len] ( identifier[remaining_groups] ): identifier[np] . identifier[random] . identifier[shuffle] ( identifier[remaining_groups] ) keyword[for] identifier[g] keyword[in] identifier[remaining_groups] [: identifier[remaining] ]: identifier[samples_from] [ identifier[g] ]+= literal[int] keyword[else] : identifier[to_each] = identifier[max] ( literal[int] , identifier[int] ( identifier[remaining] / identifier[len] ( identifier[remaining_groups] ))) identifier[samples_from] = identifier[np] . identifier[min] ( identifier[np] . identifier[vstack] (( identifier[samples_from] + identifier[to_each] , identifier[A] )), identifier[axis] = literal[int] ) identifier[remaining] = identifier[int] ( identifier[N] - identifier[np] . identifier[sum] ( identifier[samples_from] )) keyword[if] keyword[not] identifier[remaining] == literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[samples] =[] keyword[for] identifier[s] , identifier[a] , identifier[n] keyword[in] identifier[zip] ( identifier[slice_list] , identifier[A] , identifier[samples_from] ): keyword[if] identifier[a] == identifier[n] : identifier[samples] . identifier[append] ( identifier[np] . identifier[array] ( identifier[s] . identifier[inliers] )) keyword[elif] identifier[a] == literal[int] : identifier[samples] . identifier[append] ( identifier[np] . identifier[arange] ([])) keyword[else] : identifier[chosen] = identifier[np] . identifier[random] . identifier[choice] ( identifier[s] . identifier[inliers] , identifier[n] , identifier[replace] = keyword[False] ) identifier[samples] . identifier[append] ( identifier[np] . identifier[array] ( identifier[chosen] )) keyword[return] identifier[samples]
def fill_sampling(slice_list, N):
    """Given a list of slices, draw N samples such that each slice contributes as much as possible

    Parameters
    --------------------------
    slice_list : list of Slice
        List of slices

    N : int
        Number of samples to draw
    """
    A = [len(s.inliers) for s in slice_list]
    N_max = np.sum(A)
    if N > N_max:
        raise ValueError('Tried to draw {:d} samples from a pool of only {:d} items'.format(N, N_max)) # depends on [control=['if'], data=['N', 'N_max']]
    samples_from = np.zeros((len(A),), dtype='int') # Number of samples to draw from each group
    remaining = N
    while remaining > 0:
        remaining_groups = np.flatnonzero(samples_from - np.array(A))
        if remaining < len(remaining_groups):
            np.random.shuffle(remaining_groups)
            for g in remaining_groups[:remaining]:
                samples_from[g] += 1 # depends on [control=['for'], data=['g']] # depends on [control=['if'], data=['remaining']]
        else:
            # Give each group the allowed number of samples. Constrain to their max size.
            to_each = max(1, int(remaining / len(remaining_groups)))
            samples_from = np.min(np.vstack((samples_from + to_each, A)), axis=0)
        # Update remaining count
        remaining = int(N - np.sum(samples_from)) # depends on [control=['while'], data=['remaining']]
    if not remaining == 0:
        raise ValueError('Still {:d} samples left! This is an error in the selection.'.format(remaining)) # depends on [control=['if'], data=[]]
    # Construct index list of selected samples
    samples = []
    for (s, a, n) in zip(slice_list, A, samples_from):
        if a == n:
            samples.append(np.array(s.inliers)) # all # depends on [control=['if'], data=[]]
        elif a == 0:
            samples.append(np.array([], dtype='int')) # none; np.arange([]) would raise TypeError # depends on [control=['if'], data=[]]
        else:
            chosen = np.random.choice(s.inliers, n, replace=False)
            samples.append(np.array(chosen)) # depends on [control=['for'], data=[]]
    return samples
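A minimal sketch with stand-in Slice objects; only the inliers attribute is used by the function, so any object exposing it will do.
class _Slice(object):
    def __init__(self, inliers):
        self.inliers = inliers

groups = [_Slice([0, 1, 2, 3]), _Slice([4, 5]), _Slice([6])]
picks = fill_sampling(groups, 5)  # one index array per slice, 5 indices in total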
def register(self, server, username, password):
        """
        Register a new GenePattern server session for the provided
        server, username and password. Return the session.
        :param server:
        :param username:
        :param password:
        :return:
        """

        # Create the session
        session = gp.GPServer(server, username, password)

        # Validate username if not empty
        valid_username = username != "" and username is not None

        # Validate that the server is not already registered
        index = self._get_index(server)
        new_server = index == -1

        # Add the new session to the list
        if valid_username and new_server:
            self.sessions.append(session)

        # Replace the old session if one already exists
        if valid_username and not new_server:
            self.sessions[index] = session

        return session
def function[register, parameter[self, server, username, password]]: constant[ Register a new GenePattern server session for the provided server, username and password. Return the session. :param server: :param username: :param password: :return: ] variable[session] assign[=] call[name[gp].GPServer, parameter[name[server], name[username], name[password]]] variable[valid_username] assign[=] <ast.BoolOp object at 0x7da18f09cdc0> variable[index] assign[=] call[name[self]._get_index, parameter[name[server]]] variable[new_server] assign[=] compare[name[index] equal[==] <ast.UnaryOp object at 0x7da20c796470>] if <ast.BoolOp object at 0x7da20c7942b0> begin[:] call[name[self].sessions.append, parameter[name[session]]] if <ast.BoolOp object at 0x7da20c794880> begin[:] call[name[self].sessions][name[index]] assign[=] name[session] return[name[session]]
keyword[def] identifier[register] ( identifier[self] , identifier[server] , identifier[username] , identifier[password] ): literal[string] identifier[session] = identifier[gp] . identifier[GPServer] ( identifier[server] , identifier[username] , identifier[password] ) identifier[valid_username] = identifier[username] != literal[string] keyword[and] identifier[username] keyword[is] keyword[not] keyword[None] identifier[index] = identifier[self] . identifier[_get_index] ( identifier[server] ) identifier[new_server] = identifier[index] ==- literal[int] keyword[if] identifier[valid_username] keyword[and] identifier[new_server] : identifier[self] . identifier[sessions] . identifier[append] ( identifier[session] ) keyword[if] identifier[valid_username] keyword[and] keyword[not] identifier[new_server] : identifier[self] . identifier[sessions] [ identifier[index] ]= identifier[session] keyword[return] identifier[session]
def register(self, server, username, password): """ Register a new GenePattern server session for the provided server, username and password. Return the session. :param server: :param username: :param password: :return: """ # Create the session session = gp.GPServer(server, username, password) # Validate username if not empty valid_username = username != '' and username is not None # Validate that the server is not already registered index = self._get_index(server) new_server = index == -1 # Add the new session to the list if valid_username and new_server: self.sessions.append(session) # depends on [control=['if'], data=[]] # Replace the old session if one already exists if valid_username and (not new_server): self.sessions[index] = session # depends on [control=['if'], data=[]] return session
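Hypothetical usage; the container class name, credentials and server URL below are placeholders, not part of the source.
manager = GPSessionManager()  # hypothetical name for the class owning register()
session = manager.register('https://cloud.genepattern.org/gp', 'myuser', 'mypassword')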
def add_delegate(self, callback): """ Registers a new delegate callback The prototype should be function(data), where data will be the decoded json push Args: callback (function): method to trigger when push center receives events """ if callback in self._delegate_methods: return self._delegate_methods.append(callback)
def function[add_delegate, parameter[self, callback]]: constant[ Registers a new delegate callback The prototype should be function(data), where data will be the decoded json push Args: callback (function): method to trigger when push center receives events ] if compare[name[callback] in name[self]._delegate_methods] begin[:] return[None] call[name[self]._delegate_methods.append, parameter[name[callback]]]
keyword[def] identifier[add_delegate] ( identifier[self] , identifier[callback] ): literal[string] keyword[if] identifier[callback] keyword[in] identifier[self] . identifier[_delegate_methods] : keyword[return] identifier[self] . identifier[_delegate_methods] . identifier[append] ( identifier[callback] )
def add_delegate(self, callback): """ Registers a new delegate callback The prototype should be function(data), where data will be the decoded json push Args: callback (function): method to trigger when push center receives events """ if callback in self._delegate_methods: return # depends on [control=['if'], data=[]] self._delegate_methods.append(callback)
def configure_create(self, ns, definition): """ Register a create endpoint. The definition's func should be a create function, which must: - accept kwargs for the request and path data - return a new item :param ns: the namespace :param definition: the endpoint definition """ @self.add_route(ns.collection_path, Operation.Create, ns) @request(definition.request_schema) @response(definition.response_schema) @wraps(definition.func) def create(**path_data): request_data = load_request_data(definition.request_schema) response_data = definition.func(**merge_data(path_data, request_data)) headers = encode_id_header(response_data) definition.header_func(headers, response_data) response_format = self.negotiate_response_content(definition.response_formats) return dump_response_data( definition.response_schema, response_data, status_code=Operation.Create.value.default_code, headers=headers, response_format=response_format, ) create.__doc__ = "Create a new {}".format(ns.subject_name)
def function[configure_create, parameter[self, ns, definition]]: constant[ Register a create endpoint. The definition's func should be a create function, which must: - accept kwargs for the request and path data - return a new item :param ns: the namespace :param definition: the endpoint definition ] def function[create, parameter[]]: variable[request_data] assign[=] call[name[load_request_data], parameter[name[definition].request_schema]] variable[response_data] assign[=] call[name[definition].func, parameter[]] variable[headers] assign[=] call[name[encode_id_header], parameter[name[response_data]]] call[name[definition].header_func, parameter[name[headers], name[response_data]]] variable[response_format] assign[=] call[name[self].negotiate_response_content, parameter[name[definition].response_formats]] return[call[name[dump_response_data], parameter[name[definition].response_schema, name[response_data]]]] name[create].__doc__ assign[=] call[constant[Create a new {}].format, parameter[name[ns].subject_name]]
keyword[def] identifier[configure_create] ( identifier[self] , identifier[ns] , identifier[definition] ): literal[string] @ identifier[self] . identifier[add_route] ( identifier[ns] . identifier[collection_path] , identifier[Operation] . identifier[Create] , identifier[ns] ) @ identifier[request] ( identifier[definition] . identifier[request_schema] ) @ identifier[response] ( identifier[definition] . identifier[response_schema] ) @ identifier[wraps] ( identifier[definition] . identifier[func] ) keyword[def] identifier[create] (** identifier[path_data] ): identifier[request_data] = identifier[load_request_data] ( identifier[definition] . identifier[request_schema] ) identifier[response_data] = identifier[definition] . identifier[func] (** identifier[merge_data] ( identifier[path_data] , identifier[request_data] )) identifier[headers] = identifier[encode_id_header] ( identifier[response_data] ) identifier[definition] . identifier[header_func] ( identifier[headers] , identifier[response_data] ) identifier[response_format] = identifier[self] . identifier[negotiate_response_content] ( identifier[definition] . identifier[response_formats] ) keyword[return] identifier[dump_response_data] ( identifier[definition] . identifier[response_schema] , identifier[response_data] , identifier[status_code] = identifier[Operation] . identifier[Create] . identifier[value] . identifier[default_code] , identifier[headers] = identifier[headers] , identifier[response_format] = identifier[response_format] , ) identifier[create] . identifier[__doc__] = literal[string] . identifier[format] ( identifier[ns] . identifier[subject_name] )
def configure_create(self, ns, definition): """ Register a create endpoint. The definition's func should be a create function, which must: - accept kwargs for the request and path data - return a new item :param ns: the namespace :param definition: the endpoint definition """ @self.add_route(ns.collection_path, Operation.Create, ns) @request(definition.request_schema) @response(definition.response_schema) @wraps(definition.func) def create(**path_data): request_data = load_request_data(definition.request_schema) response_data = definition.func(**merge_data(path_data, request_data)) headers = encode_id_header(response_data) definition.header_func(headers, response_data) response_format = self.negotiate_response_content(definition.response_formats) return dump_response_data(definition.response_schema, response_data, status_code=Operation.Create.value.default_code, headers=headers, response_format=response_format) create.__doc__ = 'Create a new {}'.format(ns.subject_name)
def _run_dragonpy_cli(self, *args): """ Run DragonPy cli with given args. Add "--verbosity" from GUI. """ verbosity = self.frame_settings.var_verbosity.get() verbosity_no = VERBOSITY_DICT2[verbosity] log.debug("Verbosity: %i (%s)" % (verbosity_no, verbosity)) args = ( "--verbosity", "%s" % verbosity_no # "--log_list", # "--log", # "dragonpy.components.cpu6809,40", # "dragonpy.Dragon32.MC6821_PIA,50", ) + args click.echo("\n") run_dragonpy(*args, verbose=True)
def function[_run_dragonpy_cli, parameter[self]]: constant[ Run DragonPy cli with given args. Add "--verbosity" from GUI. ] variable[verbosity] assign[=] call[name[self].frame_settings.var_verbosity.get, parameter[]] variable[verbosity_no] assign[=] call[name[VERBOSITY_DICT2]][name[verbosity]] call[name[log].debug, parameter[binary_operation[constant[Verbosity: %i (%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0416800>, <ast.Name object at 0x7da1b0416770>]]]]] variable[args] assign[=] binary_operation[tuple[[<ast.Constant object at 0x7da1b0415660>, <ast.BinOp object at 0x7da1b0417f10>]] + name[args]] call[name[click].echo, parameter[constant[ ]]] call[name[run_dragonpy], parameter[<ast.Starred object at 0x7da1b0415600>]]
keyword[def] identifier[_run_dragonpy_cli] ( identifier[self] ,* identifier[args] ): literal[string] identifier[verbosity] = identifier[self] . identifier[frame_settings] . identifier[var_verbosity] . identifier[get] () identifier[verbosity_no] = identifier[VERBOSITY_DICT2] [ identifier[verbosity] ] identifier[log] . identifier[debug] ( literal[string] %( identifier[verbosity_no] , identifier[verbosity] )) identifier[args] =( literal[string] , literal[string] % identifier[verbosity_no] )+ identifier[args] identifier[click] . identifier[echo] ( literal[string] ) identifier[run_dragonpy] (* identifier[args] , identifier[verbose] = keyword[True] )
def _run_dragonpy_cli(self, *args): """ Run DragonPy cli with given args. Add "--verbosity" from GUI. """ verbosity = self.frame_settings.var_verbosity.get() verbosity_no = VERBOSITY_DICT2[verbosity] log.debug('Verbosity: %i (%s)' % (verbosity_no, verbosity)) # "--log_list", # "--log", # "dragonpy.components.cpu6809,40", # "dragonpy.Dragon32.MC6821_PIA,50", args = ('--verbosity', '%s' % verbosity_no) + args click.echo('\n') run_dragonpy(*args, verbose=True)
def memoize(Class, *args, **kwargs): ''' Memoize/record a function inside this vlermv. :: @Vlermv.cache('~/.http') def get(url): return requests.get(url, auth = ('username', 'password')) The args and kwargs get passed to the Vlermv with some slight changes. Here are the changes. First, the default ``key_transformer`` is the tuple key_transformer rather than the simple key_transformer. Second, it is valid for cache to be called without arguments. Vlermv would ordinarily fail if no arguments were passed to it. If you pass no arguments to cache, the Vlermv directory argument (the one required argument) will be set to the name of the function. Third, you are more likely to use the ``cache_exceptions`` keyword argument; see :py:class:`~vlermv.Vlermv` for documentation on that. ''' def decorator(func): if len(args) == 0: if hasattr(func, '__name__'): _args = (func.__name__,) else: raise ValueError('You must specify the location to store the vlermv.') else: _args = args v = Class(*_args, **kwargs) v.func = func return v return decorator
def function[memoize, parameter[Class]]: constant[ Memoize/record a function inside this vlermv. :: @Vlermv.cache('~/.http') def get(url): return requests.get(url, auth = ('username', 'password')) The args and kwargs get passed to the Vlermv with some slight changes. Here are the changes. First, the default ``key_transformer`` is the tuple key_transformer rather than the simple key_transformer. Second, it is valid for cache to be called without arguments. Vlermv would ordinarily fail if no arguments were passed to it. If you pass no arguments to cache, the Vlermv directory argument (the one required argument) will be set to the name of the function. Third, you are more likely to use the ``cache_exceptions`` keyword argument; see :py:class:`~vlermv.Vlermv` for documentation on that. ] def function[decorator, parameter[func]]: if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:] if call[name[hasattr], parameter[name[func], constant[__name__]]] begin[:] variable[_args] assign[=] tuple[[<ast.Attribute object at 0x7da207f000d0>]] variable[v] assign[=] call[name[Class], parameter[<ast.Starred object at 0x7da207f00b20>]] name[v].func assign[=] name[func] return[name[v]] return[name[decorator]]
keyword[def] identifier[memoize] ( identifier[Class] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[def] identifier[decorator] ( identifier[func] ): keyword[if] identifier[len] ( identifier[args] )== literal[int] : keyword[if] identifier[hasattr] ( identifier[func] , literal[string] ): identifier[_args] =( identifier[func] . identifier[__name__] ,) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[else] : identifier[_args] = identifier[args] identifier[v] = identifier[Class] (* identifier[_args] ,** identifier[kwargs] ) identifier[v] . identifier[func] = identifier[func] keyword[return] identifier[v] keyword[return] identifier[decorator]
def memoize(Class, *args, **kwargs): """ Memoize/record a function inside this vlermv. :: @Vlermv.cache('~/.http') def get(url): return requests.get(url, auth = ('username', 'password')) The args and kwargs get passed to the Vlermv with some slight changes. Here are the changes. First, the default ``key_transformer`` is the tuple key_transformer rather than the simple key_transformer. Second, it is valid for cache to be called without arguments. Vlermv would ordinarily fail if no arguments were passed to it. If you pass no arguments to cache, the Vlermv directory argument (the one required argument) will be set to the name of the function. Third, you are more likely to use the ``cache_exceptions`` keyword argument; see :py:class:`~vlermv.Vlermv` for documentation on that. """ def decorator(func): if len(args) == 0: if hasattr(func, '__name__'): _args = (func.__name__,) # depends on [control=['if'], data=[]] else: raise ValueError('You must specify the location to store the vlermv.') # depends on [control=['if'], data=[]] else: _args = args v = Class(*_args, **kwargs) v.func = func return v return decorator
def fetchone(self): """Fetch next row""" self._check_executed() row = self.read_next() if row is None: return None self.rownumber += 1 return row
def function[fetchone, parameter[self]]: constant[Fetch next row] call[name[self]._check_executed, parameter[]] variable[row] assign[=] call[name[self].read_next, parameter[]] if compare[name[row] is constant[None]] begin[:] return[constant[None]] <ast.AugAssign object at 0x7da18f58e530> return[name[row]]
keyword[def] identifier[fetchone] ( identifier[self] ): literal[string] identifier[self] . identifier[_check_executed] () identifier[row] = identifier[self] . identifier[read_next] () keyword[if] identifier[row] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[self] . identifier[rownumber] += literal[int] keyword[return] identifier[row]
def fetchone(self): """Fetch next row""" self._check_executed() row = self.read_next() if row is None: return None # depends on [control=['if'], data=[]] self.rownumber += 1 return row
def user_login(self, email=None, password=None): """Login with email, password and get back a session cookie :type email: str :param email: The email used for authentication :type password: str :param password: The password used for authentication """ self._rpc_api = PiazzaRPC() self._rpc_api.user_login(email=email, password=password)
def function[user_login, parameter[self, email, password]]: constant[Login with email, password and get back a session cookie :type email: str :param email: The email used for authentication :type password: str :param password: The password used for authentication ] name[self]._rpc_api assign[=] call[name[PiazzaRPC], parameter[]] call[name[self]._rpc_api.user_login, parameter[]]
keyword[def] identifier[user_login] ( identifier[self] , identifier[email] = keyword[None] , identifier[password] = keyword[None] ): literal[string] identifier[self] . identifier[_rpc_api] = identifier[PiazzaRPC] () identifier[self] . identifier[_rpc_api] . identifier[user_login] ( identifier[email] = identifier[email] , identifier[password] = identifier[password] )
def user_login(self, email=None, password=None): """Login with email, password and get back a session cookie :type email: str :param email: The email used for authentication :type password: str :param password: The password used for authentication """ self._rpc_api = PiazzaRPC() self._rpc_api.user_login(email=email, password=password)
def init_sqlite_db(path, initTime=False):
    """
    Initialize SQLite Database
    Args:
        path(str): Path to database (Ex. '/home/username/my_sqlite.db').
        initTime(Optional[bool]): If True, it will print the amount of time taken to generate the database.
    Example::
        from gsshapy.lib.db_tools import init_sqlite_db, get_sessionmaker
        sqlite_db_path = '/home/username/my_sqlite.db'
        sqlalchemy_url = init_sqlite_db(path=sqlite_db_path)
        db_work_sessionmaker = get_sessionmaker(sqlalchemy_url)
        db_work_session = db_work_sessionmaker()
        ##DO WORK
        db_work_session.close()
    """
    sqlite_base_url = 'sqlite:///'
    sqlalchemy_url = sqlite_base_url + path
    init_time = init_db(sqlalchemy_url)
    if initTime:
        print('TIME: {0} seconds'.format(init_time))
    return sqlalchemy_url
def function[init_sqlite_db, parameter[path, initTime]]: constant[ Initialize SQLite Database Args: path(str): Path to database (Ex. '/home/username/my_sqlite.db'). initTime(Optional[bool]): If True, it will print the amount of time to generate database. Example:: from gsshapy.lib.db_tools import init_sqlite_db, create_session sqlite_db_path = '/home/username/my_sqlite.db' init_postgresql_db(path=sqlite_db_path) sqlalchemy_url = init_sqlite_db(path=sqlite_db_path) db_work_sessionmaker = get_sessionmaker(sqlalchemy_url) db_work_session = db_work_sessionmaker() ##DO WORK db_work_session.close() ] variable[sqlite_base_url] assign[=] constant[sqlite:///] variable[sqlalchemy_url] assign[=] binary_operation[name[sqlite_base_url] + name[path]] variable[init_time] assign[=] call[name[init_db], parameter[name[sqlalchemy_url]]] if name[initTime] begin[:] call[name[print], parameter[call[constant[TIME: {0} seconds].format, parameter[name[init_time]]]]] return[name[sqlalchemy_url]]
keyword[def] identifier[init_sqlite_db] ( identifier[path] , identifier[initTime] = keyword[False] ): literal[string] identifier[sqlite_base_url] = literal[string] identifier[sqlalchemy_url] = identifier[sqlite_base_url] + identifier[path] identifier[init_time] = identifier[init_db] ( identifier[sqlalchemy_url] ) keyword[if] identifier[initTime] : identifier[print] ( literal[string] . identifier[format] ( identifier[init_time] )) keyword[return] identifier[sqlalchemy_url]
def init_sqlite_db(path, initTime=False):
    """
    Initialize SQLite Database
    Args:
        path(str): Path to database (Ex. '/home/username/my_sqlite.db').
        initTime(Optional[bool]): If True, it will print the amount of time taken to generate the database.
    Example::
        from gsshapy.lib.db_tools import init_sqlite_db, get_sessionmaker
        sqlite_db_path = '/home/username/my_sqlite.db'
        sqlalchemy_url = init_sqlite_db(path=sqlite_db_path)
        db_work_sessionmaker = get_sessionmaker(sqlalchemy_url)
        db_work_session = db_work_sessionmaker()
        ##DO WORK
        db_work_session.close()
    """
    sqlite_base_url = 'sqlite:///'
    sqlalchemy_url = sqlite_base_url + path
    init_time = init_db(sqlalchemy_url)
    if initTime:
        print('TIME: {0} seconds'.format(init_time)) # depends on [control=['if'], data=[]]
    return sqlalchemy_url
def create_output_directories(self): """Create output directories for thumbnails and original images.""" check_or_create_dir(self.dst_path) if self.medias: check_or_create_dir(join(self.dst_path, self.settings['thumb_dir'])) if self.medias and self.settings['keep_orig']: self.orig_path = join(self.dst_path, self.settings['orig_dir']) check_or_create_dir(self.orig_path)
def function[create_output_directories, parameter[self]]: constant[Create output directories for thumbnails and original images.] call[name[check_or_create_dir], parameter[name[self].dst_path]] if name[self].medias begin[:] call[name[check_or_create_dir], parameter[call[name[join], parameter[name[self].dst_path, call[name[self].settings][constant[thumb_dir]]]]]] if <ast.BoolOp object at 0x7da1b016d060> begin[:] name[self].orig_path assign[=] call[name[join], parameter[name[self].dst_path, call[name[self].settings][constant[orig_dir]]]] call[name[check_or_create_dir], parameter[name[self].orig_path]]
keyword[def] identifier[create_output_directories] ( identifier[self] ): literal[string] identifier[check_or_create_dir] ( identifier[self] . identifier[dst_path] ) keyword[if] identifier[self] . identifier[medias] : identifier[check_or_create_dir] ( identifier[join] ( identifier[self] . identifier[dst_path] , identifier[self] . identifier[settings] [ literal[string] ])) keyword[if] identifier[self] . identifier[medias] keyword[and] identifier[self] . identifier[settings] [ literal[string] ]: identifier[self] . identifier[orig_path] = identifier[join] ( identifier[self] . identifier[dst_path] , identifier[self] . identifier[settings] [ literal[string] ]) identifier[check_or_create_dir] ( identifier[self] . identifier[orig_path] )
def create_output_directories(self): """Create output directories for thumbnails and original images.""" check_or_create_dir(self.dst_path) if self.medias: check_or_create_dir(join(self.dst_path, self.settings['thumb_dir'])) # depends on [control=['if'], data=[]] if self.medias and self.settings['keep_orig']: self.orig_path = join(self.dst_path, self.settings['orig_dir']) check_or_create_dir(self.orig_path) # depends on [control=['if'], data=[]]
def pop_fw_local(self, tenant_id, net_id, direc, node_ip): """Populate the local cache. Read the Network DB and populate the local cache. Read the subnet from the Subnet DB, given the net_id and populate the cache. """ net = self.get_network(net_id) serv_obj = self.get_service_obj(tenant_id) serv_obj.update_fw_local_cache(net_id, direc, node_ip) if net is not None: net_dict = self.fill_dcnm_net_info(tenant_id, direc, net.vlan, net.segmentation_id) serv_obj.store_dcnm_net_dict(net_dict, direc) if direc == "in": subnet = self.service_in_ip.get_subnet_by_netid(net_id) else: subnet = self.service_out_ip.get_subnet_by_netid(net_id) if subnet is not None: subnet_dict = self.fill_dcnm_subnet_info( tenant_id, subnet, self.get_start_ip(subnet), self.get_end_ip(subnet), self.get_gateway(subnet), self.get_secondary_gateway(subnet), direc) serv_obj.store_dcnm_subnet_dict(subnet_dict, direc)
def function[pop_fw_local, parameter[self, tenant_id, net_id, direc, node_ip]]: constant[Populate the local cache. Read the Network DB and populate the local cache. Read the subnet from the Subnet DB, given the net_id and populate the cache. ] variable[net] assign[=] call[name[self].get_network, parameter[name[net_id]]] variable[serv_obj] assign[=] call[name[self].get_service_obj, parameter[name[tenant_id]]] call[name[serv_obj].update_fw_local_cache, parameter[name[net_id], name[direc], name[node_ip]]] if compare[name[net] is_not constant[None]] begin[:] variable[net_dict] assign[=] call[name[self].fill_dcnm_net_info, parameter[name[tenant_id], name[direc], name[net].vlan, name[net].segmentation_id]] call[name[serv_obj].store_dcnm_net_dict, parameter[name[net_dict], name[direc]]] if compare[name[direc] equal[==] constant[in]] begin[:] variable[subnet] assign[=] call[name[self].service_in_ip.get_subnet_by_netid, parameter[name[net_id]]] if compare[name[subnet] is_not constant[None]] begin[:] variable[subnet_dict] assign[=] call[name[self].fill_dcnm_subnet_info, parameter[name[tenant_id], name[subnet], call[name[self].get_start_ip, parameter[name[subnet]]], call[name[self].get_end_ip, parameter[name[subnet]]], call[name[self].get_gateway, parameter[name[subnet]]], call[name[self].get_secondary_gateway, parameter[name[subnet]]], name[direc]]] call[name[serv_obj].store_dcnm_subnet_dict, parameter[name[subnet_dict], name[direc]]]
keyword[def] identifier[pop_fw_local] ( identifier[self] , identifier[tenant_id] , identifier[net_id] , identifier[direc] , identifier[node_ip] ): literal[string] identifier[net] = identifier[self] . identifier[get_network] ( identifier[net_id] ) identifier[serv_obj] = identifier[self] . identifier[get_service_obj] ( identifier[tenant_id] ) identifier[serv_obj] . identifier[update_fw_local_cache] ( identifier[net_id] , identifier[direc] , identifier[node_ip] ) keyword[if] identifier[net] keyword[is] keyword[not] keyword[None] : identifier[net_dict] = identifier[self] . identifier[fill_dcnm_net_info] ( identifier[tenant_id] , identifier[direc] , identifier[net] . identifier[vlan] , identifier[net] . identifier[segmentation_id] ) identifier[serv_obj] . identifier[store_dcnm_net_dict] ( identifier[net_dict] , identifier[direc] ) keyword[if] identifier[direc] == literal[string] : identifier[subnet] = identifier[self] . identifier[service_in_ip] . identifier[get_subnet_by_netid] ( identifier[net_id] ) keyword[else] : identifier[subnet] = identifier[self] . identifier[service_out_ip] . identifier[get_subnet_by_netid] ( identifier[net_id] ) keyword[if] identifier[subnet] keyword[is] keyword[not] keyword[None] : identifier[subnet_dict] = identifier[self] . identifier[fill_dcnm_subnet_info] ( identifier[tenant_id] , identifier[subnet] , identifier[self] . identifier[get_start_ip] ( identifier[subnet] ), identifier[self] . identifier[get_end_ip] ( identifier[subnet] ), identifier[self] . identifier[get_gateway] ( identifier[subnet] ), identifier[self] . identifier[get_secondary_gateway] ( identifier[subnet] ), identifier[direc] ) identifier[serv_obj] . identifier[store_dcnm_subnet_dict] ( identifier[subnet_dict] , identifier[direc] )
def pop_fw_local(self, tenant_id, net_id, direc, node_ip): """Populate the local cache. Read the Network DB and populate the local cache. Read the subnet from the Subnet DB, given the net_id and populate the cache. """ net = self.get_network(net_id) serv_obj = self.get_service_obj(tenant_id) serv_obj.update_fw_local_cache(net_id, direc, node_ip) if net is not None: net_dict = self.fill_dcnm_net_info(tenant_id, direc, net.vlan, net.segmentation_id) serv_obj.store_dcnm_net_dict(net_dict, direc) # depends on [control=['if'], data=['net']] if direc == 'in': subnet = self.service_in_ip.get_subnet_by_netid(net_id) # depends on [control=['if'], data=[]] else: subnet = self.service_out_ip.get_subnet_by_netid(net_id) if subnet is not None: subnet_dict = self.fill_dcnm_subnet_info(tenant_id, subnet, self.get_start_ip(subnet), self.get_end_ip(subnet), self.get_gateway(subnet), self.get_secondary_gateway(subnet), direc) serv_obj.store_dcnm_subnet_dict(subnet_dict, direc) # depends on [control=['if'], data=['subnet']]
def get_machine_stats(self): ''' Gather spider based stats ''' self.logger.debug("Gathering machine stats") the_dict = {} keys = self.redis_conn.keys('stats:crawler:*:*:*:*') for key in keys: # break down key elements = key.split(":") machine = elements[2] spider = elements[3] response = elements[4] end = elements[5] # we only care about the machine, not spider type if machine not in the_dict: the_dict[machine] = {} if response not in the_dict[machine]: the_dict[machine][response] = {} if end in the_dict[machine][response]: the_dict[machine][response][end] = the_dict[machine][response][end] + \ self._get_key_value(key, end == 'lifetime') else: the_dict[machine][response][end] = self._get_key_value(key, end == 'lifetime') # simple count the_dict['count'] = len(list(the_dict.keys())) ret_dict = {} ret_dict['machines'] = the_dict return ret_dict
def function[get_machine_stats, parameter[self]]: constant[ Gather spider based stats ] call[name[self].logger.debug, parameter[constant[Gathering machine stats]]] variable[the_dict] assign[=] dictionary[[], []] variable[keys] assign[=] call[name[self].redis_conn.keys, parameter[constant[stats:crawler:*:*:*:*]]] for taget[name[key]] in starred[name[keys]] begin[:] variable[elements] assign[=] call[name[key].split, parameter[constant[:]]] variable[machine] assign[=] call[name[elements]][constant[2]] variable[spider] assign[=] call[name[elements]][constant[3]] variable[response] assign[=] call[name[elements]][constant[4]] variable[end] assign[=] call[name[elements]][constant[5]] if compare[name[machine] <ast.NotIn object at 0x7da2590d7190> name[the_dict]] begin[:] call[name[the_dict]][name[machine]] assign[=] dictionary[[], []] if compare[name[response] <ast.NotIn object at 0x7da2590d7190> call[name[the_dict]][name[machine]]] begin[:] call[call[name[the_dict]][name[machine]]][name[response]] assign[=] dictionary[[], []] if compare[name[end] in call[call[name[the_dict]][name[machine]]][name[response]]] begin[:] call[call[call[name[the_dict]][name[machine]]][name[response]]][name[end]] assign[=] binary_operation[call[call[call[name[the_dict]][name[machine]]][name[response]]][name[end]] + call[name[self]._get_key_value, parameter[name[key], compare[name[end] equal[==] constant[lifetime]]]]] call[name[the_dict]][constant[count]] assign[=] call[name[len], parameter[call[name[list], parameter[call[name[the_dict].keys, parameter[]]]]]] variable[ret_dict] assign[=] dictionary[[], []] call[name[ret_dict]][constant[machines]] assign[=] name[the_dict] return[name[ret_dict]]
keyword[def] identifier[get_machine_stats] ( identifier[self] ): literal[string] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) identifier[the_dict] ={} identifier[keys] = identifier[self] . identifier[redis_conn] . identifier[keys] ( literal[string] ) keyword[for] identifier[key] keyword[in] identifier[keys] : identifier[elements] = identifier[key] . identifier[split] ( literal[string] ) identifier[machine] = identifier[elements] [ literal[int] ] identifier[spider] = identifier[elements] [ literal[int] ] identifier[response] = identifier[elements] [ literal[int] ] identifier[end] = identifier[elements] [ literal[int] ] keyword[if] identifier[machine] keyword[not] keyword[in] identifier[the_dict] : identifier[the_dict] [ identifier[machine] ]={} keyword[if] identifier[response] keyword[not] keyword[in] identifier[the_dict] [ identifier[machine] ]: identifier[the_dict] [ identifier[machine] ][ identifier[response] ]={} keyword[if] identifier[end] keyword[in] identifier[the_dict] [ identifier[machine] ][ identifier[response] ]: identifier[the_dict] [ identifier[machine] ][ identifier[response] ][ identifier[end] ]= identifier[the_dict] [ identifier[machine] ][ identifier[response] ][ identifier[end] ]+ identifier[self] . identifier[_get_key_value] ( identifier[key] , identifier[end] == literal[string] ) keyword[else] : identifier[the_dict] [ identifier[machine] ][ identifier[response] ][ identifier[end] ]= identifier[self] . identifier[_get_key_value] ( identifier[key] , identifier[end] == literal[string] ) identifier[the_dict] [ literal[string] ]= identifier[len] ( identifier[list] ( identifier[the_dict] . identifier[keys] ())) identifier[ret_dict] ={} identifier[ret_dict] [ literal[string] ]= identifier[the_dict] keyword[return] identifier[ret_dict]
def get_machine_stats(self): """ Gather spider based stats """ self.logger.debug('Gathering machine stats') the_dict = {} keys = self.redis_conn.keys('stats:crawler:*:*:*:*') for key in keys: # break down key elements = key.split(':') machine = elements[2] spider = elements[3] response = elements[4] end = elements[5] # we only care about the machine, not spider type if machine not in the_dict: the_dict[machine] = {} # depends on [control=['if'], data=['machine', 'the_dict']] if response not in the_dict[machine]: the_dict[machine][response] = {} # depends on [control=['if'], data=['response']] if end in the_dict[machine][response]: the_dict[machine][response][end] = the_dict[machine][response][end] + self._get_key_value(key, end == 'lifetime') # depends on [control=['if'], data=['end']] else: the_dict[machine][response][end] = self._get_key_value(key, end == 'lifetime') # depends on [control=['for'], data=['key']] # simple count the_dict['count'] = len(list(the_dict.keys())) ret_dict = {} ret_dict['machines'] = the_dict return ret_dict
def remove_sort(self, field_name):
        """ Clears sorting criteria affecting ``field_name``. """ self.sorts = [{field: value} for field, value in self.sorts if field != field_name]
def function[remove_sort, parameter[self, field_name]]: constant[ Clears sorting criteria affecting ``field_name``. ] name[self].sorts assign[=] <ast.ListComp object at 0x7da204620670>
keyword[def] identifier[remove_sort] ( identifier[self] , identifier[field_name] ): literal[string] identifier[self] . identifier[sorts] =[ identifier[dict] ( identifier[field] = identifier[value] ) keyword[for] identifier[field] , identifier[value] keyword[in] identifier[self] . identifier[sorts] keyword[if] identifier[field] keyword[is] keyword[not] identifier[field_name] ]
def remove_sort(self, field_name):
    """ Clears sorting criteria affecting ``field_name``. """ self.sorts = [{field: value} for (field, value) in self.sorts if field != field_name]
def handleStatus(self, version, code, message): "extends handleStatus to instantiate a local response object" proxy.ProxyClient.handleStatus(self, version, code, message) # client.Response is currently just a container for needed data self._response = client.Response(version, code, message, {}, None)
def function[handleStatus, parameter[self, version, code, message]]: constant[extends handleStatus to instantiate a local response object] call[name[proxy].ProxyClient.handleStatus, parameter[name[self], name[version], name[code], name[message]]] name[self]._response assign[=] call[name[client].Response, parameter[name[version], name[code], name[message], dictionary[[], []], constant[None]]]
keyword[def] identifier[handleStatus] ( identifier[self] , identifier[version] , identifier[code] , identifier[message] ): literal[string] identifier[proxy] . identifier[ProxyClient] . identifier[handleStatus] ( identifier[self] , identifier[version] , identifier[code] , identifier[message] ) identifier[self] . identifier[_response] = identifier[client] . identifier[Response] ( identifier[version] , identifier[code] , identifier[message] ,{}, keyword[None] )
def handleStatus(self, version, code, message): """extends handleStatus to instantiate a local response object""" proxy.ProxyClient.handleStatus(self, version, code, message) # client.Response is currently just a container for needed data self._response = client.Response(version, code, message, {}, None)
def extract_optional_location_root_info(ir_blocks):
    """Construct a mapping from locations within @optional to their corresponding optional Traverse.

    Args:
        ir_blocks: list of IR blocks to extract optional data from

    Returns:
        tuple (complex_optional_roots, location_to_optional_roots):
        complex_optional_roots: list of @optional locations (location immediately preceding
                                an @optional Traverse) that expand vertex fields
        location_to_optional_roots: dict mapping from location -> optional_roots where location is
                                    within some number of @optionals and optional_roots is a list
                                    of optional root locations preceding the successive @optional
                                    scopes within which the location resides
    """
    complex_optional_roots = []
    location_to_optional_roots = dict()

    # These are both stacks that perform depth-first search on the tree of @optional edges.
    # At any given location they contain
    # - in_optional_root_locations: all the optional root locations
    # - encountered_traverse_within_optional: whether the optional is complex or not
    # in the order in which they appear on the path from the root to that location.
    in_optional_root_locations = []
    encountered_traverse_within_optional = []

    # Blocks within folded scopes should not be taken into account in this function.
    _, non_folded_ir_blocks = extract_folds_from_ir_blocks(ir_blocks)

    preceding_location = None
    for current_block in non_folded_ir_blocks:
        if len(in_optional_root_locations) > 0 and isinstance(current_block, (Traverse, Recurse)):
            encountered_traverse_within_optional[-1] = True

        if isinstance(current_block, Traverse) and current_block.optional:
            if preceding_location is None:
                raise AssertionError(u'No MarkLocation found before an optional Traverse: {} {}'
                                     .format(current_block, non_folded_ir_blocks))
            in_optional_root_locations.append(preceding_location)
            encountered_traverse_within_optional.append(False)
        elif isinstance(current_block, EndOptional):
            if len(in_optional_root_locations) == 0:
                raise AssertionError(u'in_optional_root_locations was empty at an EndOptional '
                                     u'block: {}'.format(ir_blocks))
            if encountered_traverse_within_optional[-1]:
                complex_optional_roots.append(in_optional_root_locations[-1])
            in_optional_root_locations.pop()
            encountered_traverse_within_optional.pop()
        elif isinstance(current_block, MarkLocation):
            preceding_location = current_block.location
            if len(in_optional_root_locations) != 0:
                # in_optional_root_locations will not be empty if and only if we are within an
                # @optional scope. In this case, we add the current location to the dictionary
                # mapping it to the sequence of optional root locations leading up to it.
                optional_root_locations_stack = tuple(in_optional_root_locations)
                location_to_optional_roots[current_block.location] = optional_root_locations_stack
        else:
            # No locations need to be marked, and no optional scopes begin or end here.
            pass

    return complex_optional_roots, location_to_optional_roots
def function[extract_optional_location_root_info, parameter[ir_blocks]]: constant[Construct a mapping from locations within @optional to their correspoding optional Traverse. Args: ir_blocks: list of IR blocks to extract optional data from Returns: tuple (complex_optional_roots, location_to_optional_roots): complex_optional_roots: list of @optional locations (location immmediately preceding an @optional Traverse) that expand vertex fields location_to_optional_roots: dict mapping from location -> optional_roots where location is within some number of @optionals and optional_roots is a list of optional root locations preceding the successive @optional scopes within which the location resides ] variable[complex_optional_roots] assign[=] list[[]] variable[location_to_optional_roots] assign[=] call[name[dict], parameter[]] variable[in_optional_root_locations] assign[=] list[[]] variable[encountered_traverse_within_optional] assign[=] list[[]] <ast.Tuple object at 0x7da1b1725840> assign[=] call[name[extract_folds_from_ir_blocks], parameter[name[ir_blocks]]] variable[preceding_location] assign[=] constant[None] for taget[name[current_block]] in starred[name[non_folded_ir_blocks]] begin[:] if <ast.BoolOp object at 0x7da1b1724b80> begin[:] call[name[encountered_traverse_within_optional]][<ast.UnaryOp object at 0x7da1b1725ea0>] assign[=] constant[True] if <ast.BoolOp object at 0x7da1b1725870> begin[:] if compare[name[preceding_location] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1726fe0> call[name[in_optional_root_locations].append, parameter[name[preceding_location]]] call[name[encountered_traverse_within_optional].append, parameter[constant[False]]] return[tuple[[<ast.Name object at 0x7da2045648e0>, <ast.Name object at 0x7da204565c00>]]]
keyword[def] identifier[extract_optional_location_root_info] ( identifier[ir_blocks] ): literal[string] identifier[complex_optional_roots] =[] identifier[location_to_optional_roots] = identifier[dict] () identifier[in_optional_root_locations] =[] identifier[encountered_traverse_within_optional] =[] identifier[_] , identifier[non_folded_ir_blocks] = identifier[extract_folds_from_ir_blocks] ( identifier[ir_blocks] ) identifier[preceding_location] = keyword[None] keyword[for] identifier[current_block] keyword[in] identifier[non_folded_ir_blocks] : keyword[if] identifier[len] ( identifier[in_optional_root_locations] )> literal[int] keyword[and] identifier[isinstance] ( identifier[current_block] ,( identifier[Traverse] , identifier[Recurse] )): identifier[encountered_traverse_within_optional] [- literal[int] ]= keyword[True] keyword[if] identifier[isinstance] ( identifier[current_block] , identifier[Traverse] ) keyword[and] identifier[current_block] . identifier[optional] : keyword[if] identifier[preceding_location] keyword[is] keyword[None] : keyword[raise] identifier[AssertionError] ( literal[string] . identifier[format] ( identifier[current_block] , identifier[non_folded_ir_blocks] )) identifier[in_optional_root_locations] . identifier[append] ( identifier[preceding_location] ) identifier[encountered_traverse_within_optional] . identifier[append] ( keyword[False] ) keyword[elif] identifier[isinstance] ( identifier[current_block] , identifier[EndOptional] ): keyword[if] identifier[len] ( identifier[in_optional_root_locations] )== literal[int] : keyword[raise] identifier[AssertionError] ( literal[string] literal[string] . identifier[format] ( identifier[ir_blocks] )) keyword[if] identifier[encountered_traverse_within_optional] [- literal[int] ]: identifier[complex_optional_roots] . identifier[append] ( identifier[in_optional_root_locations] [- literal[int] ]) identifier[in_optional_root_locations] . identifier[pop] () identifier[encountered_traverse_within_optional] . identifier[pop] () keyword[elif] identifier[isinstance] ( identifier[current_block] , identifier[MarkLocation] ): identifier[preceding_location] = identifier[current_block] . identifier[location] keyword[if] identifier[len] ( identifier[in_optional_root_locations] )!= literal[int] : identifier[optional_root_locations_stack] = identifier[tuple] ( identifier[in_optional_root_locations] ) identifier[location_to_optional_roots] [ identifier[current_block] . identifier[location] ]= identifier[optional_root_locations_stack] keyword[else] : keyword[pass] keyword[return] identifier[complex_optional_roots] , identifier[location_to_optional_roots]
def extract_optional_location_root_info(ir_blocks):
    """Construct a mapping from locations within @optional to their corresponding optional Traverse.

    Args:
        ir_blocks: list of IR blocks to extract optional data from

    Returns:
        tuple (complex_optional_roots, location_to_optional_roots):
        complex_optional_roots: list of @optional locations (location immediately preceding
                                an @optional Traverse) that expand vertex fields
        location_to_optional_roots: dict mapping from location -> optional_roots where location is
                                    within some number of @optionals and optional_roots is a list
                                    of optional root locations preceding the successive @optional
                                    scopes within which the location resides
    """
    complex_optional_roots = []
    location_to_optional_roots = dict()
    # These are both stacks that perform depth-first search on the tree of @optional edges.
    # At any given location they contain
    # - in_optional_root_locations: all the optional root locations
    # - encountered_traverse_within_optional: whether the optional is complex or not
    # in the order in which they appear on the path from the root to that location.
    in_optional_root_locations = []
    encountered_traverse_within_optional = []
    # Blocks within folded scopes should not be taken into account in this function.
    (_, non_folded_ir_blocks) = extract_folds_from_ir_blocks(ir_blocks)
    preceding_location = None
    for current_block in non_folded_ir_blocks:
        if len(in_optional_root_locations) > 0 and isinstance(current_block, (Traverse, Recurse)):
            encountered_traverse_within_optional[-1] = True # depends on [control=['if'], data=[]]
        if isinstance(current_block, Traverse) and current_block.optional:
            if preceding_location is None:
                raise AssertionError(u'No MarkLocation found before an optional Traverse: {} {}'.format(current_block, non_folded_ir_blocks)) # depends on [control=['if'], data=[]]
            in_optional_root_locations.append(preceding_location)
            encountered_traverse_within_optional.append(False) # depends on [control=['if'], data=[]]
        elif isinstance(current_block, EndOptional):
            if len(in_optional_root_locations) == 0:
                raise AssertionError(u'in_optional_root_locations was empty at an EndOptional block: {}'.format(ir_blocks)) # depends on [control=['if'], data=[]]
            if encountered_traverse_within_optional[-1]:
                complex_optional_roots.append(in_optional_root_locations[-1]) # depends on [control=['if'], data=[]]
            in_optional_root_locations.pop()
            encountered_traverse_within_optional.pop() # depends on [control=['if'], data=[]]
        elif isinstance(current_block, MarkLocation):
            preceding_location = current_block.location
            if len(in_optional_root_locations) != 0:
                # in_optional_root_locations will not be empty if and only if we are within an
                # @optional scope. In this case, we add the current location to the dictionary
                # mapping it to the sequence of optional root locations leading up to it.
                optional_root_locations_stack = tuple(in_optional_root_locations)
                location_to_optional_roots[current_block.location] = optional_root_locations_stack # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
        else:
            # No locations need to be marked, and no optional scopes begin or end here.
            pass # depends on [control=['for'], data=['current_block']]
    return (complex_optional_roots, location_to_optional_roots)
def _wrap_type_instantiation(self, type_cls): """Wrap the creation of the type so that we can provide a null-stream to initialize it""" def wrapper(*args, **kwargs): # use args for struct arguments?? return type_cls(stream=self._null_stream) return wrapper
def function[_wrap_type_instantiation, parameter[self, type_cls]]: constant[Wrap the creation of the type so that we can provide a null-stream to initialize it] def function[wrapper, parameter[]]: return[call[name[type_cls], parameter[]]] return[name[wrapper]]
keyword[def] identifier[_wrap_type_instantiation] ( identifier[self] , identifier[type_cls] ): literal[string] keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): keyword[return] identifier[type_cls] ( identifier[stream] = identifier[self] . identifier[_null_stream] ) keyword[return] identifier[wrapper]
def _wrap_type_instantiation(self, type_cls): """Wrap the creation of the type so that we can provide a null-stream to initialize it""" def wrapper(*args, **kwargs): # use args for struct arguments?? return type_cls(stream=self._null_stream) return wrapper
def _populate_audio_file(self): """ Create the ``self.audio_file`` object by reading the audio file at ``self.audio_file_path_absolute``. """ self.log(u"Populate audio file...") if self.audio_file_path_absolute is not None: self.log([u"audio_file_path_absolute is '%s'", self.audio_file_path_absolute]) self.audio_file = AudioFile( file_path=self.audio_file_path_absolute, logger=self.logger ) self.audio_file.read_properties() else: self.log(u"audio_file_path_absolute is None") self.log(u"Populate audio file... done")
def function[_populate_audio_file, parameter[self]]: constant[ Create the ``self.audio_file`` object by reading the audio file at ``self.audio_file_path_absolute``. ] call[name[self].log, parameter[constant[Populate audio file...]]] if compare[name[self].audio_file_path_absolute is_not constant[None]] begin[:] call[name[self].log, parameter[list[[<ast.Constant object at 0x7da18bc70d00>, <ast.Attribute object at 0x7da18bc73550>]]]] name[self].audio_file assign[=] call[name[AudioFile], parameter[]] call[name[self].audio_file.read_properties, parameter[]] call[name[self].log, parameter[constant[Populate audio file... done]]]
keyword[def] identifier[_populate_audio_file] ( identifier[self] ): literal[string] identifier[self] . identifier[log] ( literal[string] ) keyword[if] identifier[self] . identifier[audio_file_path_absolute] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[log] ([ literal[string] , identifier[self] . identifier[audio_file_path_absolute] ]) identifier[self] . identifier[audio_file] = identifier[AudioFile] ( identifier[file_path] = identifier[self] . identifier[audio_file_path_absolute] , identifier[logger] = identifier[self] . identifier[logger] ) identifier[self] . identifier[audio_file] . identifier[read_properties] () keyword[else] : identifier[self] . identifier[log] ( literal[string] ) identifier[self] . identifier[log] ( literal[string] )
def _populate_audio_file(self): """ Create the ``self.audio_file`` object by reading the audio file at ``self.audio_file_path_absolute``. """ self.log(u'Populate audio file...') if self.audio_file_path_absolute is not None: self.log([u"audio_file_path_absolute is '%s'", self.audio_file_path_absolute]) self.audio_file = AudioFile(file_path=self.audio_file_path_absolute, logger=self.logger) self.audio_file.read_properties() # depends on [control=['if'], data=[]] else: self.log(u'audio_file_path_absolute is None') self.log(u'Populate audio file... done')
def visit_ExceptHandler(self, node): """OUT = body's, RAISES = body's""" currs = (node,) raises = () for n in node.body: self.result.add_node(n) for curr in currs: self.result.add_edge(curr, n) currs, nraises = self.visit(n) raises += nraises return currs, raises
def function[visit_ExceptHandler, parameter[self, node]]: constant[OUT = body's, RAISES = body's] variable[currs] assign[=] tuple[[<ast.Name object at 0x7da18dc05e40>]] variable[raises] assign[=] tuple[[]] for taget[name[n]] in starred[name[node].body] begin[:] call[name[self].result.add_node, parameter[name[n]]] for taget[name[curr]] in starred[name[currs]] begin[:] call[name[self].result.add_edge, parameter[name[curr], name[n]]] <ast.Tuple object at 0x7da18dc069e0> assign[=] call[name[self].visit, parameter[name[n]]] <ast.AugAssign object at 0x7da18dc065f0> return[tuple[[<ast.Name object at 0x7da18dc04cd0>, <ast.Name object at 0x7da18dc05780>]]]
keyword[def] identifier[visit_ExceptHandler] ( identifier[self] , identifier[node] ): literal[string] identifier[currs] =( identifier[node] ,) identifier[raises] =() keyword[for] identifier[n] keyword[in] identifier[node] . identifier[body] : identifier[self] . identifier[result] . identifier[add_node] ( identifier[n] ) keyword[for] identifier[curr] keyword[in] identifier[currs] : identifier[self] . identifier[result] . identifier[add_edge] ( identifier[curr] , identifier[n] ) identifier[currs] , identifier[nraises] = identifier[self] . identifier[visit] ( identifier[n] ) identifier[raises] += identifier[nraises] keyword[return] identifier[currs] , identifier[raises]
def visit_ExceptHandler(self, node): """OUT = body's, RAISES = body's""" currs = (node,) raises = () for n in node.body: self.result.add_node(n) for curr in currs: self.result.add_edge(curr, n) # depends on [control=['for'], data=['curr']] (currs, nraises) = self.visit(n) raises += nraises # depends on [control=['for'], data=['n']] return (currs, raises)
def cylinder_inertia(mass, radius, height, transform=None): """ Return the inertia tensor of a cylinder. Parameters ------------ mass : float Mass of cylinder radius : float Radius of cylinder height : float Height of cylinder transform : (4,4) float Transformation of cylinder Returns ------------ inertia : (3,3) float Inertia tensor """ h2, r2 = height ** 2, radius ** 2 diagonal = np.array([((mass * h2) / 12) + ((mass * r2) / 4), ((mass * h2) / 12) + ((mass * r2) / 4), (mass * r2) / 2]) inertia = diagonal * np.eye(3) if transform is not None: inertia = transform_inertia(transform, inertia) return inertia
def function[cylinder_inertia, parameter[mass, radius, height, transform]]: constant[ Return the inertia tensor of a cylinder. Parameters ------------ mass : float Mass of cylinder radius : float Radius of cylinder height : float Height of cylinder transform : (4,4) float Transformation of cylinder Returns ------------ inertia : (3,3) float Inertia tensor ] <ast.Tuple object at 0x7da18f810040> assign[=] tuple[[<ast.BinOp object at 0x7da18f813010>, <ast.BinOp object at 0x7da18f8119f0>]] variable[diagonal] assign[=] call[name[np].array, parameter[list[[<ast.BinOp object at 0x7da2045652d0>, <ast.BinOp object at 0x7da2045673a0>, <ast.BinOp object at 0x7da204564820>]]]] variable[inertia] assign[=] binary_operation[name[diagonal] * call[name[np].eye, parameter[constant[3]]]] if compare[name[transform] is_not constant[None]] begin[:] variable[inertia] assign[=] call[name[transform_inertia], parameter[name[transform], name[inertia]]] return[name[inertia]]
keyword[def] identifier[cylinder_inertia] ( identifier[mass] , identifier[radius] , identifier[height] , identifier[transform] = keyword[None] ): literal[string] identifier[h2] , identifier[r2] = identifier[height] ** literal[int] , identifier[radius] ** literal[int] identifier[diagonal] = identifier[np] . identifier[array] ([(( identifier[mass] * identifier[h2] )/ literal[int] )+(( identifier[mass] * identifier[r2] )/ literal[int] ), (( identifier[mass] * identifier[h2] )/ literal[int] )+(( identifier[mass] * identifier[r2] )/ literal[int] ), ( identifier[mass] * identifier[r2] )/ literal[int] ]) identifier[inertia] = identifier[diagonal] * identifier[np] . identifier[eye] ( literal[int] ) keyword[if] identifier[transform] keyword[is] keyword[not] keyword[None] : identifier[inertia] = identifier[transform_inertia] ( identifier[transform] , identifier[inertia] ) keyword[return] identifier[inertia]
def cylinder_inertia(mass, radius, height, transform=None): """ Return the inertia tensor of a cylinder. Parameters ------------ mass : float Mass of cylinder radius : float Radius of cylinder height : float Height of cylinder transform : (4,4) float Transformation of cylinder Returns ------------ inertia : (3,3) float Inertia tensor """ (h2, r2) = (height ** 2, radius ** 2) diagonal = np.array([mass * h2 / 12 + mass * r2 / 4, mass * h2 / 12 + mass * r2 / 4, mass * r2 / 2]) inertia = diagonal * np.eye(3) if transform is not None: inertia = transform_inertia(transform, inertia) # depends on [control=['if'], data=['transform']] return inertia
def server(self):
        """
        All in one endpoints. This property is created automatically if you
        have implemented all the getters and setters.

        However, if you are not satisfied with the getter and setter,
        you can create a validator with :class:`OAuth2RequestValidator`::

            class MyValidator(OAuth2RequestValidator):
                def validate_client_id(self, client_id):
                    # do something
                    return True

        And assign the validator for the provider::

            oauth._validator = MyValidator()
        """
        expires_in = self.app.config.get('OAUTH2_PROVIDER_TOKEN_EXPIRES_IN')
        token_generator = self.app.config.get(
            'OAUTH2_PROVIDER_TOKEN_GENERATOR', None
        )
        if token_generator and not callable(token_generator):
            token_generator = import_string(token_generator)

        refresh_token_generator = self.app.config.get(
            'OAUTH2_PROVIDER_REFRESH_TOKEN_GENERATOR', None
        )
        if refresh_token_generator and not callable(refresh_token_generator):
            refresh_token_generator = import_string(refresh_token_generator)

        if hasattr(self, '_validator'):
            return Server(
                self._validator,
                token_expires_in=expires_in,
                token_generator=token_generator,
                refresh_token_generator=refresh_token_generator,
            )

        if hasattr(self, '_clientgetter') and \
                hasattr(self, '_tokengetter') and \
                hasattr(self, '_tokensetter') and \
                hasattr(self, '_grantgetter') and \
                hasattr(self, '_grantsetter'):

            usergetter = None
            if hasattr(self, '_usergetter'):
                usergetter = self._usergetter

            validator_class = self._validator_class
            if validator_class is None:
                validator_class = OAuth2RequestValidator
            validator = validator_class(
                clientgetter=self._clientgetter,
                tokengetter=self._tokengetter,
                grantgetter=self._grantgetter,
                usergetter=usergetter,
                tokensetter=self._tokensetter,
                grantsetter=self._grantsetter,
            )
            self._validator = validator
            return Server(
                validator,
                token_expires_in=expires_in,
                token_generator=token_generator,
                refresh_token_generator=refresh_token_generator,
            )
        raise RuntimeError('application not bound to required getters')
def function[server, parameter[self]]: constant[ All in one endpoints. This property is created automaticly if you have implemented all the getters and setters. However, if you are not satisfied with the getter and setter, you can create a validator with :class:`OAuth2RequestValidator`:: class MyValidator(OAuth2RequestValidator): def validate_client_id(self, client_id): # do something return True And assign the validator for the provider:: oauth._validator = MyValidator() ] variable[expires_in] assign[=] call[name[self].app.config.get, parameter[constant[OAUTH2_PROVIDER_TOKEN_EXPIRES_IN]]] variable[token_generator] assign[=] call[name[self].app.config.get, parameter[constant[OAUTH2_PROVIDER_TOKEN_GENERATOR], constant[None]]] if <ast.BoolOp object at 0x7da1b02f0d90> begin[:] variable[token_generator] assign[=] call[name[import_string], parameter[name[token_generator]]] variable[refresh_token_generator] assign[=] call[name[self].app.config.get, parameter[constant[OAUTH2_PROVIDER_REFRESH_TOKEN_GENERATOR], constant[None]]] if <ast.BoolOp object at 0x7da1b02f18a0> begin[:] variable[refresh_token_generator] assign[=] call[name[import_string], parameter[name[refresh_token_generator]]] if call[name[hasattr], parameter[name[self], constant[_validator]]] begin[:] return[call[name[Server], parameter[name[self]._validator]]] if <ast.BoolOp object at 0x7da1b0317490> begin[:] variable[usergetter] assign[=] constant[None] if call[name[hasattr], parameter[name[self], constant[_usergetter]]] begin[:] variable[usergetter] assign[=] name[self]._usergetter variable[validator_class] assign[=] name[self]._validator_class if compare[name[validator_class] is constant[None]] begin[:] variable[validator_class] assign[=] name[OAuth2RequestValidator] variable[validator] assign[=] call[name[validator_class], parameter[]] name[self]._validator assign[=] name[validator] return[call[name[Server], parameter[name[validator]]]] <ast.Raise object at 0x7da1b0315180>
keyword[def] identifier[server] ( identifier[self] ): literal[string] identifier[expires_in] = identifier[self] . identifier[app] . identifier[config] . identifier[get] ( literal[string] ) identifier[token_generator] = identifier[self] . identifier[app] . identifier[config] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[token_generator] keyword[and] keyword[not] identifier[callable] ( identifier[token_generator] ): identifier[token_generator] = identifier[import_string] ( identifier[token_generator] ) identifier[refresh_token_generator] = identifier[self] . identifier[app] . identifier[config] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[refresh_token_generator] keyword[and] keyword[not] identifier[callable] ( identifier[refresh_token_generator] ): identifier[refresh_token_generator] = identifier[import_string] ( identifier[refresh_token_generator] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[return] identifier[Server] ( identifier[self] . identifier[_validator] , identifier[token_expires_in] = identifier[expires_in] , identifier[token_generator] = identifier[token_generator] , identifier[refresh_token_generator] = identifier[refresh_token_generator] , ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[usergetter] = keyword[None] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[usergetter] = identifier[self] . identifier[_usergetter] identifier[validator_class] = identifier[self] . identifier[_validator_class] keyword[if] identifier[validator_class] keyword[is] keyword[None] : identifier[validator_class] = identifier[OAuth2RequestValidator] identifier[validator] = identifier[validator_class] ( identifier[clientgetter] = identifier[self] . identifier[_clientgetter] , identifier[tokengetter] = identifier[self] . identifier[_tokengetter] , identifier[grantgetter] = identifier[self] . identifier[_grantgetter] , identifier[usergetter] = identifier[usergetter] , identifier[tokensetter] = identifier[self] . identifier[_tokensetter] , identifier[grantsetter] = identifier[self] . identifier[_grantsetter] , ) identifier[self] . identifier[_validator] = identifier[validator] keyword[return] identifier[Server] ( identifier[validator] , identifier[token_expires_in] = identifier[expires_in] , identifier[token_generator] = identifier[token_generator] , identifier[refresh_token_generator] = identifier[refresh_token_generator] , ) keyword[raise] identifier[RuntimeError] ( literal[string] )
def server(self):
    """
    All in one endpoints. This property is created automatically
    if you have implemented all the getters and setters.

    However, if you are not satisfied with the getter and setter,
    you can create a validator with :class:`OAuth2RequestValidator`::

        class MyValidator(OAuth2RequestValidator):
            def validate_client_id(self, client_id):
                # do something
                return True

    And assign the validator for the provider::

        oauth._validator = MyValidator()
    """
    expires_in = self.app.config.get('OAUTH2_PROVIDER_TOKEN_EXPIRES_IN')
    token_generator = self.app.config.get('OAUTH2_PROVIDER_TOKEN_GENERATOR', None)
    if token_generator and (not callable(token_generator)):
        token_generator = import_string(token_generator) # depends on [control=['if'], data=[]]
    refresh_token_generator = self.app.config.get('OAUTH2_PROVIDER_REFRESH_TOKEN_GENERATOR', None)
    if refresh_token_generator and (not callable(refresh_token_generator)):
        refresh_token_generator = import_string(refresh_token_generator) # depends on [control=['if'], data=[]]
    if hasattr(self, '_validator'):
        return Server(self._validator, token_expires_in=expires_in, token_generator=token_generator, refresh_token_generator=refresh_token_generator) # depends on [control=['if'], data=[]]
    if hasattr(self, '_clientgetter') and hasattr(self, '_tokengetter') and hasattr(self, '_tokensetter') and hasattr(self, '_grantgetter') and hasattr(self, '_grantsetter'):
        usergetter = None
        if hasattr(self, '_usergetter'):
            usergetter = self._usergetter # depends on [control=['if'], data=[]]
        validator_class = self._validator_class
        if validator_class is None:
            validator_class = OAuth2RequestValidator # depends on [control=['if'], data=['validator_class']]
        validator = validator_class(clientgetter=self._clientgetter, tokengetter=self._tokengetter, grantgetter=self._grantgetter, usergetter=usergetter, tokensetter=self._tokensetter, grantsetter=self._grantsetter)
        self._validator = validator
        return Server(validator, token_expires_in=expires_in, token_generator=token_generator, refresh_token_generator=refresh_token_generator) # depends on [control=['if'], data=[]]
    raise RuntimeError('application not bound to required getters')
def periodogram(self):
    """An alias to :class:`~spectrum.periodogram.Periodogram`

    The parameters are extracted from the attributes. Relevant attributes
    are :attr:`window`, :attr:`sampling`, :attr:`NFFT`,
    :attr:`scale_by_freq`, :attr:`detrend`.

    .. plot::
        :width: 80%
        :include-source:

        from spectrum import datasets
        from spectrum import FourierSpectrum
        s = FourierSpectrum(datasets.data_cosine(), sampling=1024, NFFT=512)
        s.periodogram()
        s.plot()
    """
    from .periodogram import speriodogram
    psd = speriodogram(self.data, window=self.window,
                       sampling=self.sampling,
                       NFFT=self.NFFT,
                       scale_by_freq=self.scale_by_freq,
                       detrend=self.detrend)
    self.psd = psd
def function[periodogram, parameter[self]]:
    constant[An alias to :class:`~spectrum.periodogram.Periodogram`

    The parameters are extracted from the attributes. Relevant attributes
    are :attr:`window`, :attr:`sampling`, :attr:`NFFT`,
    :attr:`scale_by_freq`, :attr:`detrend`.

    .. plot::
        :width: 80%
        :include-source:

        from spectrum import datasets
        from spectrum import FourierSpectrum
        s = FourierSpectrum(datasets.data_cosine(), sampling=1024, NFFT=512)
        s.periodogram()
        s.plot()
    ]
    from relative_module[periodogram] import module[speriodogram]
    variable[psd] assign[=] call[name[speriodogram], parameter[name[self].data]]
    name[self].psd assign[=] name[psd]
keyword[def] identifier[periodogram] ( identifier[self] ): literal[string] keyword[from] . identifier[periodogram] keyword[import] identifier[speriodogram] identifier[psd] = identifier[speriodogram] ( identifier[self] . identifier[data] , identifier[window] = identifier[self] . identifier[window] , identifier[sampling] = identifier[self] . identifier[sampling] , identifier[NFFT] = identifier[self] . identifier[NFFT] , identifier[scale_by_freq] = identifier[self] . identifier[scale_by_freq] , identifier[detrend] = identifier[self] . identifier[detrend] ) identifier[self] . identifier[psd] = identifier[psd]
def periodogram(self):
    """An alias to :class:`~spectrum.periodogram.Periodogram`

    The parameters are extracted from the attributes. Relevant attributes
    are :attr:`window`, :attr:`sampling`, :attr:`NFFT`,
    :attr:`scale_by_freq`, :attr:`detrend`.

    .. plot::
        :width: 80%
        :include-source:

        from spectrum import datasets
        from spectrum import FourierSpectrum
        s = FourierSpectrum(datasets.data_cosine(), sampling=1024, NFFT=512)
        s.periodogram()
        s.plot()
    """
    from .periodogram import speriodogram
    psd = speriodogram(self.data, window=self.window, sampling=self.sampling, NFFT=self.NFFT, scale_by_freq=self.scale_by_freq, detrend=self.detrend)
    self.psd = psd
def full_upload(self, _type, block_num):
    """
    Uploads a full block body from AG.
    The whole block (including header and footer) is copied into the
    user buffer.

    :param _type: Type of block (e.g. 'DB', 'FC')
    :param block_num: Number of Block
    """
    _buffer = buffer_type()
    size = c_int(sizeof(_buffer))
    block_type = snap7.snap7types.block_types[_type]
    result = self.library.Cli_FullUpload(self.pointer, block_type,
                                         block_num, byref(_buffer),
                                         byref(size))
    check_error(result, context="client")
    return bytearray(_buffer), size.value
def function[full_upload, parameter[self, _type, block_num]]:
    constant[
    Uploads a full block body from AG.
    The whole block (including header and footer) is copied into the
    user buffer.

    :param _type: Type of block (e.g. 'DB', 'FC')
    :param block_num: Number of Block
    ]
    variable[_buffer] assign[=] call[name[buffer_type], parameter[]]
    variable[size] assign[=] call[name[c_int], parameter[call[name[sizeof], parameter[name[_buffer]]]]]
    variable[block_type] assign[=] call[name[snap7].snap7types.block_types][name[_type]]
    variable[result] assign[=] call[name[self].library.Cli_FullUpload, parameter[name[self].pointer, name[block_type], name[block_num], call[name[byref], parameter[name[_buffer]]], call[name[byref], parameter[name[size]]]]]
    call[name[check_error], parameter[name[result]]]
    return[tuple[[<ast.Call object at 0x7da204347d90>, <ast.Attribute object at 0x7da204344190>]]]
keyword[def] identifier[full_upload] ( identifier[self] , identifier[_type] , identifier[block_num] ): literal[string] identifier[_buffer] = identifier[buffer_type] () identifier[size] = identifier[c_int] ( identifier[sizeof] ( identifier[_buffer] )) identifier[block_type] = identifier[snap7] . identifier[snap7types] . identifier[block_types] [ identifier[_type] ] identifier[result] = identifier[self] . identifier[library] . identifier[Cli_FullUpload] ( identifier[self] . identifier[pointer] , identifier[block_type] , identifier[block_num] , identifier[byref] ( identifier[_buffer] ), identifier[byref] ( identifier[size] )) identifier[check_error] ( identifier[result] , identifier[context] = literal[string] ) keyword[return] identifier[bytearray] ( identifier[_buffer] ), identifier[size] . identifier[value]
def full_upload(self, _type, block_num):
    """
    Uploads a full block body from AG.
    The whole block (including header and footer) is copied into the
    user buffer.

    :param _type: Type of block (e.g. 'DB', 'FC')
    :param block_num: Number of Block
    """
    _buffer = buffer_type()
    size = c_int(sizeof(_buffer))
    block_type = snap7.snap7types.block_types[_type]
    result = self.library.Cli_FullUpload(self.pointer, block_type, block_num, byref(_buffer), byref(size))
    check_error(result, context='client')
    return (bytearray(_buffer), size.value)
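A minimal usage sketch of the uploader above; the IP address, rack, and slot are placeholder values for an illustrative S7 PLC:

import snap7

client = snap7.client.Client()
client.connect('192.168.0.1', 0, 1)       # placeholder IP, rack 0, slot 1
data, size = client.full_upload('DB', 1)  # upload data block 1, header and footer included
print('uploaded %d bytes' % size)
client.disconnect()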
def format_name(self): """Formats the media file based on enhanced metadata. The actual name of the file and even the name of the directory structure where the file is to be stored. """ self.formatted_filename = formatter.format_filename( self.series_name, self.season_number, self.episode_numbers, self.episode_names, self.extension) self.formatted_dirname = self.location if cfg.CONF.move_files_enabled: self.formatted_dirname = formatter.format_location( self.series_name, self.season_number) self.out_location = os.path.join(self.formatted_dirname, self.formatted_filename)
def function[format_name, parameter[self]]: constant[Formats the media file based on enhanced metadata. The actual name of the file and even the name of the directory structure where the file is to be stored. ] name[self].formatted_filename assign[=] call[name[formatter].format_filename, parameter[name[self].series_name, name[self].season_number, name[self].episode_numbers, name[self].episode_names, name[self].extension]] name[self].formatted_dirname assign[=] name[self].location if name[cfg].CONF.move_files_enabled begin[:] name[self].formatted_dirname assign[=] call[name[formatter].format_location, parameter[name[self].series_name, name[self].season_number]] name[self].out_location assign[=] call[name[os].path.join, parameter[name[self].formatted_dirname, name[self].formatted_filename]]
keyword[def] identifier[format_name] ( identifier[self] ): literal[string] identifier[self] . identifier[formatted_filename] = identifier[formatter] . identifier[format_filename] ( identifier[self] . identifier[series_name] , identifier[self] . identifier[season_number] , identifier[self] . identifier[episode_numbers] , identifier[self] . identifier[episode_names] , identifier[self] . identifier[extension] ) identifier[self] . identifier[formatted_dirname] = identifier[self] . identifier[location] keyword[if] identifier[cfg] . identifier[CONF] . identifier[move_files_enabled] : identifier[self] . identifier[formatted_dirname] = identifier[formatter] . identifier[format_location] ( identifier[self] . identifier[series_name] , identifier[self] . identifier[season_number] ) identifier[self] . identifier[out_location] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[formatted_dirname] , identifier[self] . identifier[formatted_filename] )
def format_name(self): """Formats the media file based on enhanced metadata. The actual name of the file and even the name of the directory structure where the file is to be stored. """ self.formatted_filename = formatter.format_filename(self.series_name, self.season_number, self.episode_numbers, self.episode_names, self.extension) self.formatted_dirname = self.location if cfg.CONF.move_files_enabled: self.formatted_dirname = formatter.format_location(self.series_name, self.season_number) # depends on [control=['if'], data=[]] self.out_location = os.path.join(self.formatted_dirname, self.formatted_filename)
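The real naming logic lives in the project's `formatter` module, which is not shown here; the sketch below only illustrates the shape of output such a method typically produces, and the template string is an assumption rather than the project's actual pattern:

# Hypothetical template -- for illustration only.
series, season, episode, title, ext = 'Firefly', 1, 11, 'Trash', '.mkv'
filename = '{0} - S{1:02d}E{2:02d} - {3}{4}'.format(series, season, episode, title, ext)
print(filename)  # Firefly - S01E11 - Trash.mkv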
def find_visible_elements(driver, selector, by=By.CSS_SELECTOR): """ Finds all WebElements that match a selector and are visible. Similar to webdriver.find_elements. @Params driver - the webdriver object (required) selector - the locator that is used to search the DOM (required) by - the method to search for the locator (Default: By.CSS_SELECTOR) """ elements = driver.find_elements(by=by, value=selector) return [element for element in elements if element.is_displayed()]
def function[find_visible_elements, parameter[driver, selector, by]]: constant[ Finds all WebElements that match a selector and are visible. Similar to webdriver.find_elements. @Params driver - the webdriver object (required) selector - the locator that is used to search the DOM (required) by - the method to search for the locator (Default: By.CSS_SELECTOR) ] variable[elements] assign[=] call[name[driver].find_elements, parameter[]] return[<ast.ListComp object at 0x7da1b1b637f0>]
keyword[def] identifier[find_visible_elements] ( identifier[driver] , identifier[selector] , identifier[by] = identifier[By] . identifier[CSS_SELECTOR] ): literal[string] identifier[elements] = identifier[driver] . identifier[find_elements] ( identifier[by] = identifier[by] , identifier[value] = identifier[selector] ) keyword[return] [ identifier[element] keyword[for] identifier[element] keyword[in] identifier[elements] keyword[if] identifier[element] . identifier[is_displayed] ()]
def find_visible_elements(driver, selector, by=By.CSS_SELECTOR): """ Finds all WebElements that match a selector and are visible. Similar to webdriver.find_elements. @Params driver - the webdriver object (required) selector - the locator that is used to search the DOM (required) by - the method to search for the locator (Default: By.CSS_SELECTOR) """ elements = driver.find_elements(by=by, value=selector) return [element for element in elements if element.is_displayed()]
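A short usage sketch, assuming a working WebDriver (e.g. chromedriver on PATH); the URL is a placeholder:

from selenium import webdriver
from selenium.webdriver.common.by import By

driver = webdriver.Chrome()
driver.get('https://example.com')
# Only anchors currently rendered on screen are returned.
links = find_visible_elements(driver, 'a', by=By.CSS_SELECTOR)
print(len(links), 'visible links')
driver.quit()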
def export_cfg(obj, file_name): """ Exports curves and surfaces in libconfig format. .. note:: Requires `libconf <https://pypi.org/project/libconf/>`_ package. Libconfig format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_ as a way to input shape data from the command line. :param obj: input geometry :type obj: abstract.SplineGeometry, multi.AbstractContainer :param file_name: name of the output file :type file_name: str :raises GeomdlException: an error occurred writing the file """ def callback(data): return libconf.dumps(data) # Check if it is possible to import 'libconf' try: import libconf except ImportError: raise exch.GeomdlException("Please install 'libconf' package to use libconfig format: pip install libconf") # Export data exported_data = exch.export_dict_str(obj=obj, callback=callback) # Write to file return exch.write_file(file_name, exported_data)
def function[export_cfg, parameter[obj, file_name]]: constant[ Exports curves and surfaces in libconfig format. .. note:: Requires `libconf <https://pypi.org/project/libconf/>`_ package. Libconfig format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_ as a way to input shape data from the command line. :param obj: input geometry :type obj: abstract.SplineGeometry, multi.AbstractContainer :param file_name: name of the output file :type file_name: str :raises GeomdlException: an error occurred writing the file ] def function[callback, parameter[data]]: return[call[name[libconf].dumps, parameter[name[data]]]] <ast.Try object at 0x7da1b16b7580> variable[exported_data] assign[=] call[name[exch].export_dict_str, parameter[]] return[call[name[exch].write_file, parameter[name[file_name], name[exported_data]]]]
keyword[def] identifier[export_cfg] ( identifier[obj] , identifier[file_name] ): literal[string] keyword[def] identifier[callback] ( identifier[data] ): keyword[return] identifier[libconf] . identifier[dumps] ( identifier[data] ) keyword[try] : keyword[import] identifier[libconf] keyword[except] identifier[ImportError] : keyword[raise] identifier[exch] . identifier[GeomdlException] ( literal[string] ) identifier[exported_data] = identifier[exch] . identifier[export_dict_str] ( identifier[obj] = identifier[obj] , identifier[callback] = identifier[callback] ) keyword[return] identifier[exch] . identifier[write_file] ( identifier[file_name] , identifier[exported_data] )
def export_cfg(obj, file_name): """ Exports curves and surfaces in libconfig format. .. note:: Requires `libconf <https://pypi.org/project/libconf/>`_ package. Libconfig format is also used by the `geomdl command-line application <https://github.com/orbingol/geomdl-cli>`_ as a way to input shape data from the command line. :param obj: input geometry :type obj: abstract.SplineGeometry, multi.AbstractContainer :param file_name: name of the output file :type file_name: str :raises GeomdlException: an error occurred writing the file """ def callback(data): return libconf.dumps(data) # Check if it is possible to import 'libconf' try: import libconf # depends on [control=['try'], data=[]] except ImportError: raise exch.GeomdlException("Please install 'libconf' package to use libconfig format: pip install libconf") # depends on [control=['except'], data=[]] # Export data exported_data = exch.export_dict_str(obj=obj, callback=callback) # Write to file return exch.write_file(file_name, exported_data)
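A minimal sketch of exporting a shape with this helper, assuming `geomdl` and `libconf` are installed; the control points and knot vector are arbitrary example data:

from geomdl import BSpline

curve = BSpline.Curve()
curve.degree = 3
curve.ctrlpts = [[0, 0, 0], [1, 2, 0], [2, -1, 0], [3, 0, 0], [4, 1, 0]]
curve.knotvector = [0, 0, 0, 0, 0.5, 1, 1, 1, 1]  # clamped: len = ctrlpts + degree + 1
export_cfg(curve, 'curve.cfg')  # writes libconfig-formatted geometry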
def create_pathlist(self, initial_pathlist):
    """
    Add to the pathlist the Python library paths that should be
    skipped during module reloading.
    """
    # Get standard installation paths
    try:
        paths = sysconfig.get_paths()
        standard_paths = [paths['stdlib'],
                          paths['purelib'],
                          paths['scripts'],
                          paths['data']]
    except Exception:
        standard_paths = []

    # Get user installation path
    # See Spyder issue 8776
    try:
        import site
        if getattr(site, 'getusersitepackages', False):
            # Virtualenvs don't have this function but
            # conda envs do
            user_path = [site.getusersitepackages()]
        elif getattr(site, 'USER_SITE', False):
            # However, it seems virtualenvs have this
            # constant
            user_path = [site.USER_SITE]
        else:
            user_path = []
    except Exception:
        user_path = []

    return initial_pathlist + standard_paths + user_path
def function[create_pathlist, parameter[self, initial_pathlist]]:
    constant[
    Add to the pathlist the Python library paths that should be
    skipped during module reloading.
    ]
    <ast.Try object at 0x7da18bc71390>
    <ast.Try object at 0x7da18bc709a0>
    return[binary_operation[binary_operation[name[initial_pathlist] + name[standard_paths]] + name[user_path]]]
keyword[def] identifier[create_pathlist] ( identifier[self] , identifier[initial_pathlist] ): literal[string] keyword[try] : identifier[paths] = identifier[sysconfig] . identifier[get_paths] () identifier[standard_paths] =[ identifier[paths] [ literal[string] ], identifier[paths] [ literal[string] ], identifier[paths] [ literal[string] ], identifier[paths] [ literal[string] ]] keyword[except] identifier[Exception] : identifier[standard_paths] =[] keyword[try] : keyword[import] identifier[site] keyword[if] identifier[getattr] ( identifier[site] , literal[string] , keyword[False] ): identifier[user_path] =[ identifier[site] . identifier[getusersitepackages] ()] keyword[elif] identifier[getattr] ( identifier[site] , literal[string] , keyword[False] ): identifier[user_path] =[ identifier[site] . identifier[USER_SITE] ] keyword[else] : identifier[user_path] =[] keyword[except] identifier[Exception] : identifier[user_path] =[] keyword[return] identifier[initial_pathlist] + identifier[standard_paths] + identifier[user_path]
def create_pathlist(self, initial_pathlist):
    """
    Add to the pathlist the Python library paths that should be
    skipped during module reloading.
    """
    # Get standard installation paths
    try:
        paths = sysconfig.get_paths()
        standard_paths = [paths['stdlib'], paths['purelib'], paths['scripts'], paths['data']] # depends on [control=['try'], data=[]]
    except Exception:
        standard_paths = [] # depends on [control=['except'], data=[]]
    # Get user installation path
    # See Spyder issue 8776
    try:
        import site
        if getattr(site, 'getusersitepackages', False):
            # Virtualenvs don't have this function but
            # conda envs do
            user_path = [site.getusersitepackages()] # depends on [control=['if'], data=[]]
        elif getattr(site, 'USER_SITE', False):
            # However, it seems virtualenvs have this
            # constant
            user_path = [site.USER_SITE] # depends on [control=['if'], data=[]]
        else:
            user_path = [] # depends on [control=['try'], data=[]]
    except Exception:
        user_path = [] # depends on [control=['except'], data=[]]
    return initial_pathlist + standard_paths + user_path
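The two lookups the method combines can be inspected directly; a small sketch (the printed paths vary per interpreter and platform):

import sysconfig
import site

paths = sysconfig.get_paths()
print(paths['stdlib'], paths['purelib'])         # standard installation dirs
if getattr(site, 'getusersitepackages', False):  # absent in some virtualenvs
    print(site.getusersitepackages())            # per-user site-packages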
def sync_request(self, command, payload, retry=2): """Request data.""" loop = asyncio.get_event_loop() task = loop.create_task(self.request(command, payload, retry)) return loop.run_until_complete(task)
def function[sync_request, parameter[self, command, payload, retry]]: constant[Request data.] variable[loop] assign[=] call[name[asyncio].get_event_loop, parameter[]] variable[task] assign[=] call[name[loop].create_task, parameter[call[name[self].request, parameter[name[command], name[payload], name[retry]]]]] return[call[name[loop].run_until_complete, parameter[name[task]]]]
keyword[def] identifier[sync_request] ( identifier[self] , identifier[command] , identifier[payload] , identifier[retry] = literal[int] ): literal[string] identifier[loop] = identifier[asyncio] . identifier[get_event_loop] () identifier[task] = identifier[loop] . identifier[create_task] ( identifier[self] . identifier[request] ( identifier[command] , identifier[payload] , identifier[retry] )) keyword[return] identifier[loop] . identifier[run_until_complete] ( identifier[task] )
def sync_request(self, command, payload, retry=2): """Request data.""" loop = asyncio.get_event_loop() task = loop.create_task(self.request(command, payload, retry)) return loop.run_until_complete(task)
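The sync-over-async pattern used above, reduced to a self-contained sketch; the coroutine body is a stand-in for the real network round trip:

import asyncio

async def request(command, payload, retry):
    await asyncio.sleep(0)  # stand-in for the real I/O
    return {'command': command, 'payload': payload, 'retry': retry}

loop = asyncio.get_event_loop()
task = loop.create_task(request('status', {}, 2))
print(loop.run_until_complete(task))

Note that recent Python versions deprecate asyncio.get_event_loop() outside a running loop in favor of asyncio.run(); the sketch simply mirrors the method's own calls.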
def get_partitions( self, schema, table_name, filter=None): """ Returns a list of all partitions in a table. Works only for tables with less than 32767 (java short max val). For subpartitioned table, the number might easily exceed this. >>> hh = HiveMetastoreHook() >>> t = 'static_babynames_partitioned' >>> parts = hh.get_partitions(schema='airflow', table_name=t) >>> len(parts) 1 >>> parts [{'ds': '2015-01-01'}] """ with self.metastore as client: table = client.get_table(dbname=schema, tbl_name=table_name) if len(table.partitionKeys) == 0: raise AirflowException("The table isn't partitioned") else: if filter: parts = client.get_partitions_by_filter( db_name=schema, tbl_name=table_name, filter=filter, max_parts=HiveMetastoreHook.MAX_PART_COUNT) else: parts = client.get_partitions( db_name=schema, tbl_name=table_name, max_parts=HiveMetastoreHook.MAX_PART_COUNT) pnames = [p.name for p in table.partitionKeys] return [dict(zip(pnames, p.values)) for p in parts]
def function[get_partitions, parameter[self, schema, table_name, filter]]: constant[ Returns a list of all partitions in a table. Works only for tables with less than 32767 (java short max val). For subpartitioned table, the number might easily exceed this. >>> hh = HiveMetastoreHook() >>> t = 'static_babynames_partitioned' >>> parts = hh.get_partitions(schema='airflow', table_name=t) >>> len(parts) 1 >>> parts [{'ds': '2015-01-01'}] ] with name[self].metastore begin[:] variable[table] assign[=] call[name[client].get_table, parameter[]] if compare[call[name[len], parameter[name[table].partitionKeys]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b0558ac0>
keyword[def] identifier[get_partitions] ( identifier[self] , identifier[schema] , identifier[table_name] , identifier[filter] = keyword[None] ): literal[string] keyword[with] identifier[self] . identifier[metastore] keyword[as] identifier[client] : identifier[table] = identifier[client] . identifier[get_table] ( identifier[dbname] = identifier[schema] , identifier[tbl_name] = identifier[table_name] ) keyword[if] identifier[len] ( identifier[table] . identifier[partitionKeys] )== literal[int] : keyword[raise] identifier[AirflowException] ( literal[string] ) keyword[else] : keyword[if] identifier[filter] : identifier[parts] = identifier[client] . identifier[get_partitions_by_filter] ( identifier[db_name] = identifier[schema] , identifier[tbl_name] = identifier[table_name] , identifier[filter] = identifier[filter] , identifier[max_parts] = identifier[HiveMetastoreHook] . identifier[MAX_PART_COUNT] ) keyword[else] : identifier[parts] = identifier[client] . identifier[get_partitions] ( identifier[db_name] = identifier[schema] , identifier[tbl_name] = identifier[table_name] , identifier[max_parts] = identifier[HiveMetastoreHook] . identifier[MAX_PART_COUNT] ) identifier[pnames] =[ identifier[p] . identifier[name] keyword[for] identifier[p] keyword[in] identifier[table] . identifier[partitionKeys] ] keyword[return] [ identifier[dict] ( identifier[zip] ( identifier[pnames] , identifier[p] . identifier[values] )) keyword[for] identifier[p] keyword[in] identifier[parts] ]
def get_partitions(self, schema, table_name, filter=None): """ Returns a list of all partitions in a table. Works only for tables with less than 32767 (java short max val). For subpartitioned table, the number might easily exceed this. >>> hh = HiveMetastoreHook() >>> t = 'static_babynames_partitioned' >>> parts = hh.get_partitions(schema='airflow', table_name=t) >>> len(parts) 1 >>> parts [{'ds': '2015-01-01'}] """ with self.metastore as client: table = client.get_table(dbname=schema, tbl_name=table_name) if len(table.partitionKeys) == 0: raise AirflowException("The table isn't partitioned") # depends on [control=['if'], data=[]] else: if filter: parts = client.get_partitions_by_filter(db_name=schema, tbl_name=table_name, filter=filter, max_parts=HiveMetastoreHook.MAX_PART_COUNT) # depends on [control=['if'], data=[]] else: parts = client.get_partitions(db_name=schema, tbl_name=table_name, max_parts=HiveMetastoreHook.MAX_PART_COUNT) pnames = [p.name for p in table.partitionKeys] return [dict(zip(pnames, p.values)) for p in parts] # depends on [control=['with'], data=['client']]
def _format_multirow(self, row, ilevels, i, rows): r""" Check following rows, whether row should be a multirow e.g.: becomes: a & 0 & \multirow{2}{*}{a} & 0 & & 1 & & 1 & b & 0 & \cline{1-2} b & 0 & """ for j in range(ilevels): if row[j].strip(): nrow = 1 for r in rows[i + 1:]: if not r[j].strip(): nrow += 1 else: break if nrow > 1: # overwrite non-multirow entry row[j] = '\\multirow{{{nrow:d}}}{{*}}{{{row:s}}}'.format( nrow=nrow, row=row[j].strip()) # save when to end the current block with \cline self.clinebuf.append([i + nrow - 1, j + 1]) return row
def function[_format_multirow, parameter[self, row, ilevels, i, rows]]: constant[ Check following rows, whether row should be a multirow e.g.: becomes: a & 0 & \multirow{2}{*}{a} & 0 & & 1 & & 1 & b & 0 & \cline{1-2} b & 0 & ] for taget[name[j]] in starred[call[name[range], parameter[name[ilevels]]]] begin[:] if call[call[name[row]][name[j]].strip, parameter[]] begin[:] variable[nrow] assign[=] constant[1] for taget[name[r]] in starred[call[name[rows]][<ast.Slice object at 0x7da18f00dab0>]] begin[:] if <ast.UnaryOp object at 0x7da18f00d900> begin[:] <ast.AugAssign object at 0x7da18f00e500> if compare[name[nrow] greater[>] constant[1]] begin[:] call[name[row]][name[j]] assign[=] call[constant[\multirow{{{nrow:d}}}{{*}}{{{row:s}}}].format, parameter[]] call[name[self].clinebuf.append, parameter[list[[<ast.BinOp object at 0x7da18f00e0e0>, <ast.BinOp object at 0x7da18f00d240>]]]] return[name[row]]
keyword[def] identifier[_format_multirow] ( identifier[self] , identifier[row] , identifier[ilevels] , identifier[i] , identifier[rows] ): literal[string] keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[ilevels] ): keyword[if] identifier[row] [ identifier[j] ]. identifier[strip] (): identifier[nrow] = literal[int] keyword[for] identifier[r] keyword[in] identifier[rows] [ identifier[i] + literal[int] :]: keyword[if] keyword[not] identifier[r] [ identifier[j] ]. identifier[strip] (): identifier[nrow] += literal[int] keyword[else] : keyword[break] keyword[if] identifier[nrow] > literal[int] : identifier[row] [ identifier[j] ]= literal[string] . identifier[format] ( identifier[nrow] = identifier[nrow] , identifier[row] = identifier[row] [ identifier[j] ]. identifier[strip] ()) identifier[self] . identifier[clinebuf] . identifier[append] ([ identifier[i] + identifier[nrow] - literal[int] , identifier[j] + literal[int] ]) keyword[return] identifier[row]
def _format_multirow(self, row, ilevels, i, rows): """ Check following rows, whether row should be a multirow e.g.: becomes: a & 0 & \\multirow{2}{*}{a} & 0 & & 1 & & 1 & b & 0 & \\cline{1-2} b & 0 & """ for j in range(ilevels): if row[j].strip(): nrow = 1 for r in rows[i + 1:]: if not r[j].strip(): nrow += 1 # depends on [control=['if'], data=[]] else: break # depends on [control=['for'], data=['r']] if nrow > 1: # overwrite non-multirow entry row[j] = '\\multirow{{{nrow:d}}}{{*}}{{{row:s}}}'.format(nrow=nrow, row=row[j].strip()) # save when to end the current block with \cline self.clinebuf.append([i + nrow - 1, j + 1]) # depends on [control=['if'], data=['nrow']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] return row
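A standalone sketch of the same collapsing idea (not the pandas internals): repeated leading index cells of consecutive rows become a single \multirow entry spanning the blanked repeats.

def multirow_collapse(rows):
    out = [row[:] for row in rows]
    i = 0
    while i < len(out):
        span = 1
        while i + span < len(out) and not out[i + span][0].strip():
            span += 1  # count following rows with a blank first cell
        if span > 1:
            out[i][0] = '\\multirow{%d}{*}{%s}' % (span, out[i][0].strip())
        i += span
    return out

rows = [['a', '0'], ['', '1'], ['b', '0']]
for row in multirow_collapse(rows):
    print(' & '.join(row) + ' \\\\')
# \multirow{2}{*}{a} & 0 \\
#  & 1 \\
# b & 0 \\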
def record_event(self, event: Event) -> None: """ Record the event async. """ from polyaxon.celery_api import celery_app from polyaxon.settings import EventsCeleryTasks if not event.ref_id: event.ref_id = self.get_ref_id() serialized_event = event.serialize(dumps=False, include_actor_name=True, include_instance_info=True) celery_app.send_task(EventsCeleryTasks.EVENTS_TRACK, kwargs={'event': serialized_event}) celery_app.send_task(EventsCeleryTasks.EVENTS_LOG, kwargs={'event': serialized_event}) celery_app.send_task(EventsCeleryTasks.EVENTS_NOTIFY, kwargs={'event': serialized_event}) # We include the instance in the serialized event for executor serialized_event['instance'] = event.instance self.executor.record(event_type=event.event_type, event_data=serialized_event)
def function[record_event, parameter[self, event]]: constant[ Record the event async. ] from relative_module[polyaxon.celery_api] import module[celery_app] from relative_module[polyaxon.settings] import module[EventsCeleryTasks] if <ast.UnaryOp object at 0x7da20c991780> begin[:] name[event].ref_id assign[=] call[name[self].get_ref_id, parameter[]] variable[serialized_event] assign[=] call[name[event].serialize, parameter[]] call[name[celery_app].send_task, parameter[name[EventsCeleryTasks].EVENTS_TRACK]] call[name[celery_app].send_task, parameter[name[EventsCeleryTasks].EVENTS_LOG]] call[name[celery_app].send_task, parameter[name[EventsCeleryTasks].EVENTS_NOTIFY]] call[name[serialized_event]][constant[instance]] assign[=] name[event].instance call[name[self].executor.record, parameter[]]
keyword[def] identifier[record_event] ( identifier[self] , identifier[event] : identifier[Event] )-> keyword[None] : literal[string] keyword[from] identifier[polyaxon] . identifier[celery_api] keyword[import] identifier[celery_app] keyword[from] identifier[polyaxon] . identifier[settings] keyword[import] identifier[EventsCeleryTasks] keyword[if] keyword[not] identifier[event] . identifier[ref_id] : identifier[event] . identifier[ref_id] = identifier[self] . identifier[get_ref_id] () identifier[serialized_event] = identifier[event] . identifier[serialize] ( identifier[dumps] = keyword[False] , identifier[include_actor_name] = keyword[True] , identifier[include_instance_info] = keyword[True] ) identifier[celery_app] . identifier[send_task] ( identifier[EventsCeleryTasks] . identifier[EVENTS_TRACK] , identifier[kwargs] ={ literal[string] : identifier[serialized_event] }) identifier[celery_app] . identifier[send_task] ( identifier[EventsCeleryTasks] . identifier[EVENTS_LOG] , identifier[kwargs] ={ literal[string] : identifier[serialized_event] }) identifier[celery_app] . identifier[send_task] ( identifier[EventsCeleryTasks] . identifier[EVENTS_NOTIFY] , identifier[kwargs] ={ literal[string] : identifier[serialized_event] }) identifier[serialized_event] [ literal[string] ]= identifier[event] . identifier[instance] identifier[self] . identifier[executor] . identifier[record] ( identifier[event_type] = identifier[event] . identifier[event_type] , identifier[event_data] = identifier[serialized_event] )
def record_event(self, event: Event) -> None: """ Record the event async. """ from polyaxon.celery_api import celery_app from polyaxon.settings import EventsCeleryTasks if not event.ref_id: event.ref_id = self.get_ref_id() # depends on [control=['if'], data=[]] serialized_event = event.serialize(dumps=False, include_actor_name=True, include_instance_info=True) celery_app.send_task(EventsCeleryTasks.EVENTS_TRACK, kwargs={'event': serialized_event}) celery_app.send_task(EventsCeleryTasks.EVENTS_LOG, kwargs={'event': serialized_event}) celery_app.send_task(EventsCeleryTasks.EVENTS_NOTIFY, kwargs={'event': serialized_event}) # We include the instance in the serialized event for executor serialized_event['instance'] = event.instance self.executor.record(event_type=event.event_type, event_data=serialized_event)
def registerFunction(self, name, f, returnType=None): """An alias for :func:`spark.udf.register`. See :meth:`pyspark.sql.UDFRegistration.register`. .. note:: Deprecated in 2.3.0. Use :func:`spark.udf.register` instead. """ warnings.warn( "Deprecated in 2.3.0. Use spark.udf.register instead.", DeprecationWarning) return self.sparkSession.udf.register(name, f, returnType)
def function[registerFunction, parameter[self, name, f, returnType]]: constant[An alias for :func:`spark.udf.register`. See :meth:`pyspark.sql.UDFRegistration.register`. .. note:: Deprecated in 2.3.0. Use :func:`spark.udf.register` instead. ] call[name[warnings].warn, parameter[constant[Deprecated in 2.3.0. Use spark.udf.register instead.], name[DeprecationWarning]]] return[call[name[self].sparkSession.udf.register, parameter[name[name], name[f], name[returnType]]]]
keyword[def] identifier[registerFunction] ( identifier[self] , identifier[name] , identifier[f] , identifier[returnType] = keyword[None] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] ) keyword[return] identifier[self] . identifier[sparkSession] . identifier[udf] . identifier[register] ( identifier[name] , identifier[f] , identifier[returnType] )
def registerFunction(self, name, f, returnType=None): """An alias for :func:`spark.udf.register`. See :meth:`pyspark.sql.UDFRegistration.register`. .. note:: Deprecated in 2.3.0. Use :func:`spark.udf.register` instead. """ warnings.warn('Deprecated in 2.3.0. Use spark.udf.register instead.', DeprecationWarning) return self.sparkSession.udf.register(name, f, returnType)
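The replacement call that the deprecation warning points to, as a runnable sketch (assumes a local pyspark installation):

from pyspark.sql import SparkSession
from pyspark.sql.types import IntegerType

spark = SparkSession.builder.master('local[1]').getOrCreate()
spark.udf.register('str_len', lambda s: len(s), IntegerType())
spark.sql("SELECT str_len('spark') AS n").show()  # n = 5
spark.stop()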
def removeItem( self ):
    """
    Removes the item from the menu.
    """
    item = self.uiMenuTREE.currentItem()
    if ( not item ):
        return

    opts = QMessageBox.Yes | QMessageBox.No
    answer = QMessageBox.question( self,
                                   'Remove Item',
                                   'Are you sure you want to remove this '\
                                   'item?',
                                   opts )

    if ( answer == QMessageBox.Yes ):
        parent = item.parent()
        if ( parent ):
            parent.takeChild(parent.indexOfChild(item))
        else:
            tree = self.uiMenuTREE
            tree.takeTopLevelItem(tree.indexOfTopLevelItem(item))
def function[removeItem, parameter[self]]: constant[ Removes the item from the menu. ] variable[item] assign[=] call[name[self].uiMenuTREE.currentItem, parameter[]] if <ast.UnaryOp object at 0x7da1b253a620> begin[:] return[None] variable[opts] assign[=] binary_operation[name[QMessageBox].Yes <ast.BitOr object at 0x7da2590d6aa0> name[QMessageBox].No] variable[answer] assign[=] call[name[QMessageBox].question, parameter[name[self], constant[Remove Item], constant[Are you sure you want to remove this item?], name[opts]]] if compare[name[answer] equal[==] name[QMessageBox].Yes] begin[:] variable[parent] assign[=] call[name[item].parent, parameter[]] if name[parent] begin[:] call[name[parent].takeChild, parameter[call[name[parent].indexOfChild, parameter[name[item]]]]]
keyword[def] identifier[removeItem] ( identifier[self] ): literal[string] identifier[item] = identifier[self] . identifier[uiMenuTREE] . identifier[currentItem] () keyword[if] ( keyword[not] identifier[item] ): keyword[return] identifier[opts] = identifier[QMessageBox] . identifier[Yes] | identifier[QMessageBox] . identifier[No] identifier[answer] = identifier[QMessageBox] . identifier[question] ( identifier[self] , literal[string] , literal[string] literal[string] , identifier[opts] ) keyword[if] ( identifier[answer] == identifier[QMessageBox] . identifier[Yes] ): identifier[parent] = identifier[item] . identifier[parent] () keyword[if] ( identifier[parent] ): identifier[parent] . identifier[takeChild] ( identifier[parent] . identifier[indexOfChild] ( identifier[item] )) keyword[else] : identifier[tree] = identifier[self] . identifier[uiMenuTREE] identifier[tree] . identifier[takeTopLevelItem] ( identifier[tree] . identifier[indexOfTopLevelItem] ( identifier[item] ))
def removeItem(self): """ Removes the item from the menu. """ item = self.uiMenuTREE.currentItem() if not item: return # depends on [control=['if'], data=[]] opts = QMessageBox.Yes | QMessageBox.No answer = QMessageBox.question(self, 'Remove Item', 'Are you sure you want to remove this item?', opts) if answer == QMessageBox.Yes: parent = item.parent() if parent: parent.takeChild(parent.indexOfChild(item)) # depends on [control=['if'], data=[]] else: tree = self.uiMenuTREE tree.takeTopLevelItem(tree.indexOfTopLevelItem(item)) # depends on [control=['if'], data=[]]
def update_user(self, user_id, roles=None, netmask=None, secret=None, pubkey=None): """Update user. Returns the raw response object. Arguments: user_id: User id of user to update roles: Role netmask: Limit user connections by netmask, for example 192.168.1.0/24 secret: Secret used when authenticating with mCASH pubkey: RSA key used for authenticating by signing """ arguments = {'roles': roles, 'netmask': netmask, 'secret': secret, 'pubkey': pubkey} return self.do_req('PUT', self.merchant_api_base_url + '/user/' + user_id + '/', arguments)
def function[update_user, parameter[self, user_id, roles, netmask, secret, pubkey]]: constant[Update user. Returns the raw response object. Arguments: user_id: User id of user to update roles: Role netmask: Limit user connections by netmask, for example 192.168.1.0/24 secret: Secret used when authenticating with mCASH pubkey: RSA key used for authenticating by signing ] variable[arguments] assign[=] dictionary[[<ast.Constant object at 0x7da2044c1000>, <ast.Constant object at 0x7da2044c0df0>, <ast.Constant object at 0x7da2044c2a70>, <ast.Constant object at 0x7da2044c3970>], [<ast.Name object at 0x7da2044c2ad0>, <ast.Name object at 0x7da2044c21a0>, <ast.Name object at 0x7da2044c2080>, <ast.Name object at 0x7da2044c0b20>]] return[call[name[self].do_req, parameter[constant[PUT], binary_operation[binary_operation[binary_operation[name[self].merchant_api_base_url + constant[/user/]] + name[user_id]] + constant[/]], name[arguments]]]]
keyword[def] identifier[update_user] ( identifier[self] , identifier[user_id] , identifier[roles] = keyword[None] , identifier[netmask] = keyword[None] , identifier[secret] = keyword[None] , identifier[pubkey] = keyword[None] ): literal[string] identifier[arguments] ={ literal[string] : identifier[roles] , literal[string] : identifier[netmask] , literal[string] : identifier[secret] , literal[string] : identifier[pubkey] } keyword[return] identifier[self] . identifier[do_req] ( literal[string] , identifier[self] . identifier[merchant_api_base_url] + literal[string] + identifier[user_id] + literal[string] , identifier[arguments] )
def update_user(self, user_id, roles=None, netmask=None, secret=None, pubkey=None): """Update user. Returns the raw response object. Arguments: user_id: User id of user to update roles: Role netmask: Limit user connections by netmask, for example 192.168.1.0/24 secret: Secret used when authenticating with mCASH pubkey: RSA key used for authenticating by signing """ arguments = {'roles': roles, 'netmask': netmask, 'secret': secret, 'pubkey': pubkey} return self.do_req('PUT', self.merchant_api_base_url + '/user/' + user_id + '/', arguments)
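A hedged usage sketch; `client` stands for an already-authenticated instance of the class above, and the user id and values are placeholders:

resp = client.update_user('alice',
                          roles=['user'],
                          netmask='192.168.1.0/24')
print(resp.status_code)  # assuming do_req returns a requests-style response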
def drop_primary_key(self, table): """Drop a Primary Key constraint for a specific table.""" if self.get_primary_key(table): self.execute('ALTER TABLE {0} DROP PRIMARY KEY'.format(wrap(table)))
def function[drop_primary_key, parameter[self, table]]: constant[Drop a Primary Key constraint for a specific table.] if call[name[self].get_primary_key, parameter[name[table]]] begin[:] call[name[self].execute, parameter[call[constant[ALTER TABLE {0} DROP PRIMARY KEY].format, parameter[call[name[wrap], parameter[name[table]]]]]]]
keyword[def] identifier[drop_primary_key] ( identifier[self] , identifier[table] ): literal[string] keyword[if] identifier[self] . identifier[get_primary_key] ( identifier[table] ): identifier[self] . identifier[execute] ( literal[string] . identifier[format] ( identifier[wrap] ( identifier[table] )))
def drop_primary_key(self, table): """Drop a Primary Key constraint for a specific table.""" if self.get_primary_key(table): self.execute('ALTER TABLE {0} DROP PRIMARY KEY'.format(wrap(table))) # depends on [control=['if'], data=[]]
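Usage sketch; `db` stands for a connected instance of the class above and 'orders' is a placeholder table name:

# No-op when the table has no primary key; otherwise emits an
# ALTER TABLE ... DROP PRIMARY KEY statement (wrap() presumably
# quotes the identifier).
db.drop_primary_key('orders')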
def _permute_aux_specs(self): """Generate all permutations of the non-core specifications.""" # Convert to attr names that Calc is expecting. calc_aux_mapping = self._NAMES_SUITE_TO_CALC.copy() # Special case: manually add 'library' to mapping calc_aux_mapping[_OBJ_LIB_STR] = None [calc_aux_mapping.pop(core) for core in self._CORE_SPEC_NAMES] specs = self._get_aux_specs() for suite_name, calc_name in calc_aux_mapping.items(): specs[calc_name] = specs.pop(suite_name) return _permuted_dicts_of_specs(specs)
def function[_permute_aux_specs, parameter[self]]: constant[Generate all permutations of the non-core specifications.] variable[calc_aux_mapping] assign[=] call[name[self]._NAMES_SUITE_TO_CALC.copy, parameter[]] call[name[calc_aux_mapping]][name[_OBJ_LIB_STR]] assign[=] constant[None] <ast.ListComp object at 0x7da1b04f9810> variable[specs] assign[=] call[name[self]._get_aux_specs, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b04fa1d0>, <ast.Name object at 0x7da1b04f9d80>]]] in starred[call[name[calc_aux_mapping].items, parameter[]]] begin[:] call[name[specs]][name[calc_name]] assign[=] call[name[specs].pop, parameter[name[suite_name]]] return[call[name[_permuted_dicts_of_specs], parameter[name[specs]]]]
keyword[def] identifier[_permute_aux_specs] ( identifier[self] ): literal[string] identifier[calc_aux_mapping] = identifier[self] . identifier[_NAMES_SUITE_TO_CALC] . identifier[copy] () identifier[calc_aux_mapping] [ identifier[_OBJ_LIB_STR] ]= keyword[None] [ identifier[calc_aux_mapping] . identifier[pop] ( identifier[core] ) keyword[for] identifier[core] keyword[in] identifier[self] . identifier[_CORE_SPEC_NAMES] ] identifier[specs] = identifier[self] . identifier[_get_aux_specs] () keyword[for] identifier[suite_name] , identifier[calc_name] keyword[in] identifier[calc_aux_mapping] . identifier[items] (): identifier[specs] [ identifier[calc_name] ]= identifier[specs] . identifier[pop] ( identifier[suite_name] ) keyword[return] identifier[_permuted_dicts_of_specs] ( identifier[specs] )
def _permute_aux_specs(self): """Generate all permutations of the non-core specifications.""" # Convert to attr names that Calc is expecting. calc_aux_mapping = self._NAMES_SUITE_TO_CALC.copy() # Special case: manually add 'library' to mapping calc_aux_mapping[_OBJ_LIB_STR] = None [calc_aux_mapping.pop(core) for core in self._CORE_SPEC_NAMES] specs = self._get_aux_specs() for (suite_name, calc_name) in calc_aux_mapping.items(): specs[calc_name] = specs.pop(suite_name) # depends on [control=['for'], data=[]] return _permuted_dicts_of_specs(specs)
def ServerLoggingStartupInit(): """Initialize the server logging configuration.""" global LOGGER if local_log: logging.debug("Using local LogInit from %s", local_log) local_log.LogInit() logging.debug("Using local AppLogInit from %s", local_log) LOGGER = local_log.AppLogInit() else: LogInit() LOGGER = AppLogInit()
def function[ServerLoggingStartupInit, parameter[]]: constant[Initialize the server logging configuration.] <ast.Global object at 0x7da18dc078e0> if name[local_log] begin[:] call[name[logging].debug, parameter[constant[Using local LogInit from %s], name[local_log]]] call[name[local_log].LogInit, parameter[]] call[name[logging].debug, parameter[constant[Using local AppLogInit from %s], name[local_log]]] variable[LOGGER] assign[=] call[name[local_log].AppLogInit, parameter[]]
keyword[def] identifier[ServerLoggingStartupInit] (): literal[string] keyword[global] identifier[LOGGER] keyword[if] identifier[local_log] : identifier[logging] . identifier[debug] ( literal[string] , identifier[local_log] ) identifier[local_log] . identifier[LogInit] () identifier[logging] . identifier[debug] ( literal[string] , identifier[local_log] ) identifier[LOGGER] = identifier[local_log] . identifier[AppLogInit] () keyword[else] : identifier[LogInit] () identifier[LOGGER] = identifier[AppLogInit] ()
def ServerLoggingStartupInit(): """Initialize the server logging configuration.""" global LOGGER if local_log: logging.debug('Using local LogInit from %s', local_log) local_log.LogInit() logging.debug('Using local AppLogInit from %s', local_log) LOGGER = local_log.AppLogInit() # depends on [control=['if'], data=[]] else: LogInit() LOGGER = AppLogInit()
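A typical call site, assuming LogInit/AppLogInit configure the standard logging module (a project-specific detail not shown here):

import logging

ServerLoggingStartupInit()  # picks local overrides when present, else defaults
logging.info('server logging configured')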