Dataset schema: four parallel string representations per record (value lengths are min/max characters):

    code               75 .. 104k    original Python source of a function
    code_sememe        47 .. 309k    serialized AST ("sememe") form of the same code
    token_type        215 .. 214k    token stream tagged as keyword / identifier / literal
    code_dependency    75 .. 155k    source annotated with "# depends on" control/data edges
def netmiko_commands(*commands, **kwargs):
    '''
    .. versionadded:: 2019.2.0

    Invoke one or more commands to be executed on the remote device, via Netmiko.
    Returns a list of strings, with the output from each command.

    commands
        A list of commands to be executed.

    expect_string
        Regular expression pattern to use for determining end of output.
        If left blank will default to being based on router prompt.

    delay_factor: ``1``
        Multiplying factor used to adjust delays (default: ``1``).

    max_loops: ``500``
        Controls wait time in conjunction with delay_factor. Will default to be
        based upon self.timeout.

    auto_find_prompt: ``True``
        Whether it should try to auto-detect the prompt (default: ``True``).

    strip_prompt: ``True``
        Remove the trailing router prompt from the output (default: ``True``).

    strip_command: ``True``
        Remove the echo of the command from the output (default: ``True``).

    normalize: ``True``
        Ensure the proper enter is sent at end of command (default: ``True``).

    use_textfsm: ``False``
        Process command output through TextFSM template (default: ``False``).

    CLI Example:

    .. code-block:: bash

        salt '*' napalm.netmiko_commands 'show version' 'show interfaces'
    '''
    conn = netmiko_conn(**kwargs)
    ret = []
    for cmd in commands:
        ret.append(conn.send_command(cmd))
    return ret
def function[netmiko_commands, parameter[]]: constant[ .. versionadded:: 2019.2.0 Invoke one or more commands to be executed on the remote device, via Netmiko. Returns a list of strings, with the output from each command. commands A list of commands to be executed. expect_string Regular expression pattern to use for determining end of output. If left blank will default to being based on router prompt. delay_factor: ``1`` Multiplying factor used to adjust delays (default: ``1``). max_loops: ``500`` Controls wait time in conjunction with delay_factor. Will default to be based upon self.timeout. auto_find_prompt: ``True`` Whether it should try to auto-detect the prompt (default: ``True``). strip_prompt: ``True`` Remove the trailing router prompt from the output (default: ``True``). strip_command: ``True`` Remove the echo of the command from the output (default: ``True``). normalize: ``True`` Ensure the proper enter is sent at end of command (default: ``True``). use_textfsm: ``False`` Process command output through TextFSM template (default: ``False``). CLI Example: .. code-block:: bash salt '*' napalm.netmiko_commands 'show version' 'show interfaces' ] variable[conn] assign[=] call[name[netmiko_conn], parameter[]] variable[ret] assign[=] list[[]] for taget[name[cmd]] in starred[name[commands]] begin[:] call[name[ret].append, parameter[call[name[conn].send_command, parameter[name[cmd]]]]] return[name[ret]]
keyword[def] identifier[netmiko_commands] (* identifier[commands] ,** identifier[kwargs] ): literal[string] identifier[conn] = identifier[netmiko_conn] (** identifier[kwargs] ) identifier[ret] =[] keyword[for] identifier[cmd] keyword[in] identifier[commands] : identifier[ret] . identifier[append] ( identifier[conn] . identifier[send_command] ( identifier[cmd] )) keyword[return] identifier[ret]
def netmiko_commands(*commands, **kwargs):
    """
    .. versionadded:: 2019.2.0

    Invoke one or more commands to be executed on the remote device, via Netmiko.
    Returns a list of strings, with the output from each command.

    commands
        A list of commands to be executed.

    expect_string
        Regular expression pattern to use for determining end of output.
        If left blank will default to being based on router prompt.

    delay_factor: ``1``
        Multiplying factor used to adjust delays (default: ``1``).

    max_loops: ``500``
        Controls wait time in conjunction with delay_factor. Will default to be
        based upon self.timeout.

    auto_find_prompt: ``True``
        Whether it should try to auto-detect the prompt (default: ``True``).

    strip_prompt: ``True``
        Remove the trailing router prompt from the output (default: ``True``).

    strip_command: ``True``
        Remove the echo of the command from the output (default: ``True``).

    normalize: ``True``
        Ensure the proper enter is sent at end of command (default: ``True``).

    use_textfsm: ``False``
        Process command output through TextFSM template (default: ``False``).

    CLI Example:

    .. code-block:: bash

        salt '*' napalm.netmiko_commands 'show version' 'show interfaces'
    """
    conn = netmiko_conn(**kwargs)
    ret = []
    for cmd in commands:
        ret.append(conn.send_command(cmd))  # depends on [control=['for'], data=['cmd']]
    return ret
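Besides the Salt CLI example in the docstring, the function can be exercised directly; the connection kwargs below are invented for illustration and are simply forwarded to netmiko_conn(), which is assumed to hand them to Netmiko:

    # Hypothetical direct call; kwargs are assumptions about what
    # netmiko_conn() forwards to Netmiko's ConnectHandler.
    output = netmiko_commands(
        'show version', 'show interfaces',
        device_type='cisco_ios', host='192.0.2.1',
        username='admin', password='secret')
    for text in output:
        print(text)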
def main():
    """ Get arguments and call the execution function"""
    if len(sys.argv) < 6:
        print("Usage: %s server_url username password namespace' \
' classname" % sys.argv[0])
        print('Using internal defaults')
        server_url = SERVER_URL
        namespace = TEST_NAMESPACE
        username = USERNAME
        password = PASSWORD
        classname = TEST_CLASS
    else:
        print('Get from input')
        server_url = sys.argv[1]
        namespace = sys.argv[2]
        username = sys.argv[3]
        password = sys.argv[4]
        classname = sys.argv[5]

    # create the credentials tuple for WBEMConnection
    creds = (username, password)

    # call the method to execute the request and display results
    execute_request(server_url, creds, namespace, classname)

    return 0
def function[main, parameter[]]: constant[ Get arguments and call the execution function] if compare[call[name[len], parameter[name[sys].argv]] less[<] constant[6]] begin[:] call[name[print], parameter[binary_operation[constant[Usage: %s server_url username password namespace' ' classname] <ast.Mod object at 0x7da2590d6920> call[name[sys].argv][constant[0]]]]] call[name[print], parameter[constant[Using internal defaults]]] variable[server_url] assign[=] name[SERVER_URL] variable[namespace] assign[=] name[TEST_NAMESPACE] variable[username] assign[=] name[USERNAME] variable[password] assign[=] name[PASSWORD] variable[classname] assign[=] name[TEST_CLASS] variable[creds] assign[=] tuple[[<ast.Name object at 0x7da204345900>, <ast.Name object at 0x7da2043450c0>]] call[name[execute_request], parameter[name[server_url], name[creds], name[namespace], name[classname]]] return[constant[0]]
keyword[def] identifier[main] (): literal[string] keyword[if] identifier[len] ( identifier[sys] . identifier[argv] )< literal[int] : identifier[print] ( literal[string] % identifier[sys] . identifier[argv] [ literal[int] ]) identifier[print] ( literal[string] ) identifier[server_url] = identifier[SERVER_URL] identifier[namespace] = identifier[TEST_NAMESPACE] identifier[username] = identifier[USERNAME] identifier[password] = identifier[PASSWORD] identifier[classname] = identifier[TEST_CLASS] keyword[else] : identifier[print] ( literal[string] ) identifier[server_url] = identifier[sys] . identifier[argv] [ literal[int] ] identifier[namespace] = identifier[sys] . identifier[argv] [ literal[int] ] identifier[username] = identifier[sys] . identifier[argv] [ literal[int] ] identifier[password] = identifier[sys] . identifier[argv] [ literal[int] ] identifier[classname] = identifier[sys] . identifier[argv] [ literal[int] ] identifier[creds] =( identifier[username] , identifier[password] ) identifier[execute_request] ( identifier[server_url] , identifier[creds] , identifier[namespace] , identifier[classname] ) keyword[return] literal[int]
def main():
    """ Get arguments and call the execution function"""
    if len(sys.argv) < 6:
        print("Usage: %s server_url username password namespace' ' classname" % sys.argv[0])
        print('Using internal defaults')
        server_url = SERVER_URL
        namespace = TEST_NAMESPACE
        username = USERNAME
        password = PASSWORD
        classname = TEST_CLASS  # depends on [control=['if'], data=[]]
    else:
        print('Get from input')
        server_url = sys.argv[1]
        namespace = sys.argv[2]
        username = sys.argv[3]
        password = sys.argv[4]
        classname = sys.argv[5]
    # create the credentials tuple for WBEMConnection
    creds = (username, password)
    # call the method to execute the request and display results
    execute_request(server_url, creds, namespace, classname)
    return 0
def _wrapped_unsigned_mul(a, b):
    """
    Perform wrapped unsigned multiplication on two StridedIntervals.

    :param a: The first operand (StridedInterval)
    :param b: The second operand (StridedInterval)
    :return: The multiplication result
    """
    if a.bits != b.bits:
        logger.warning("Signed mul: two parameters have different bit length")

    bits = max(a.bits, b.bits)
    lb = a.lower_bound * b.lower_bound
    ub = a.upper_bound * b.upper_bound
    uninit_flag = a.uninitialized | b.uninitialized

    if (ub - lb) < (2 ** bits):
        if b.is_integer:
            # Multiplication with an integer, and it does not overflow!
            stride = abs(a.stride * b.lower_bound)
        elif a.is_integer:
            stride = abs(a.lower_bound * b.stride)
        else:
            stride = fractions.gcd(a.stride, b.stride)
        return StridedInterval(bits=bits, stride=stride, lower_bound=lb,
                               upper_bound=ub, uninitialized=uninit_flag)
    else:
        # Overflow occurred
        return StridedInterval.top(bits, uninitialized=False)
def function[_wrapped_unsigned_mul, parameter[a, b]]: constant[ Perform wrapped unsigned multiplication on two StridedIntervals. :param a: The first operand (StridedInterval) :param b: The second operand (StridedInterval) :return: The multiplication result ] if compare[name[a].bits not_equal[!=] name[b].bits] begin[:] call[name[logger].warning, parameter[constant[Signed mul: two parameters have different bit length]]] variable[bits] assign[=] call[name[max], parameter[name[a].bits, name[b].bits]] variable[lb] assign[=] binary_operation[name[a].lower_bound * name[b].lower_bound] variable[ub] assign[=] binary_operation[name[a].upper_bound * name[b].upper_bound] variable[uninit_flag] assign[=] binary_operation[name[a].uninitialized <ast.BitOr object at 0x7da2590d6aa0> name[b].uninitialized] if compare[binary_operation[name[ub] - name[lb]] less[<] binary_operation[constant[2] ** name[bits]]] begin[:] if name[b].is_integer begin[:] variable[stride] assign[=] call[name[abs], parameter[binary_operation[name[a].stride * name[b].lower_bound]]] return[call[name[StridedInterval], parameter[]]]
keyword[def] identifier[_wrapped_unsigned_mul] ( identifier[a] , identifier[b] ): literal[string] keyword[if] identifier[a] . identifier[bits] != identifier[b] . identifier[bits] : identifier[logger] . identifier[warning] ( literal[string] ) identifier[bits] = identifier[max] ( identifier[a] . identifier[bits] , identifier[b] . identifier[bits] ) identifier[lb] = identifier[a] . identifier[lower_bound] * identifier[b] . identifier[lower_bound] identifier[ub] = identifier[a] . identifier[upper_bound] * identifier[b] . identifier[upper_bound] identifier[uninit_flag] = identifier[a] . identifier[uninitialized] | identifier[b] . identifier[uninitialized] keyword[if] ( identifier[ub] - identifier[lb] )<( literal[int] ** identifier[bits] ): keyword[if] identifier[b] . identifier[is_integer] : identifier[stride] = identifier[abs] ( identifier[a] . identifier[stride] * identifier[b] . identifier[lower_bound] ) keyword[elif] identifier[a] . identifier[is_integer] : identifier[stride] = identifier[abs] ( identifier[a] . identifier[lower_bound] * identifier[b] . identifier[stride] ) keyword[else] : identifier[stride] = identifier[fractions] . identifier[gcd] ( identifier[a] . identifier[stride] , identifier[b] . identifier[stride] ) keyword[return] identifier[StridedInterval] ( identifier[bits] = identifier[bits] , identifier[stride] = identifier[stride] , identifier[lower_bound] = identifier[lb] , identifier[upper_bound] = identifier[ub] , identifier[uninitialized] = identifier[uninit_flag] ) keyword[else] : keyword[return] identifier[StridedInterval] . identifier[top] ( identifier[bits] , identifier[uninitialized] = keyword[False] )
def _wrapped_unsigned_mul(a, b):
    """
    Perform wrapped unsigned multiplication on two StridedIntervals.

    :param a: The first operand (StridedInterval)
    :param b: The second operand (StridedInterval)
    :return: The multiplication result
    """
    if a.bits != b.bits:
        logger.warning('Signed mul: two parameters have different bit length')  # depends on [control=['if'], data=[]]
    bits = max(a.bits, b.bits)
    lb = a.lower_bound * b.lower_bound
    ub = a.upper_bound * b.upper_bound
    uninit_flag = a.uninitialized | b.uninitialized
    if ub - lb < 2 ** bits:
        if b.is_integer:
            # Multiplication with an integer, and it does not overflow!
            stride = abs(a.stride * b.lower_bound)  # depends on [control=['if'], data=[]]
        elif a.is_integer:
            stride = abs(a.lower_bound * b.stride)  # depends on [control=['if'], data=[]]
        else:
            stride = fractions.gcd(a.stride, b.stride)
        return StridedInterval(bits=bits, stride=stride, lower_bound=lb, upper_bound=ub, uninitialized=uninit_flag)  # depends on [control=['if'], data=[]]
    else:
        # Overflow occurred
        return StridedInterval.top(bits, uninitialized=False)
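One portability note on the stride computation above: fractions.gcd was deprecated in Python 3.5 and removed in 3.9, so on modern interpreters the equivalent call is math.gcd. A minimal sketch of the fallback, assuming only the standard library:

    # Portability shim: prefer math.gcd (Python 3.5+); fall back to
    # fractions.gcd only on interpreters old enough to still provide it.
    try:
        from math import gcd
    except ImportError:
        from fractions import gcd  # removed in Python 3.9

    stride = gcd(12, 18)  # -> 6, same result either way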
def doesNotMatch(self, value, caseSensitive=True):
    """
    Sets the operator type to Query.Op.DoesNotMatch and sets the \
    value to the inputted value.

    :param value <variant>

    :return self (useful for chaining)

    :usage |>>> from orb import Query as Q
           |>>> query = Q('comments').doesNotMatch('test')
           |>>> print query
           |comments does_not_contain test
    """
    newq = self.copy()
    newq.setOp(Query.Op.DoesNotMatch)
    newq.setValue(value)
    newq.setCaseSensitive(caseSensitive)
    return newq
def function[doesNotMatch, parameter[self, value, caseSensitive]]: constant[ Sets the operator type to Query.Op.DoesNotMatch and sets the value to the inputted value. :param value <variant> :return self (useful for chaining) :usage |>>> from orb import Query as Q |>>> query = Q('comments').doesNotMatch('test') |>>> print query |comments does_not_contain test ] variable[newq] assign[=] call[name[self].copy, parameter[]] call[name[newq].setOp, parameter[name[Query].Op.DoesNotMatch]] call[name[newq].setValue, parameter[name[value]]] call[name[newq].setCaseSensitive, parameter[name[caseSensitive]]] return[name[newq]]
keyword[def] identifier[doesNotMatch] ( identifier[self] , identifier[value] , identifier[caseSensitive] = keyword[True] ): literal[string] identifier[newq] = identifier[self] . identifier[copy] () identifier[newq] . identifier[setOp] ( identifier[Query] . identifier[Op] . identifier[DoesNotMatch] ) identifier[newq] . identifier[setValue] ( identifier[value] ) identifier[newq] . identifier[setCaseSensitive] ( identifier[caseSensitive] ) keyword[return] identifier[newq]
def doesNotMatch(self, value, caseSensitive=True):
    """
    Sets the operator type to Query.Op.DoesNotMatch and sets the
    value to the inputted value.

    :param value <variant>

    :return self (useful for chaining)

    :usage |>>> from orb import Query as Q
           |>>> query = Q('comments').doesNotMatch('test')
           |>>> print query
           |comments does_not_contain test
    """
    newq = self.copy()
    newq.setOp(Query.Op.DoesNotMatch)
    newq.setValue(value)
    newq.setCaseSensitive(caseSensitive)
    return newq
def as_markdown(self):
    """Gets report as markdown

    :return: markdown-formatted report
    """
    labels, data = self._get_table()
    table = MarkdownTable(labels, data)
    return str(table)
def function[as_markdown, parameter[self]]: constant[Gets report as json :return: json-formatted report ] <ast.Tuple object at 0x7da1b1ec1c90> assign[=] call[name[self]._get_table, parameter[]] variable[table] assign[=] call[name[MarkdownTable], parameter[name[labels], name[data]]] return[call[name[str], parameter[name[table]]]]
keyword[def] identifier[as_markdown] ( identifier[self] ): literal[string] identifier[labels] , identifier[data] = identifier[self] . identifier[_get_table] () identifier[table] = identifier[MarkdownTable] ( identifier[labels] , identifier[data] ) keyword[return] identifier[str] ( identifier[table] )
def as_markdown(self):
    """Gets report as markdown

    :return: markdown-formatted report
    """
    (labels, data) = self._get_table()
    table = MarkdownTable(labels, data)
    return str(table)
def _create(opener, format_code, files, filter_code=None, block_size=16384):
    """Create an archive from a collection of files (not recursive)."""

    a = _archive_write_new()

    _set_write_context(a, format_code, filter_code)

    _LOGGER.debug("Opening archive (create).")
    opener(a)

    # Use the standard uid/gid lookup mechanisms.
    # This was set on an instance of *disk* that wasn't used. Do we still need it?
    #_archive_read_disk_set_standard_lookup(disk)

    # We used to yield this, but that necessitated users always flattening the
    # response. This means we don't have to, but we still have to return an
    # enumerable in order to maintain compatibility.
    added = []

    for filepath in files:
        filepath = filepath.encode('utf-8')

        disk = libarchive.calls.archive_read.c_archive_read_disk_new()
        libarchive.calls.archive_read.c_archive_read_disk_open(
            disk,
            filepath)

        while 1:
            entry = libarchive.calls.archive_entry.c_archive_entry_new()
            r = libarchive.calls.archive_read.c_archive_read_next_header2(
                    disk,
                    entry)

            if r == libarchive.constants.archive.ARCHIVE_EOF:
                break
            elif r != libarchive.constants.archive.ARCHIVE_OK:
                message = c_archive_error_string(disk)
                raise libarchive.exception.ArchiveError(
                    "Could not build header from physical source file "
                    "during create: (%d) [%s]" % (r, message))

            ae = libarchive.adapters.archive_entry.ArchiveEntry(
                    disk,
                    entry)

            # print("WRITING: [{}] {}".format(ae, ae.filetype))

            # Strip leading slash so it stores as a relative path.
            if os.path.isabs(ae.pathname) is True:
                ae.pathname = ae.pathname[1:]

            added.append(ae)
            libarchive.calls.archive_read.c_archive_read_disk_descend(disk)

            # NOTE: There's a `archive_entry_set_size()` on the underlying
            # entry type, but it doesn't appear to be necessary. The sizes
            # report perfectly fine with the [probably automatic] counting that
            # occurs just with `_archive_write_data()`.

            r = _archive_write_header(a, entry)

            if ae.filetype.IFLNK is True and os.path.islink(ae.sourcepath) is True:
                target_path = os.readlink(ae.sourcepath)
                ae.symlink_targetpath = target_path
            else:
                with open(ae.sourcepath, 'rb') as f:
                    while 1:
                        data = f.read(block_size)
                        if not data:
                            break

                        _archive_write_data(a, data)

            libarchive.calls.archive_entry.c_archive_entry_free(entry)

        libarchive.calls.archive_read.c_archive_read_close(disk)
        libarchive.calls.archive_read.c_archive_read_free(disk)

    _LOGGER.debug("Closing archive (create).")
    _archive_write_close(a)
    _archive_write_free(a)

    return added
def function[_create, parameter[opener, format_code, files, filter_code, block_size]]: constant[Create an archive from a collection of files (not recursive).] variable[a] assign[=] call[name[_archive_write_new], parameter[]] call[name[_set_write_context], parameter[name[a], name[format_code], name[filter_code]]] call[name[_LOGGER].debug, parameter[constant[Opening archive (create).]]] call[name[opener], parameter[name[a]]] variable[added] assign[=] list[[]] for taget[name[filepath]] in starred[name[files]] begin[:] variable[filepath] assign[=] call[name[filepath].encode, parameter[constant[utf-8]]] variable[disk] assign[=] call[name[libarchive].calls.archive_read.c_archive_read_disk_new, parameter[]] call[name[libarchive].calls.archive_read.c_archive_read_disk_open, parameter[name[disk], name[filepath]]] while constant[1] begin[:] variable[entry] assign[=] call[name[libarchive].calls.archive_entry.c_archive_entry_new, parameter[]] variable[r] assign[=] call[name[libarchive].calls.archive_read.c_archive_read_next_header2, parameter[name[disk], name[entry]]] if compare[name[r] equal[==] name[libarchive].constants.archive.ARCHIVE_EOF] begin[:] break variable[ae] assign[=] call[name[libarchive].adapters.archive_entry.ArchiveEntry, parameter[name[disk], name[entry]]] if compare[call[name[os].path.isabs, parameter[name[ae].pathname]] is constant[True]] begin[:] name[ae].pathname assign[=] call[name[ae].pathname][<ast.Slice object at 0x7da1b0f58730>] call[name[added].append, parameter[name[ae]]] call[name[libarchive].calls.archive_read.c_archive_read_disk_descend, parameter[name[disk]]] variable[r] assign[=] call[name[_archive_write_header], parameter[name[a], name[entry]]] if <ast.BoolOp object at 0x7da1b0f59810> begin[:] variable[target_path] assign[=] call[name[os].readlink, parameter[name[ae].sourcepath]] name[ae].symlink_targetpath assign[=] name[target_path] call[name[libarchive].calls.archive_entry.c_archive_entry_free, parameter[name[entry]]] call[name[libarchive].calls.archive_read.c_archive_read_close, parameter[name[disk]]] call[name[libarchive].calls.archive_read.c_archive_read_free, parameter[name[disk]]] call[name[_LOGGER].debug, parameter[constant[Closing archive (create).]]] call[name[_archive_write_close], parameter[name[a]]] call[name[_archive_write_free], parameter[name[a]]] return[name[added]]
keyword[def] identifier[_create] ( identifier[opener] , identifier[format_code] , identifier[files] , identifier[filter_code] = keyword[None] , identifier[block_size] = literal[int] ): literal[string] identifier[a] = identifier[_archive_write_new] () identifier[_set_write_context] ( identifier[a] , identifier[format_code] , identifier[filter_code] ) identifier[_LOGGER] . identifier[debug] ( literal[string] ) identifier[opener] ( identifier[a] ) identifier[added] =[] keyword[for] identifier[filepath] keyword[in] identifier[files] : identifier[filepath] = identifier[filepath] . identifier[encode] ( literal[string] ) identifier[disk] = identifier[libarchive] . identifier[calls] . identifier[archive_read] . identifier[c_archive_read_disk_new] () identifier[libarchive] . identifier[calls] . identifier[archive_read] . identifier[c_archive_read_disk_open] ( identifier[disk] , identifier[filepath] ) keyword[while] literal[int] : identifier[entry] = identifier[libarchive] . identifier[calls] . identifier[archive_entry] . identifier[c_archive_entry_new] () identifier[r] = identifier[libarchive] . identifier[calls] . identifier[archive_read] . identifier[c_archive_read_next_header2] ( identifier[disk] , identifier[entry] ) keyword[if] identifier[r] == identifier[libarchive] . identifier[constants] . identifier[archive] . identifier[ARCHIVE_EOF] : keyword[break] keyword[elif] identifier[r] != identifier[libarchive] . identifier[constants] . identifier[archive] . identifier[ARCHIVE_OK] : identifier[message] = identifier[c_archive_error_string] ( identifier[disk] ) keyword[raise] identifier[libarchive] . identifier[exception] . identifier[ArchiveError] ( literal[string] literal[string] % ( identifier[r] , identifier[message] )) identifier[ae] = identifier[libarchive] . identifier[adapters] . identifier[archive_entry] . identifier[ArchiveEntry] ( identifier[disk] , identifier[entry] ) keyword[if] identifier[os] . identifier[path] . identifier[isabs] ( identifier[ae] . identifier[pathname] ) keyword[is] keyword[True] : identifier[ae] . identifier[pathname] = identifier[ae] . identifier[pathname] [ literal[int] :] identifier[added] . identifier[append] ( identifier[ae] ) identifier[libarchive] . identifier[calls] . identifier[archive_read] . identifier[c_archive_read_disk_descend] ( identifier[disk] ) identifier[r] = identifier[_archive_write_header] ( identifier[a] , identifier[entry] ) keyword[if] identifier[ae] . identifier[filetype] . identifier[IFLNK] keyword[is] keyword[True] keyword[and] identifier[os] . identifier[path] . identifier[islink] ( identifier[ae] . identifier[sourcepath] ) keyword[is] keyword[True] : identifier[target_path] = identifier[os] . identifier[readlink] ( identifier[ae] . identifier[sourcepath] ) identifier[ae] . identifier[symlink_targetpath] = identifier[target_path] keyword[else] : keyword[with] identifier[open] ( identifier[ae] . identifier[sourcepath] , literal[string] ) keyword[as] identifier[f] : keyword[while] literal[int] : identifier[data] = identifier[f] . identifier[read] ( identifier[block_size] ) keyword[if] keyword[not] identifier[data] : keyword[break] identifier[_archive_write_data] ( identifier[a] , identifier[data] ) identifier[libarchive] . identifier[calls] . identifier[archive_entry] . identifier[c_archive_entry_free] ( identifier[entry] ) identifier[libarchive] . identifier[calls] . identifier[archive_read] . identifier[c_archive_read_close] ( identifier[disk] ) identifier[libarchive] . identifier[calls] . identifier[archive_read] . identifier[c_archive_read_free] ( identifier[disk] ) identifier[_LOGGER] . identifier[debug] ( literal[string] ) identifier[_archive_write_close] ( identifier[a] ) identifier[_archive_write_free] ( identifier[a] ) keyword[return] identifier[added]
def _create(opener, format_code, files, filter_code=None, block_size=16384):
    """Create an archive from a collection of files (not recursive)."""
    a = _archive_write_new()
    _set_write_context(a, format_code, filter_code)
    _LOGGER.debug('Opening archive (create).')
    opener(a)
    # Use the standard uid/gid lookup mechanisms.
    # This was set on an instance of *disk* that wasn't used. Do we still need it?
    #_archive_read_disk_set_standard_lookup(disk)
    # We used to yield this, but that necessitated users always flattening the
    # response. This means we don't have to, but we still have to return an
    # enumerable in order to maintain compatibility.
    added = []
    for filepath in files:
        filepath = filepath.encode('utf-8')
        disk = libarchive.calls.archive_read.c_archive_read_disk_new()
        libarchive.calls.archive_read.c_archive_read_disk_open(disk, filepath)
        while 1:
            entry = libarchive.calls.archive_entry.c_archive_entry_new()
            r = libarchive.calls.archive_read.c_archive_read_next_header2(disk, entry)
            if r == libarchive.constants.archive.ARCHIVE_EOF:
                break  # depends on [control=['if'], data=[]]
            elif r != libarchive.constants.archive.ARCHIVE_OK:
                message = c_archive_error_string(disk)
                raise libarchive.exception.ArchiveError('Could not build header from physical source file during create: (%d) [%s]' % (r, message))  # depends on [control=['if'], data=['r']]
            ae = libarchive.adapters.archive_entry.ArchiveEntry(disk, entry)
            # print("WRITING: [{}] {}".format(ae, ae.filetype))
            # Strip leading slash so it stores as a relative path.
            if os.path.isabs(ae.pathname) is True:
                ae.pathname = ae.pathname[1:]  # depends on [control=['if'], data=[]]
            added.append(ae)
            libarchive.calls.archive_read.c_archive_read_disk_descend(disk)
            # NOTE: There's a `archive_entry_set_size()` on the underlying
            # entry type, but it doesn't appear to be necessary. The sizes
            # report perfectly fine with the [probably automatic] counting that
            # occurs just with `_archive_write_data()`.
            r = _archive_write_header(a, entry)
            if ae.filetype.IFLNK is True and os.path.islink(ae.sourcepath) is True:
                target_path = os.readlink(ae.sourcepath)
                ae.symlink_targetpath = target_path  # depends on [control=['if'], data=[]]
            else:
                with open(ae.sourcepath, 'rb') as f:
                    while 1:
                        data = f.read(block_size)
                        if not data:
                            break  # depends on [control=['if'], data=[]]
                        _archive_write_data(a, data)  # depends on [control=['while'], data=[]]
                # depends on [control=['with'], data=['f']]
            libarchive.calls.archive_entry.c_archive_entry_free(entry)  # depends on [control=['while'], data=[]]
        libarchive.calls.archive_read.c_archive_read_close(disk)
        libarchive.calls.archive_read.c_archive_read_free(disk)  # depends on [control=['for'], data=['filepath']]
    _LOGGER.debug('Closing archive (create).')
    _archive_write_close(a)
    _archive_write_free(a)
    return added
def convert_bytes(b):
    '''Convert a number of bytes into a human readable memory usage,
    bytes, kilo, mega, giga, tera, peta, exa, zetta, yotta'''
    if b is None:
        return '#NA'
    for s in reversed(memory_symbols):
        if b >= memory_size[s]:
            value = float(b) / memory_size[s]
            return '%.1f%sB' % (value, s)
    return "%sB" % b
def function[convert_bytes, parameter[b]]: constant[Convert a number of bytes into a human readable memory usage, bytes, kilo, mega, giga, tera, peta, exa, zetta, yotta] if compare[name[b] is constant[None]] begin[:] return[constant[#NA]] for taget[name[s]] in starred[call[name[reversed], parameter[name[memory_symbols]]]] begin[:] if compare[name[b] greater_or_equal[>=] call[name[memory_size]][name[s]]] begin[:] variable[value] assign[=] binary_operation[call[name[float], parameter[name[b]]] / call[name[memory_size]][name[s]]] return[binary_operation[constant[%.1f%sB] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cd570>, <ast.Name object at 0x7da18c4cfa90>]]]] return[binary_operation[constant[%sB] <ast.Mod object at 0x7da2590d6920> name[b]]]
keyword[def] identifier[convert_bytes] ( identifier[b] ): literal[string] keyword[if] identifier[b] keyword[is] keyword[None] : keyword[return] literal[string] keyword[for] identifier[s] keyword[in] identifier[reversed] ( identifier[memory_symbols] ): keyword[if] identifier[b] >= identifier[memory_size] [ identifier[s] ]: identifier[value] = identifier[float] ( identifier[b] )/ identifier[memory_size] [ identifier[s] ] keyword[return] literal[string] %( identifier[value] , identifier[s] ) keyword[return] literal[string] % identifier[b]
def convert_bytes(b):
    """Convert a number of bytes into a human readable memory usage,
    bytes, kilo, mega, giga, tera, peta, exa, zetta, yotta"""
    if b is None:
        return '#NA'  # depends on [control=['if'], data=[]]
    for s in reversed(memory_symbols):
        if b >= memory_size[s]:
            value = float(b) / memory_size[s]
            return '%.1f%sB' % (value, s)  # depends on [control=['if'], data=['b']]
        # depends on [control=['for'], data=['s']]
    return '%sB' % b
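convert_bytes() relies on two module-level tables outside this snippet. A minimal sketch of what they plausibly look like; the names memory_symbols and memory_size come from the code, but the values here are an assumption consistent with its kilo-through-yotta docstring:

    # Assumed companion tables for convert_bytes(); 1024-based units
    # following the docstring's kilo..yotta progression.
    memory_symbols = ['K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y']
    memory_size = {s: 1024 ** (i + 1) for i, s in enumerate(memory_symbols)}

    print(convert_bytes(3 * 1024 ** 2))  # -> '3.0MB'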
def write_pa11y_config(item):
    """
    The only way that pa11y will see the same page that scrapy sees is to
    make sure that pa11y requests the page with the same headers. However,
    the only way to configure request headers with pa11y is to write them
    into a config file.

    This function will create a config file, write the config into it,
    and return a reference to that file.
    """
    config = {
        "page": {
            "headers": item["request_headers"],
        },
    }
    config_file = tempfile.NamedTemporaryFile(
        mode="w",
        prefix="pa11y-config-",
        suffix=".json",
        delete=False
    )
    json.dump(config, config_file)
    config_file.close()
    return config_file
def function[write_pa11y_config, parameter[item]]: constant[ The only way that pa11y will see the same page that scrapy sees is to make sure that pa11y requests the page with the same headers. However, the only way to configure request headers with pa11y is to write them into a config file. This function will create a config file, write the config into it, and return a reference to that file. ] variable[config] assign[=] dictionary[[<ast.Constant object at 0x7da20c6a80a0>], [<ast.Dict object at 0x7da20c6abc70>]] variable[config_file] assign[=] call[name[tempfile].NamedTemporaryFile, parameter[]] call[name[json].dump, parameter[name[config], name[config_file]]] call[name[config_file].close, parameter[]] return[name[config_file]]
keyword[def] identifier[write_pa11y_config] ( identifier[item] ): literal[string] identifier[config] ={ literal[string] :{ literal[string] : identifier[item] [ literal[string] ], }, } identifier[config_file] = identifier[tempfile] . identifier[NamedTemporaryFile] ( identifier[mode] = literal[string] , identifier[prefix] = literal[string] , identifier[suffix] = literal[string] , identifier[delete] = keyword[False] ) identifier[json] . identifier[dump] ( identifier[config] , identifier[config_file] ) identifier[config_file] . identifier[close] () keyword[return] identifier[config_file]
def write_pa11y_config(item):
    """
    The only way that pa11y will see the same page that scrapy sees is to
    make sure that pa11y requests the page with the same headers. However,
    the only way to configure request headers with pa11y is to write them
    into a config file.

    This function will create a config file, write the config into it,
    and return a reference to that file.
    """
    config = {'page': {'headers': item['request_headers']}}
    config_file = tempfile.NamedTemporaryFile(mode='w', prefix='pa11y-config-', suffix='.json', delete=False)
    json.dump(config, config_file)
    config_file.close()
    return config_file
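Because the temp file is created with delete=False, the caller is responsible for handing the path to pa11y and cleaning it up afterwards. A hedged sketch of that follow-up step; the pa11y executable name and its --config flag are assumptions about the installed CLI:

    import os
    import subprocess

    def run_pa11y(item, url):
        # Hand the generated config to the pa11y CLI, then remove the
        # temp file ourselves since it was created with delete=False.
        config_file = write_pa11y_config(item)
        try:
            return subprocess.check_output(
                ['pa11y', '--config', config_file.name, url])
        finally:
            os.remove(config_file.name)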
def getSwapStats(self):
    """Return information on swap partition and / or files.

    @return: Dictionary of stats.
    """
    info_dict = {}
    try:
        fp = open(swapsFile, 'r')
        data = fp.read()
        fp.close()
    except:
        raise IOError('Failed reading stats from file: %s' % swapsFile)
    lines = data.splitlines()
    if len(lines) > 1:
        colnames = [name.lower() for name in lines[0].split()]
        for line in lines[1:]:
            cols = line.split()
            info_dict[cols[0]] = dict(zip(colnames[1:], cols[1:]))
    return info_dict
def function[getSwapStats, parameter[self]]: constant[Return information on swap partition and / or files. @return: Dictionary of stats. ] variable[info_dict] assign[=] dictionary[[], []] <ast.Try object at 0x7da18dc9bd00> variable[lines] assign[=] call[name[data].splitlines, parameter[]] if compare[call[name[len], parameter[name[lines]]] greater[>] constant[1]] begin[:] variable[colnames] assign[=] <ast.ListComp object at 0x7da18dc9b9d0> for taget[name[line]] in starred[call[name[lines]][<ast.Slice object at 0x7da18dc98f40>]] begin[:] variable[cols] assign[=] call[name[line].split, parameter[]] call[name[info_dict]][call[name[cols]][constant[0]]] assign[=] call[name[dict], parameter[call[name[zip], parameter[call[name[colnames]][<ast.Slice object at 0x7da18dc9a6b0>], call[name[cols]][<ast.Slice object at 0x7da1b10dee90>]]]]] return[name[info_dict]]
keyword[def] identifier[getSwapStats] ( identifier[self] ): literal[string] identifier[info_dict] ={} keyword[try] : identifier[fp] = identifier[open] ( identifier[swapsFile] , literal[string] ) identifier[data] = identifier[fp] . identifier[read] () identifier[fp] . identifier[close] () keyword[except] : keyword[raise] identifier[IOError] ( literal[string] % identifier[swapsFile] ) identifier[lines] = identifier[data] . identifier[splitlines] () keyword[if] identifier[len] ( identifier[lines] )> literal[int] : identifier[colnames] =[ identifier[name] . identifier[lower] () keyword[for] identifier[name] keyword[in] identifier[lines] [ literal[int] ]. identifier[split] ()] keyword[for] identifier[line] keyword[in] identifier[lines] [ literal[int] :]: identifier[cols] = identifier[line] . identifier[split] () identifier[info_dict] [ identifier[cols] [ literal[int] ]]= identifier[dict] ( identifier[zip] ( identifier[colnames] [ literal[int] :], identifier[cols] [ literal[int] :])) keyword[return] identifier[info_dict]
def getSwapStats(self):
    """Return information on swap partition and / or files.

    @return: Dictionary of stats.
    """
    info_dict = {}
    try:
        fp = open(swapsFile, 'r')
        data = fp.read()
        fp.close()  # depends on [control=['try'], data=[]]
    except:
        raise IOError('Failed reading stats from file: %s' % swapsFile)  # depends on [control=['except'], data=[]]
    lines = data.splitlines()
    if len(lines) > 1:
        colnames = [name.lower() for name in lines[0].split()]
        for line in lines[1:]:
            cols = line.split()
            info_dict[cols[0]] = dict(zip(colnames[1:], cols[1:]))  # depends on [control=['for'], data=['line']]
        # depends on [control=['if'], data=[]]
    return info_dict
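The parser above keys off /proc/swaps' whitespace-delimited header row. For a concrete sense of the output (device name and sizes are illustrative, and values stay strings since the code never converts them):

    Filename    Type        Size     Used  Priority
    /dev/sda2   partition   8388604  0     -2

    => {'/dev/sda2': {'type': 'partition', 'size': '8388604',
                      'used': '0', 'priority': '-2'}}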
def start(name):
    '''
    Start a VM

    CLI Example:

    .. code-block:: bash

        salt '*' vboxmanage.start my_vm
    '''
    ret = {}
    cmd = '{0} startvm {1}'.format(vboxcmd(), name)
    ret = salt.modules.cmdmod.run(cmd).splitlines()
    return ret
def function[start, parameter[name]]: constant[ Start a VM CLI Example: .. code-block:: bash salt '*' vboxmanage.start my_vm ] variable[ret] assign[=] dictionary[[], []] variable[cmd] assign[=] call[constant[{0} startvm {1}].format, parameter[call[name[vboxcmd], parameter[]], name[name]]] variable[ret] assign[=] call[call[name[salt].modules.cmdmod.run, parameter[name[cmd]]].splitlines, parameter[]] return[name[ret]]
keyword[def] identifier[start] ( identifier[name] ): literal[string] identifier[ret] ={} identifier[cmd] = literal[string] . identifier[format] ( identifier[vboxcmd] (), identifier[name] ) identifier[ret] = identifier[salt] . identifier[modules] . identifier[cmdmod] . identifier[run] ( identifier[cmd] ). identifier[splitlines] () keyword[return] identifier[ret]
def start(name):
    """
    Start a VM

    CLI Example:

    .. code-block:: bash

        salt '*' vboxmanage.start my_vm
    """
    ret = {}
    cmd = '{0} startvm {1}'.format(vboxcmd(), name)
    ret = salt.modules.cmdmod.run(cmd).splitlines()
    return ret
def new(self, isdir, isparent, name, parent):
    # type: (bool, bool, bytes, Optional[UDFFileEntry]) -> None
    '''
    A method to create a new UDF File Identifier.

    Parameters:
     isdir - Whether this File Identifier is a directory.
     isparent - Whether this File Identifier is a parent (..).
     name - The name for this File Identifier.
     parent - The UDF File Entry representing the parent.
    Returns:
     Nothing.
    '''
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('UDF File Identifier already initialized')

    self.desc_tag = UDFTag()
    self.desc_tag.new(257)  # FIXME: we should let the user set serial_number

    self.icb = UDFLongAD()
    self.icb.new(2048, 2)

    self.isdir = isdir
    self.isparent = isparent
    self.file_characteristics = 0
    if self.isdir:
        self.file_characteristics |= 0x2
    if self.isparent:
        self.file_characteristics |= 0x8
    self.len_impl_use = 0  # FIXME: need to let the user set this

    self.impl_use = b''

    self.len_fi = 0
    if not isparent:
        bytename = name.decode('utf-8')
        try:
            self.fi = bytename.encode('latin-1')
            self.encoding = 'latin-1'
        except UnicodeEncodeError:
            self.fi = bytename.encode('utf-16_be')
            self.encoding = 'utf-16_be'
        self.len_fi = len(self.fi) + 1

    self.parent = parent

    self._initialized = True
def function[new, parameter[self, isdir, isparent, name, parent]]: constant[ A method to create a new UDF File Identifier. Parameters: isdir - Whether this File Identifier is a directory. isparent - Whether this File Identifier is a parent (..). name - The name for this File Identifier. parent - The UDF File Entry representing the parent. Returns: Nothing. ] if name[self]._initialized begin[:] <ast.Raise object at 0x7da20e955210> name[self].desc_tag assign[=] call[name[UDFTag], parameter[]] call[name[self].desc_tag.new, parameter[constant[257]]] name[self].icb assign[=] call[name[UDFLongAD], parameter[]] call[name[self].icb.new, parameter[constant[2048], constant[2]]] name[self].isdir assign[=] name[isdir] name[self].isparent assign[=] name[isparent] name[self].file_characteristics assign[=] constant[0] if name[self].isdir begin[:] <ast.AugAssign object at 0x7da1b0fcf7c0> if name[self].isparent begin[:] <ast.AugAssign object at 0x7da1b0fcf490> name[self].len_impl_use assign[=] constant[0] name[self].impl_use assign[=] constant[b''] name[self].len_fi assign[=] constant[0] if <ast.UnaryOp object at 0x7da1b0fcdc90> begin[:] variable[bytename] assign[=] call[name[name].decode, parameter[constant[utf-8]]] <ast.Try object at 0x7da1b0fce560> name[self].len_fi assign[=] binary_operation[call[name[len], parameter[name[self].fi]] + constant[1]] name[self].parent assign[=] name[parent] name[self]._initialized assign[=] constant[True]
keyword[def] identifier[new] ( identifier[self] , identifier[isdir] , identifier[isparent] , identifier[name] , identifier[parent] ): literal[string] keyword[if] identifier[self] . identifier[_initialized] : keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] ) identifier[self] . identifier[desc_tag] = identifier[UDFTag] () identifier[self] . identifier[desc_tag] . identifier[new] ( literal[int] ) identifier[self] . identifier[icb] = identifier[UDFLongAD] () identifier[self] . identifier[icb] . identifier[new] ( literal[int] , literal[int] ) identifier[self] . identifier[isdir] = identifier[isdir] identifier[self] . identifier[isparent] = identifier[isparent] identifier[self] . identifier[file_characteristics] = literal[int] keyword[if] identifier[self] . identifier[isdir] : identifier[self] . identifier[file_characteristics] |= literal[int] keyword[if] identifier[self] . identifier[isparent] : identifier[self] . identifier[file_characteristics] |= literal[int] identifier[self] . identifier[len_impl_use] = literal[int] identifier[self] . identifier[impl_use] = literal[string] identifier[self] . identifier[len_fi] = literal[int] keyword[if] keyword[not] identifier[isparent] : identifier[bytename] = identifier[name] . identifier[decode] ( literal[string] ) keyword[try] : identifier[self] . identifier[fi] = identifier[bytename] . identifier[encode] ( literal[string] ) identifier[self] . identifier[encoding] = literal[string] keyword[except] identifier[UnicodeEncodeError] : identifier[self] . identifier[fi] = identifier[bytename] . identifier[encode] ( literal[string] ) identifier[self] . identifier[encoding] = literal[string] identifier[self] . identifier[len_fi] = identifier[len] ( identifier[self] . identifier[fi] )+ literal[int] identifier[self] . identifier[parent] = identifier[parent] identifier[self] . identifier[_initialized] = keyword[True]
def new(self, isdir, isparent, name, parent):
    # type: (bool, bool, bytes, Optional[UDFFileEntry]) -> None
    '\n A method to create a new UDF File Identifier.\n\n Parameters:\n isdir - Whether this File Identifier is a directory.\n isparent - Whether this File Identifier is a parent (..).\n name - The name for this File Identifier.\n parent - The UDF File Entry representing the parent.\n Returns:\n Nothing.\n '
    if self._initialized:
        raise pycdlibexception.PyCdlibInternalError('UDF File Identifier already initialized')  # depends on [control=['if'], data=[]]
    self.desc_tag = UDFTag()
    self.desc_tag.new(257)
    # FIXME: we should let the user set serial_number
    self.icb = UDFLongAD()
    self.icb.new(2048, 2)
    self.isdir = isdir
    self.isparent = isparent
    self.file_characteristics = 0
    if self.isdir:
        self.file_characteristics |= 2  # depends on [control=['if'], data=[]]
    if self.isparent:
        self.file_characteristics |= 8  # depends on [control=['if'], data=[]]
    self.len_impl_use = 0
    # FIXME: need to let the user set this
    self.impl_use = b''
    self.len_fi = 0
    if not isparent:
        bytename = name.decode('utf-8')
        try:
            self.fi = bytename.encode('latin-1')
            self.encoding = 'latin-1'  # depends on [control=['try'], data=[]]
        except UnicodeEncodeError:
            self.fi = bytename.encode('utf-16_be')
            self.encoding = 'utf-16_be'  # depends on [control=['except'], data=[]]
        self.len_fi = len(self.fi) + 1  # depends on [control=['if'], data=[]]
    self.parent = parent
    self._initialized = True
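The two bit-ORs above build ECMA-167's file-characteristics byte: bit 1 (0x02) marks a directory and bit 3 (0x08) marks the parent (..) entry, so a parent directory entry ends up with 0x0A. A quick check:

    # Directory that is also the parent entry: 0x2 | 0x8 == 0x0A.
    file_characteristics = 0
    file_characteristics |= 0x2  # directory bit
    file_characteristics |= 0x8  # parent ("..") bit
    assert file_characteristics == 0x0A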
def _set_policy(self, v, load=False):
    """
    Setter method for policy, mapped from YANG variable /mpls_state/policy (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_policy is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_policy() directly.

    YANG Description: MPLS Policy
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=policy.policy, is_container='container', presence=False, yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-policy', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """policy must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=policy.policy, is_container='container', presence=False, yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-policy', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)""",
        })

    self.__policy = t
    if hasattr(self, '_set'):
        self._set()
def function[_set_policy, parameter[self, v, load]]: constant[ Setter method for policy, mapped from YANG variable /mpls_state/policy (container) If this variable is read-only (config: false) in the source YANG file, then _set_policy is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_policy() directly. YANG Description: MPLS Policy ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da207f02a70> name[self].__policy assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_policy] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[policy] . identifier[policy] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[False] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__policy] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_policy(self, v, load=False):
    """
    Setter method for policy, mapped from YANG variable /mpls_state/policy (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_policy is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_policy() directly.

    YANG Description: MPLS Policy
    """
    if hasattr(v, '_utype'):
        v = v._utype(v)  # depends on [control=['if'], data=[]]
    try:
        t = YANGDynClass(v, base=policy.policy, is_container='container', presence=False, yang_name='policy', rest_name='policy', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-policy', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)  # depends on [control=['try'], data=[]]
    except (TypeError, ValueError):
        raise ValueError({'error-string': 'policy must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=policy.policy, is_container=\'container\', presence=False, yang_name="policy", rest_name="policy", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'mpls-policy\', u\'cli-suppress-show-path\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-mpls-operational\', defining_module=\'brocade-mpls-operational\', yang_type=\'container\', is_config=False)'})  # depends on [control=['except'], data=[]]
    self.__policy = t
    if hasattr(self, '_set'):
        self._set()  # depends on [control=['if'], data=[]]
def get_next_batch(self):
    """
    This method is called from the manager. It must return a list or a generator
    of BaseRecord objects.
    When it has nothing else to read, it must set class variable "finished" to True.
    """
    messages = self.get_from_kafka()
    if messages:
        for message in messages:
            item = BaseRecord(message)
            self.increase_read()
            yield item
    self.logger.debug('Done reading batch')
    self.last_position = self.consumer.offsets
def function[get_next_batch, parameter[self]]: constant[ This method is called from the manager. It must return a list or a generator of BaseRecord objects. When it has nothing else to read, it must set class variable "finished" to True. ] variable[messages] assign[=] call[name[self].get_from_kafka, parameter[]] if name[messages] begin[:] for taget[name[message]] in starred[name[messages]] begin[:] variable[item] assign[=] call[name[BaseRecord], parameter[name[message]]] call[name[self].increase_read, parameter[]] <ast.Yield object at 0x7da1b11915d0> call[name[self].logger.debug, parameter[constant[Done reading batch]]] name[self].last_position assign[=] name[self].consumer.offsets
keyword[def] identifier[get_next_batch] ( identifier[self] ): literal[string] identifier[messages] = identifier[self] . identifier[get_from_kafka] () keyword[if] identifier[messages] : keyword[for] identifier[message] keyword[in] identifier[messages] : identifier[item] = identifier[BaseRecord] ( identifier[message] ) identifier[self] . identifier[increase_read] () keyword[yield] identifier[item] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) identifier[self] . identifier[last_position] = identifier[self] . identifier[consumer] . identifier[offsets]
def get_next_batch(self):
    """
    This method is called from the manager. It must return a list or a generator
    of BaseRecord objects.
    When it has nothing else to read, it must set class variable "finished" to True.
    """
    messages = self.get_from_kafka()
    if messages:
        for message in messages:
            item = BaseRecord(message)
            self.increase_read()
            yield item  # depends on [control=['for'], data=['message']]
        # depends on [control=['if'], data=[]]
    self.logger.debug('Done reading batch')
    self.last_position = self.consumer.offsets
def _const_node_to_py_ast(ctx: GeneratorContext, lisp_ast: Const) -> GeneratedPyAST:
    """Generate Python AST nodes for a :const Lisp AST node.

    Nested values in collections for :const nodes are not parsed. Consequently,
    this function cannot be called recursively for those nested values. Instead,
    call `_const_val_to_py_ast` on nested values."""
    assert lisp_ast.op == NodeOp.CONST
    node_type = lisp_ast.type
    handle_const_node = _CONSTANT_HANDLER.get(node_type)
    assert handle_const_node is not None, f"No :const AST type handler for {node_type}"
    node_val = lisp_ast.val
    return handle_const_node(ctx, node_val)
def function[_const_node_to_py_ast, parameter[ctx, lisp_ast]]: constant[Generate Python AST nodes for a :const Lisp AST node. Nested values in collections for :const nodes are not parsed. Consequently, this function cannot be called recursively for those nested values. Instead, call `_const_val_to_py_ast` on nested values.] assert[compare[name[lisp_ast].op equal[==] name[NodeOp].CONST]] variable[node_type] assign[=] name[lisp_ast].type variable[handle_const_node] assign[=] call[name[_CONSTANT_HANDLER].get, parameter[name[node_type]]] assert[compare[name[handle_const_node] is_not constant[None]]] variable[node_val] assign[=] name[lisp_ast].val return[call[name[handle_const_node], parameter[name[ctx], name[node_val]]]]
keyword[def] identifier[_const_node_to_py_ast] ( identifier[ctx] : identifier[GeneratorContext] , identifier[lisp_ast] : identifier[Const] )-> identifier[GeneratedPyAST] : literal[string] keyword[assert] identifier[lisp_ast] . identifier[op] == identifier[NodeOp] . identifier[CONST] identifier[node_type] = identifier[lisp_ast] . identifier[type] identifier[handle_const_node] = identifier[_CONSTANT_HANDLER] . identifier[get] ( identifier[node_type] ) keyword[assert] identifier[handle_const_node] keyword[is] keyword[not] keyword[None] , literal[string] identifier[node_val] = identifier[lisp_ast] . identifier[val] keyword[return] identifier[handle_const_node] ( identifier[ctx] , identifier[node_val] )
def _const_node_to_py_ast(ctx: GeneratorContext, lisp_ast: Const) -> GeneratedPyAST:
    """Generate Python AST nodes for a :const Lisp AST node.

    Nested values in collections for :const nodes are not parsed. Consequently,
    this function cannot be called recursively for those nested values. Instead,
    call `_const_val_to_py_ast` on nested values."""
    assert lisp_ast.op == NodeOp.CONST
    node_type = lisp_ast.type
    handle_const_node = _CONSTANT_HANDLER.get(node_type)
    assert handle_const_node is not None, f'No :const AST type handler for {node_type}'
    node_val = lisp_ast.val
    return handle_const_node(ctx, node_val)
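_CONSTANT_HANDLER here is a dispatch table mapping constant node types to per-type emitter functions, which keeps the function itself branch-free. A minimal, generic sketch of the same pattern; the handlers below are invented stand-ins, not basilisp's real emitters:

    # Generic dispatch-table sketch with illustrative handlers.
    def _handle_int(ctx, val):
        return f"int({val})"

    def _handle_str(ctx, val):
        return f"str({val!r})"

    _CONSTANT_HANDLER = {int: _handle_int, str: _handle_str}

    def const_to_code(ctx, val):
        handler = _CONSTANT_HANDLER.get(type(val))
        assert handler is not None, f"No handler for {type(val)}"
        return handler(ctx, val)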
def convertor(geometry, method="wgs2gcj"):
    """
    convert wgs84 to gcj
    referencing by https://github.com/wandergis/coordTransform_py
    """
    if geometry['type'] == 'Point':
        coords = geometry['coordinates']
        coords[0], coords[1] = methods[method](coords[0], coords[1])
    elif geometry['type'] == 'LineString' or geometry['type'] == 'MultiPoint':
        coordinates = geometry['coordinates']
        for coords in coordinates:
            coords[0], coords[1] = methods[method](coords[0], coords[1])
    elif geometry['type'] == 'Polygon' or geometry['type'] == 'MultiLineString':
        coordinates = geometry['coordinates']
        for rings in coordinates:
            for coords in rings:
                coords[0], coords[1] = methods[method](coords[0], coords[1])
    elif geometry['type'] == 'MultiPolygon':
        coordinates = geometry['coordinates']
        for rings in coordinates:
            for lines in rings:
                for coords in lines:
                    coords[0], coords[1] = methods[method](coords[0], coords[1])
    return geometry
def function[convertor, parameter[geometry, method]]: constant[ convert wgs84 to gcj referencing by https://github.com/wandergis/coordTransform_py ] if compare[call[name[geometry]][constant[type]] equal[==] constant[Point]] begin[:] variable[coords] assign[=] call[name[geometry]][constant[coordinates]] <ast.Tuple object at 0x7da1b1040f10> assign[=] call[call[name[methods]][name[method]], parameter[call[name[coords]][constant[0]], call[name[coords]][constant[1]]]] return[name[geometry]]
keyword[def] identifier[convertor] ( identifier[geometry] , identifier[method] = literal[string] ): literal[string] keyword[if] identifier[geometry] [ literal[string] ]== literal[string] : identifier[coords] = identifier[geometry] [ literal[string] ] identifier[coords] [ literal[int] ], identifier[coords] [ literal[int] ]= identifier[methods] [ identifier[method] ]( identifier[coords] [ literal[int] ], identifier[coords] [ literal[int] ]) keyword[elif] identifier[geometry] [ literal[string] ]== literal[string] keyword[or] identifier[geometry] [ literal[string] ]== literal[string] : identifier[coordinates] = identifier[geometry] [ literal[string] ] keyword[for] identifier[coords] keyword[in] identifier[coordinates] : identifier[coords] [ literal[int] ], identifier[coords] [ literal[int] ]= identifier[methods] [ identifier[method] ]( identifier[coords] [ literal[int] ], identifier[coords] [ literal[int] ]) keyword[elif] identifier[geometry] [ literal[string] ]== literal[string] keyword[or] identifier[geometry] [ literal[string] ]== literal[string] : identifier[coordinates] = identifier[geometry] [ literal[string] ] keyword[for] identifier[rings] keyword[in] identifier[coordinates] : keyword[for] identifier[coords] keyword[in] identifier[rings] : identifier[coords] [ literal[int] ], identifier[coords] [ literal[int] ]= identifier[methods] [ identifier[method] ]( identifier[coords] [ literal[int] ], identifier[coords] [ literal[int] ]) keyword[elif] identifier[geometry] [ literal[string] ]== literal[string] : identifier[coordinates] = identifier[geometry] [ literal[string] ] keyword[for] identifier[rings] keyword[in] identifier[coordinates] : keyword[for] identifier[lines] keyword[in] identifier[rings] : keyword[for] identifier[coords] keyword[in] identifier[lines] : identifier[coords] [ literal[int] ], identifier[coords] [ literal[int] ]= identifier[methods] [ identifier[method] ]( identifier[coords] [ literal[int] ], identifier[coords] [ literal[int] ]) keyword[return] identifier[geometry]
def convertor(geometry, method='wgs2gcj'):
    """
    convert wgs84 to gcj
    referencing by https://github.com/wandergis/coordTransform_py
    """
    if geometry['type'] == 'Point':
        coords = geometry['coordinates']
        (coords[0], coords[1]) = methods[method](coords[0], coords[1])  # depends on [control=['if'], data=[]]
    elif geometry['type'] == 'LineString' or geometry['type'] == 'MultiPoint':
        coordinates = geometry['coordinates']
        for coords in coordinates:
            (coords[0], coords[1]) = methods[method](coords[0], coords[1])  # depends on [control=['for'], data=['coords']]
        # depends on [control=['if'], data=[]]
    elif geometry['type'] == 'Polygon' or geometry['type'] == 'MultiLineString':
        coordinates = geometry['coordinates']
        for rings in coordinates:
            for coords in rings:
                (coords[0], coords[1]) = methods[method](coords[0], coords[1])  # depends on [control=['for'], data=['coords']]
            # depends on [control=['for'], data=['rings']]
        # depends on [control=['if'], data=[]]
    elif geometry['type'] == 'MultiPolygon':
        coordinates = geometry['coordinates']
        for rings in coordinates:
            for lines in rings:
                for coords in lines:
                    (coords[0], coords[1]) = methods[method](coords[0], coords[1])  # depends on [control=['for'], data=['coords']]
                # depends on [control=['for'], data=['lines']]
            # depends on [control=['for'], data=['rings']]
        # depends on [control=['if'], data=[]]
    return geometry
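Note that convertor() mutates the coordinate lists in place and returns the same geometry dict. A quick usage sketch; the methods lookup table is assumed to map names like 'wgs2gcj' to (lng, lat) -> (lng, lat) transforms, as in coordTransform_py, and a no-op lambda stands in for the real math here:

    # Illustrative transform table; coordTransform_py's real entry would
    # be something like wgs84_to_gcj02.
    methods = {'wgs2gcj': lambda lng, lat: (lng, lat)}

    point = {'type': 'Point', 'coordinates': [116.397, 39.909]}
    convertor(point)             # mutates point['coordinates'] in place
    print(point['coordinates'])  # transformed (lng, lat)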
def get_command(self, ctx: click.Context, name: str) -> click.Command:
    """Return the relevant command given the context and name.

    .. warning::
        This differs substantially from Flask in that it allows for the
        inbuilt commands to be overridden.
    """
    info = ctx.ensure_object(ScriptInfo)
    command = None
    try:
        command = info.load_app().cli.get_command(ctx, name)
    except NoAppException:
        pass
    if command is None:
        command = super().get_command(ctx, name)
    return command
def function[get_command, parameter[self, ctx, name]]: constant[Return the relevant command given the context and name. .. warning:: This differs substaintially from Flask in that it allows for the inbuilt commands to be overridden. ] variable[info] assign[=] call[name[ctx].ensure_object, parameter[name[ScriptInfo]]] variable[command] assign[=] constant[None] <ast.Try object at 0x7da1b17f89a0> if compare[name[command] is constant[None]] begin[:] variable[command] assign[=] call[call[name[super], parameter[]].get_command, parameter[name[ctx], name[name]]] return[name[command]]
keyword[def] identifier[get_command] ( identifier[self] , identifier[ctx] : identifier[click] . identifier[Context] , identifier[name] : identifier[str] )-> identifier[click] . identifier[Command] : literal[string] identifier[info] = identifier[ctx] . identifier[ensure_object] ( identifier[ScriptInfo] ) identifier[command] = keyword[None] keyword[try] : identifier[command] = identifier[info] . identifier[load_app] (). identifier[cli] . identifier[get_command] ( identifier[ctx] , identifier[name] ) keyword[except] identifier[NoAppException] : keyword[pass] keyword[if] identifier[command] keyword[is] keyword[None] : identifier[command] = identifier[super] (). identifier[get_command] ( identifier[ctx] , identifier[name] ) keyword[return] identifier[command]
def get_command(self, ctx: click.Context, name: str) -> click.Command:
    """Return the relevant command given the context and name.

    .. warning::
        This differs substantially from Flask in that it allows for the
        inbuilt commands to be overridden.
    """
    info = ctx.ensure_object(ScriptInfo)
    command = None
    try:
        command = info.load_app().cli.get_command(ctx, name) # depends on [control=['try'], data=[]]
    except NoAppException:
        pass # depends on [control=['except'], data=[]]
    if command is None:
        command = super().get_command(ctx, name) # depends on [control=['if'], data=['command']]
    return command
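The same override pattern, sketched as a self-contained `click.Group` subclass: app-provided commands are consulted first, and the built-ins are only a fallback. `OverridableGroup` and `app_commands` are illustrative names; `ScriptInfo` and `NoAppException` above come from the surrounding CLI module.

import click

class OverridableGroup(click.Group):
    # Illustrative sketch: commands supplied by the app shadow the built-ins.
    def __init__(self, *args, app_commands=None, **kwargs):
        super().__init__(*args, **kwargs)
        self.app_commands = app_commands or {}  # name -> click.Command

    def get_command(self, ctx, name):
        # App commands take priority, mirroring get_command above.
        return self.app_commands.get(name) or super().get_command(ctx, name)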
def add_samples_stats(samples_table, samples):
    """
    Add stats fields to samples table.

    The following information is added to each row:
    - Notes (warnings, errors) resulting from the analysis
    - Number of Events
    - Acquisition Time (s)

    The following information is added for each row, for each channel in
    which fluorescence units have been specified:
    - Detector voltage (gain)
    - Amplification type
    - Mean
    - Geometric Mean
    - Median
    - Mode
    - Standard Deviation
    - Coefficient of Variation (CV)
    - Geometric Standard Deviation
    - Geometric Coefficient of Variation
    - Inter-Quartile Range
    - Robust Coefficient of Variation (RCV)

    Parameters
    ----------
    samples_table : DataFrame
        Table specifying samples to analyze. For more information about
        the fields required in this table, please consult the module's
        documentation.
    samples : list
        FCSData objects from which to calculate statistics. ``samples[i]``
        should correspond to ``samples_table.iloc[i]``.

    Notes
    -----
    Geometric statistics (geometric mean, standard deviation, and
    geometric coefficient of variation) are defined only for positive
    data. If there are negative events in any relevant channel of any
    member of `samples`, geometric statistics will only be calculated on
    the positive events, and a warning message will be written to the
    "Analysis Notes" field.

    """
    # The index name is not preserved if samples_table is empty.
    # Save the index name for later
    samples_table_index_name = samples_table.index.name

    # Add per-row info
    notes = []
    n_events = []
    acq_time = []
    for sample in samples:
        # Check if sample is an exception, otherwise assume it's an FCSData
        if isinstance(sample, ExcelUIException):
            # Print error message
            notes.append("ERROR: {}".format(str(sample)))
            n_events.append(np.nan)
            acq_time.append(np.nan)
        else:
            notes.append('')
            n_events.append(sample.shape[0])
            acq_time.append(sample.acquisition_time)
    samples_table['Analysis Notes'] = notes
    samples_table['Number of Events'] = n_events
    samples_table['Acquisition Time (s)'] = acq_time

    # List of channels that require stats columns
    headers = list(samples_table.columns)
    stats_headers = [h for h in headers if re_units.match(h)]
    stats_channels = [re_units.match(h).group(1) for h in stats_headers]

    # Iterate through channels
    for header, channel in zip(stats_headers, stats_channels):
        # Add empty columns to table
        samples_table[channel + ' Detector Volt.'] = np.nan
        samples_table[channel + ' Amp. Type'] = ""
        samples_table[channel + ' Mean'] = np.nan
        samples_table[channel + ' Geom. Mean'] = np.nan
        samples_table[channel + ' Median'] = np.nan
        samples_table[channel + ' Mode'] = np.nan
        samples_table[channel + ' Std'] = np.nan
        samples_table[channel + ' CV'] = np.nan
        samples_table[channel + ' Geom. Std'] = np.nan
        samples_table[channel + ' Geom. CV'] = np.nan
        samples_table[channel + ' IQR'] = np.nan
        samples_table[channel + ' RCV'] = np.nan
        for row_id, sample in zip(samples_table.index, samples):
            # If error, skip
            if isinstance(sample, ExcelUIException):
                continue
            # If units are specified, calculate stats. If not, leave empty.
            if pd.notnull(samples_table[header][row_id]):
                # Acquisition settings
                # Detector voltage
                # Dataframes, such as samples_table, are modified
                # differently depending on pandas' version.
                if packaging.version.parse(pd.__version__) \
                        < packaging.version.parse('0.21'):
                    samples_table.set_value(row_id,
                                            channel + ' Detector Volt.',
                                            sample.detector_voltage(channel))
                else:
                    samples_table.at[row_id, channel + ' Detector Volt.'] = \
                        sample.detector_voltage(channel)
                # Amplification type
                if sample.amplification_type(channel)[0]:
                    amplification_type = "Log"
                else:
                    amplification_type = "Linear"
                # Dataframes, such as samples_table, are modified
                # differently depending on pandas' version.
                if packaging.version.parse(pd.__version__) \
                        < packaging.version.parse('0.21'):
                    samples_table.set_value(row_id,
                                            channel + ' Amp. Type',
                                            amplification_type)
                else:
                    samples_table.at[row_id, channel + ' Amp. Type'] = \
                        amplification_type

                # Statistics from event list
                # Dataframes, such as samples_table, are modified
                # differently depending on pandas' version.
                if packaging.version.parse(pd.__version__) \
                        < packaging.version.parse('0.21'):
                    samples_table.set_value(row_id, channel + ' Mean',
                                            FlowCal.stats.mean(sample, channel))
                    samples_table.set_value(row_id, channel + ' Median',
                                            FlowCal.stats.median(sample, channel))
                    samples_table.set_value(row_id, channel + ' Mode',
                                            FlowCal.stats.mode(sample, channel))
                    samples_table.set_value(row_id, channel + ' Std',
                                            FlowCal.stats.std(sample, channel))
                    samples_table.set_value(row_id, channel + ' CV',
                                            FlowCal.stats.cv(sample, channel))
                    samples_table.set_value(row_id, channel + ' IQR',
                                            FlowCal.stats.iqr(sample, channel))
                    samples_table.set_value(row_id, channel + ' RCV',
                                            FlowCal.stats.rcv(sample, channel))
                else:
                    samples_table.at[row_id, channel + ' Mean'] = \
                        FlowCal.stats.mean(sample, channel)
                    samples_table.at[row_id, channel + ' Median'] = \
                        FlowCal.stats.median(sample, channel)
                    samples_table.at[row_id, channel + ' Mode'] = \
                        FlowCal.stats.mode(sample, channel)
                    samples_table.at[row_id, channel + ' Std'] = \
                        FlowCal.stats.std(sample, channel)
                    samples_table.at[row_id, channel + ' CV'] = \
                        FlowCal.stats.cv(sample, channel)
                    samples_table.at[row_id, channel + ' IQR'] = \
                        FlowCal.stats.iqr(sample, channel)
                    samples_table.at[row_id, channel + ' RCV'] = \
                        FlowCal.stats.rcv(sample, channel)

                # For geometric statistics, first check for non-positive events.
                # If found, throw a warning and calculate statistics on positive
                # events only.
                if np.any(sample[:, channel] <= 0):
                    # Separate positive events
                    sample_positive = sample[sample[:, channel] > 0]
                    # Throw warning
                    msg = "Geometric statistics for channel" + \
                        " {} calculated on positive events".format(channel) + \
                        " only ({:.1f}%). ".format(
                            100.*sample_positive.shape[0]/sample.shape[0])
                    warnings.warn("On sample {}: {}".format(row_id, msg))
                    # Write warning message to table
                    if samples_table.loc[row_id, 'Analysis Notes']:
                        msg = samples_table.loc[row_id, 'Analysis Notes'] + msg
                    # Dataframes, such as samples_table, are modified
                    # differently depending on pandas' version.
                    if packaging.version.parse(pd.__version__) \
                            < packaging.version.parse('0.21'):
                        samples_table.set_value(row_id, 'Analysis Notes', msg)
                    else:
                        samples_table.at[row_id, 'Analysis Notes'] = msg
                else:
                    sample_positive = sample

                # Calculate and write geometric statistics
                # Dataframes, such as samples_table, are modified
                # differently depending on pandas' version.
                if packaging.version.parse(pd.__version__) \
                        < packaging.version.parse('0.21'):
                    samples_table.set_value(
                        row_id,
                        channel + ' Geom. Mean',
                        FlowCal.stats.gmean(sample_positive, channel))
                    samples_table.set_value(
                        row_id,
                        channel + ' Geom. Std',
                        FlowCal.stats.gstd(sample_positive, channel))
                    samples_table.set_value(
                        row_id,
                        channel + ' Geom. CV',
                        FlowCal.stats.gcv(sample_positive, channel))
                else:
                    samples_table.at[row_id, channel + ' Geom. Mean'] = \
                        FlowCal.stats.gmean(sample_positive, channel)
                    samples_table.at[row_id, channel + ' Geom. Std'] = \
                        FlowCal.stats.gstd(sample_positive, channel)
                    samples_table.at[row_id, channel + ' Geom. CV'] = \
                        FlowCal.stats.gcv(sample_positive, channel)

    # Restore index name if table is empty
    if len(samples_table) == 0:
        samples_table.index.name = samples_table_index_name
def function[add_samples_stats, parameter[samples_table, samples]]: constant[ Add stats fields to samples table. The following information is added to each row: - Notes (warnings, errors) resulting from the analysis - Number of Events - Acquisition Time (s) The following information is added for each row, for each channel in which fluorescence units have been specified: - Detector voltage (gain) - Amplification type - Mean - Geometric Mean - Median - Mode - Standard Deviation - Coefficient of Variation (CV) - Geometric Standard Deviation - Geometric Coefficient of Variation - Inter-Quartile Range - Robust Coefficient of Variation (RCV) Parameters ---------- samples_table : DataFrame Table specifying samples to analyze. For more information about the fields required in this table, please consult the module's documentation. samples : list FCSData objects from which to calculate statistics. ``samples[i]`` should correspond to ``samples_table.iloc[i]``. Notes ----- Geometric statistics (geometric mean, standard deviation, and geometric coefficient of variation) are defined only for positive data. If there are negative events in any relevant channel of any member of `samples`, geometric statistics will only be calculated on the positive events, and a warning message will be written to the "Analysis Notes" field. ] variable[samples_table_index_name] assign[=] name[samples_table].index.name variable[notes] assign[=] list[[]] variable[n_events] assign[=] list[[]] variable[acq_time] assign[=] list[[]] for taget[name[sample]] in starred[name[samples]] begin[:] if call[name[isinstance], parameter[name[sample], name[ExcelUIException]]] begin[:] call[name[notes].append, parameter[call[constant[ERROR: {}].format, parameter[call[name[str], parameter[name[sample]]]]]]] call[name[n_events].append, parameter[name[np].nan]] call[name[acq_time].append, parameter[name[np].nan]] call[name[samples_table]][constant[Analysis Notes]] assign[=] name[notes] call[name[samples_table]][constant[Number of Events]] assign[=] name[n_events] call[name[samples_table]][constant[Acquisition Time (s)]] assign[=] name[acq_time] variable[headers] assign[=] call[name[list], parameter[name[samples_table].columns]] variable[stats_headers] assign[=] <ast.ListComp object at 0x7da204622e30> variable[stats_channels] assign[=] <ast.ListComp object at 0x7da2046214b0> for taget[tuple[[<ast.Name object at 0x7da204620880>, <ast.Name object at 0x7da2046235e0>]]] in starred[call[name[zip], parameter[name[stats_headers], name[stats_channels]]]] begin[:] call[name[samples_table]][binary_operation[name[channel] + constant[ Detector Volt.]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ Amp. Type]]] assign[=] constant[] call[name[samples_table]][binary_operation[name[channel] + constant[ Mean]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ Geom. Mean]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ Median]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ Mode]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ Std]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ CV]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ Geom. Std]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ Geom. 
CV]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ IQR]]] assign[=] name[np].nan call[name[samples_table]][binary_operation[name[channel] + constant[ RCV]]] assign[=] name[np].nan for taget[tuple[[<ast.Name object at 0x7da1b1ca1480>, <ast.Name object at 0x7da1b1ca1d80>]]] in starred[call[name[zip], parameter[name[samples_table].index, name[samples]]]] begin[:] if call[name[isinstance], parameter[name[sample], name[ExcelUIException]]] begin[:] continue if call[name[pd].notnull, parameter[call[call[name[samples_table]][name[header]]][name[row_id]]]] begin[:] if compare[call[name[packaging].version.parse, parameter[name[pd].__version__]] less[<] call[name[packaging].version.parse, parameter[constant[0.21]]]] begin[:] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ Detector Volt.]], call[name[sample].detector_voltage, parameter[name[channel]]]]] if call[call[name[sample].amplification_type, parameter[name[channel]]]][constant[0]] begin[:] variable[amplification_type] assign[=] constant[Log] if compare[call[name[packaging].version.parse, parameter[name[pd].__version__]] less[<] call[name[packaging].version.parse, parameter[constant[0.21]]]] begin[:] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ Amp. Type]], name[amplification_type]]] if compare[call[name[packaging].version.parse, parameter[name[pd].__version__]] less[<] call[name[packaging].version.parse, parameter[constant[0.21]]]] begin[:] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ Mean]], call[name[FlowCal].stats.mean, parameter[name[sample], name[channel]]]]] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ Median]], call[name[FlowCal].stats.median, parameter[name[sample], name[channel]]]]] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ Mode]], call[name[FlowCal].stats.mode, parameter[name[sample], name[channel]]]]] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ Std]], call[name[FlowCal].stats.std, parameter[name[sample], name[channel]]]]] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ CV]], call[name[FlowCal].stats.cv, parameter[name[sample], name[channel]]]]] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ IQR]], call[name[FlowCal].stats.iqr, parameter[name[sample], name[channel]]]]] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ RCV]], call[name[FlowCal].stats.rcv, parameter[name[sample], name[channel]]]]] if call[name[np].any, parameter[compare[call[name[sample]][tuple[[<ast.Slice object at 0x7da20e963b80>, <ast.Name object at 0x7da20e961a50>]]] less_or_equal[<=] constant[0]]]] begin[:] variable[sample_positive] assign[=] call[name[sample]][compare[call[name[sample]][tuple[[<ast.Slice object at 0x7da20e960550>, <ast.Name object at 0x7da20e962110>]]] greater[>] constant[0]]] variable[msg] assign[=] binary_operation[binary_operation[constant[Geometric statistics for channel] + call[constant[ {} calculated on positive events].format, parameter[name[channel]]]] + call[constant[ only ({:.1f}%). 
].format, parameter[binary_operation[binary_operation[constant[100.0] * call[name[sample_positive].shape][constant[0]]] / call[name[sample].shape][constant[0]]]]]] call[name[warnings].warn, parameter[call[constant[On sample {}: {}].format, parameter[name[row_id], name[msg]]]]] if call[name[samples_table].loc][tuple[[<ast.Name object at 0x7da18dc06560>, <ast.Constant object at 0x7da18dc04520>]]] begin[:] variable[msg] assign[=] binary_operation[call[name[samples_table].loc][tuple[[<ast.Name object at 0x7da18dc07ee0>, <ast.Constant object at 0x7da18dc04250>]]] + name[msg]] if compare[call[name[packaging].version.parse, parameter[name[pd].__version__]] less[<] call[name[packaging].version.parse, parameter[constant[0.21]]]] begin[:] call[name[samples_table].set_value, parameter[name[row_id], constant[Analysis Notes], name[msg]]] if compare[call[name[packaging].version.parse, parameter[name[pd].__version__]] less[<] call[name[packaging].version.parse, parameter[constant[0.21]]]] begin[:] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ Geom. Mean]], call[name[FlowCal].stats.gmean, parameter[name[sample_positive], name[channel]]]]] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ Geom. Std]], call[name[FlowCal].stats.gstd, parameter[name[sample_positive], name[channel]]]]] call[name[samples_table].set_value, parameter[name[row_id], binary_operation[name[channel] + constant[ Geom. CV]], call[name[FlowCal].stats.gcv, parameter[name[sample_positive], name[channel]]]]] if compare[call[name[len], parameter[name[samples_table]]] equal[==] constant[0]] begin[:] name[samples_table].index.name assign[=] name[samples_table_index_name]
keyword[def] identifier[add_samples_stats] ( identifier[samples_table] , identifier[samples] ): literal[string] identifier[samples_table_index_name] = identifier[samples_table] . identifier[index] . identifier[name] identifier[notes] =[] identifier[n_events] =[] identifier[acq_time] =[] keyword[for] identifier[sample] keyword[in] identifier[samples] : keyword[if] identifier[isinstance] ( identifier[sample] , identifier[ExcelUIException] ): identifier[notes] . identifier[append] ( literal[string] . identifier[format] ( identifier[str] ( identifier[sample] ))) identifier[n_events] . identifier[append] ( identifier[np] . identifier[nan] ) identifier[acq_time] . identifier[append] ( identifier[np] . identifier[nan] ) keyword[else] : identifier[notes] . identifier[append] ( literal[string] ) identifier[n_events] . identifier[append] ( identifier[sample] . identifier[shape] [ literal[int] ]) identifier[acq_time] . identifier[append] ( identifier[sample] . identifier[acquisition_time] ) identifier[samples_table] [ literal[string] ]= identifier[notes] identifier[samples_table] [ literal[string] ]= identifier[n_events] identifier[samples_table] [ literal[string] ]= identifier[acq_time] identifier[headers] = identifier[list] ( identifier[samples_table] . identifier[columns] ) identifier[stats_headers] =[ identifier[h] keyword[for] identifier[h] keyword[in] identifier[headers] keyword[if] identifier[re_units] . identifier[match] ( identifier[h] )] identifier[stats_channels] =[ identifier[re_units] . identifier[match] ( identifier[h] ). identifier[group] ( literal[int] ) keyword[for] identifier[h] keyword[in] identifier[stats_headers] ] keyword[for] identifier[header] , identifier[channel] keyword[in] identifier[zip] ( identifier[stats_headers] , identifier[stats_channels] ): identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= literal[string] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] identifier[samples_table] [ identifier[channel] + literal[string] ]= identifier[np] . identifier[nan] keyword[for] identifier[row_id] , identifier[sample] keyword[in] identifier[zip] ( identifier[samples_table] . identifier[index] , identifier[samples] ): keyword[if] identifier[isinstance] ( identifier[sample] , identifier[ExcelUIException] ): keyword[continue] keyword[if] identifier[pd] . identifier[notnull] ( identifier[samples_table] [ identifier[header] ][ identifier[row_id] ]): keyword[if] identifier[packaging] . identifier[version] . identifier[parse] ( identifier[pd] . identifier[__version__] )< identifier[packaging] . 
identifier[version] . identifier[parse] ( literal[string] ): identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[sample] . identifier[detector_voltage] ( identifier[channel] )) keyword[else] : identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[sample] . identifier[detector_voltage] ( identifier[channel] ) keyword[if] identifier[sample] . identifier[amplification_type] ( identifier[channel] )[ literal[int] ]: identifier[amplification_type] = literal[string] keyword[else] : identifier[amplification_type] = literal[string] keyword[if] identifier[packaging] . identifier[version] . identifier[parse] ( identifier[pd] . identifier[__version__] )< identifier[packaging] . identifier[version] . identifier[parse] ( literal[string] ): identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[amplification_type] ) keyword[else] : identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[amplification_type] keyword[if] identifier[packaging] . identifier[version] . identifier[parse] ( identifier[pd] . identifier[__version__] )< identifier[packaging] . identifier[version] . identifier[parse] ( literal[string] ): identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[mean] ( identifier[sample] , identifier[channel] )) identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[median] ( identifier[sample] , identifier[channel] )) identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[mode] ( identifier[sample] , identifier[channel] )) identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[std] ( identifier[sample] , identifier[channel] )) identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[cv] ( identifier[sample] , identifier[channel] )) identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[iqr] ( identifier[sample] , identifier[channel] )) identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[rcv] ( identifier[sample] , identifier[channel] )) keyword[else] : identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . identifier[mean] ( identifier[sample] , identifier[channel] ) identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . identifier[median] ( identifier[sample] , identifier[channel] ) identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . 
identifier[mode] ( identifier[sample] , identifier[channel] ) identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . identifier[std] ( identifier[sample] , identifier[channel] ) identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . identifier[cv] ( identifier[sample] , identifier[channel] ) identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . identifier[iqr] ( identifier[sample] , identifier[channel] ) identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . identifier[rcv] ( identifier[sample] , identifier[channel] ) keyword[if] identifier[np] . identifier[any] ( identifier[sample] [:, identifier[channel] ]<= literal[int] ): identifier[sample_positive] = identifier[sample] [ identifier[sample] [:, identifier[channel] ]> literal[int] ] identifier[msg] = literal[string] + literal[string] . identifier[format] ( identifier[channel] )+ literal[string] . identifier[format] ( literal[int] * identifier[sample_positive] . identifier[shape] [ literal[int] ]/ identifier[sample] . identifier[shape] [ literal[int] ]) identifier[warnings] . identifier[warn] ( literal[string] . identifier[format] ( identifier[row_id] , identifier[msg] )) keyword[if] identifier[samples_table] . identifier[loc] [ identifier[row_id] , literal[string] ]: identifier[msg] = identifier[samples_table] . identifier[loc] [ identifier[row_id] , literal[string] ]+ identifier[msg] keyword[if] identifier[packaging] . identifier[version] . identifier[parse] ( identifier[pd] . identifier[__version__] )< identifier[packaging] . identifier[version] . identifier[parse] ( literal[string] ): identifier[samples_table] . identifier[set_value] ( identifier[row_id] , literal[string] , identifier[msg] ) keyword[else] : identifier[samples_table] . identifier[at] [ identifier[row_id] , literal[string] ]= identifier[msg] keyword[else] : identifier[sample_positive] = identifier[sample] keyword[if] identifier[packaging] . identifier[version] . identifier[parse] ( identifier[pd] . identifier[__version__] )< identifier[packaging] . identifier[version] . identifier[parse] ( literal[string] ): identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[gmean] ( identifier[sample_positive] , identifier[channel] )) identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[gstd] ( identifier[sample_positive] , identifier[channel] )) identifier[samples_table] . identifier[set_value] ( identifier[row_id] , identifier[channel] + literal[string] , identifier[FlowCal] . identifier[stats] . identifier[gcv] ( identifier[sample_positive] , identifier[channel] )) keyword[else] : identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . identifier[gmean] ( identifier[sample_positive] , identifier[channel] ) identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . 
identifier[gstd] ( identifier[sample_positive] , identifier[channel] ) identifier[samples_table] . identifier[at] [ identifier[row_id] , identifier[channel] + literal[string] ]= identifier[FlowCal] . identifier[stats] . identifier[gcv] ( identifier[sample_positive] , identifier[channel] ) keyword[if] identifier[len] ( identifier[samples_table] )== literal[int] : identifier[samples_table] . identifier[index] . identifier[name] = identifier[samples_table_index_name]
def add_samples_stats(samples_table, samples):
    """
    Add stats fields to samples table.

    The following information is added to each row:
    - Notes (warnings, errors) resulting from the analysis
    - Number of Events
    - Acquisition Time (s)

    The following information is added for each row, for each channel in
    which fluorescence units have been specified:
    - Detector voltage (gain)
    - Amplification type
    - Mean
    - Geometric Mean
    - Median
    - Mode
    - Standard Deviation
    - Coefficient of Variation (CV)
    - Geometric Standard Deviation
    - Geometric Coefficient of Variation
    - Inter-Quartile Range
    - Robust Coefficient of Variation (RCV)

    Parameters
    ----------
    samples_table : DataFrame
        Table specifying samples to analyze. For more information about
        the fields required in this table, please consult the module's
        documentation.
    samples : list
        FCSData objects from which to calculate statistics. ``samples[i]``
        should correspond to ``samples_table.iloc[i]``.

    Notes
    -----
    Geometric statistics (geometric mean, standard deviation, and
    geometric coefficient of variation) are defined only for positive
    data. If there are negative events in any relevant channel of any
    member of `samples`, geometric statistics will only be calculated on
    the positive events, and a warning message will be written to the
    "Analysis Notes" field.

    """
    # The index name is not preserved if samples_table is empty.
    # Save the index name for later
    samples_table_index_name = samples_table.index.name
    # Add per-row info
    notes = []
    n_events = []
    acq_time = []
    for sample in samples:
        # Check if sample is an exception, otherwise assume it's an FCSData
        if isinstance(sample, ExcelUIException):
            # Print error message
            notes.append('ERROR: {}'.format(str(sample)))
            n_events.append(np.nan)
            acq_time.append(np.nan) # depends on [control=['if'], data=[]]
        else:
            notes.append('')
            n_events.append(sample.shape[0])
            acq_time.append(sample.acquisition_time) # depends on [control=['for'], data=['sample']]
    samples_table['Analysis Notes'] = notes
    samples_table['Number of Events'] = n_events
    samples_table['Acquisition Time (s)'] = acq_time
    # List of channels that require stats columns
    headers = list(samples_table.columns)
    stats_headers = [h for h in headers if re_units.match(h)]
    stats_channels = [re_units.match(h).group(1) for h in stats_headers]
    # Iterate through channels
    for (header, channel) in zip(stats_headers, stats_channels):
        # Add empty columns to table
        samples_table[channel + ' Detector Volt.'] = np.nan
        samples_table[channel + ' Amp. Type'] = ''
        samples_table[channel + ' Mean'] = np.nan
        samples_table[channel + ' Geom. Mean'] = np.nan
        samples_table[channel + ' Median'] = np.nan
        samples_table[channel + ' Mode'] = np.nan
        samples_table[channel + ' Std'] = np.nan
        samples_table[channel + ' CV'] = np.nan
        samples_table[channel + ' Geom. Std'] = np.nan
        samples_table[channel + ' Geom. CV'] = np.nan
        samples_table[channel + ' IQR'] = np.nan
        samples_table[channel + ' RCV'] = np.nan
        for (row_id, sample) in zip(samples_table.index, samples):
            # If error, skip
            if isinstance(sample, ExcelUIException):
                continue # depends on [control=['if'], data=[]]
            # If units are specified, calculate stats. If not, leave empty.
            if pd.notnull(samples_table[header][row_id]):
                # Acquisition settings
                # Detector voltage
                # Dataframes, such as samples_table, are modified
                # differently depending on pandas' version.
                if packaging.version.parse(pd.__version__) < packaging.version.parse('0.21'):
                    samples_table.set_value(row_id, channel + ' Detector Volt.', sample.detector_voltage(channel)) # depends on [control=['if'], data=[]]
                else:
                    samples_table.at[row_id, channel + ' Detector Volt.'] = sample.detector_voltage(channel)
                # Amplification type
                if sample.amplification_type(channel)[0]:
                    amplification_type = 'Log' # depends on [control=['if'], data=[]]
                else:
                    amplification_type = 'Linear'
                # Dataframes, such as samples_table, are modified
                # differently depending on pandas' version.
                if packaging.version.parse(pd.__version__) < packaging.version.parse('0.21'):
                    samples_table.set_value(row_id, channel + ' Amp. Type', amplification_type) # depends on [control=['if'], data=[]]
                else:
                    samples_table.at[row_id, channel + ' Amp. Type'] = amplification_type
                # Statistics from event list
                # Dataframes, such as samples_table, are modified
                # differently depending on pandas' version.
                if packaging.version.parse(pd.__version__) < packaging.version.parse('0.21'):
                    samples_table.set_value(row_id, channel + ' Mean', FlowCal.stats.mean(sample, channel))
                    samples_table.set_value(row_id, channel + ' Median', FlowCal.stats.median(sample, channel))
                    samples_table.set_value(row_id, channel + ' Mode', FlowCal.stats.mode(sample, channel))
                    samples_table.set_value(row_id, channel + ' Std', FlowCal.stats.std(sample, channel))
                    samples_table.set_value(row_id, channel + ' CV', FlowCal.stats.cv(sample, channel))
                    samples_table.set_value(row_id, channel + ' IQR', FlowCal.stats.iqr(sample, channel))
                    samples_table.set_value(row_id, channel + ' RCV', FlowCal.stats.rcv(sample, channel)) # depends on [control=['if'], data=[]]
                else:
                    samples_table.at[row_id, channel + ' Mean'] = FlowCal.stats.mean(sample, channel)
                    samples_table.at[row_id, channel + ' Median'] = FlowCal.stats.median(sample, channel)
                    samples_table.at[row_id, channel + ' Mode'] = FlowCal.stats.mode(sample, channel)
                    samples_table.at[row_id, channel + ' Std'] = FlowCal.stats.std(sample, channel)
                    samples_table.at[row_id, channel + ' CV'] = FlowCal.stats.cv(sample, channel)
                    samples_table.at[row_id, channel + ' IQR'] = FlowCal.stats.iqr(sample, channel)
                    samples_table.at[row_id, channel + ' RCV'] = FlowCal.stats.rcv(sample, channel)
                # For geometric statistics, first check for non-positive events.
                # If found, throw a warning and calculate statistics on positive
                # events only.
                if np.any(sample[:, channel] <= 0):
                    # Separate positive events
                    sample_positive = sample[sample[:, channel] > 0]
                    # Throw warning
                    msg = 'Geometric statistics for channel' + ' {} calculated on positive events'.format(channel) + ' only ({:.1f}%). '.format(100.0 * sample_positive.shape[0] / sample.shape[0])
                    warnings.warn('On sample {}: {}'.format(row_id, msg))
                    # Write warning message to table
                    if samples_table.loc[row_id, 'Analysis Notes']:
                        msg = samples_table.loc[row_id, 'Analysis Notes'] + msg # depends on [control=['if'], data=[]]
                    # Dataframes, such as samples_table, are modified
                    # differently depending on pandas' version.
                    if packaging.version.parse(pd.__version__) < packaging.version.parse('0.21'):
                        samples_table.set_value(row_id, 'Analysis Notes', msg) # depends on [control=['if'], data=[]]
                    else:
                        samples_table.at[row_id, 'Analysis Notes'] = msg # depends on [control=['if'], data=[]]
                else:
                    sample_positive = sample
                # Calculate and write geometric statistics
                # Dataframes, such as samples_table, are modified
                # differently depending on pandas' version.
                if packaging.version.parse(pd.__version__) < packaging.version.parse('0.21'):
                    samples_table.set_value(row_id, channel + ' Geom. Mean', FlowCal.stats.gmean(sample_positive, channel))
                    samples_table.set_value(row_id, channel + ' Geom. Std', FlowCal.stats.gstd(sample_positive, channel))
                    samples_table.set_value(row_id, channel + ' Geom. CV', FlowCal.stats.gcv(sample_positive, channel)) # depends on [control=['if'], data=[]]
                else:
                    samples_table.at[row_id, channel + ' Geom. Mean'] = FlowCal.stats.gmean(sample_positive, channel)
                    samples_table.at[row_id, channel + ' Geom. Std'] = FlowCal.stats.gstd(sample_positive, channel)
                    samples_table.at[row_id, channel + ' Geom. CV'] = FlowCal.stats.gcv(sample_positive, channel) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
    # Restore index name if table is empty
    if len(samples_table) == 0:
        samples_table.index.name = samples_table_index_name # depends on [control=['if'], data=[]]
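A hedged usage sketch for `add_samples_stats`. The units-column naming must match the module-level `re_units` pattern, which is defined elsewhere; the 'FL1 Units' header (with 'FL1' as the extracted channel name) and the FCS filename below are assumptions.

import pandas as pd
import FlowCal

samples_table = pd.DataFrame({'FL1 Units': ['MEF']}, index=['S0001'])
samples = [FlowCal.io.FCSData('sample001.fcs')]  # illustrative filename

add_samples_stats(samples_table, samples)
print(samples_table[['Number of Events', 'FL1 Mean', 'FL1 Geom. Mean']])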
def get_module_parser(mod, modname, parents=[], add_help=True):
    """
    Returns an argument parser for the sub-command's CLI.

    :param mod: the sub-command's python module
    :param modname: the string name of the python module
    :return: ArgumentParser
    """
    return argparse.ArgumentParser(
        usage=configuration.EXECUTABLE_NAME + ' ' + modname + ' [options]',
        description=mod.get_description(),
        parents=parents,
        add_help=add_help)
def function[get_module_parser, parameter[mod, modname, parents, add_help]]: constant[ Returns an argument parser for the sub-command's CLI. :param mod: the sub-command's python module :param modnam: the string name of the python module :return: ArgumentParser ] return[call[name[argparse].ArgumentParser, parameter[]]]
keyword[def] identifier[get_module_parser] ( identifier[mod] , identifier[modname] , identifier[parents] =[], identifier[add_help] = keyword[True] ): literal[string] keyword[return] identifier[argparse] . identifier[ArgumentParser] ( identifier[usage] = identifier[configuration] . identifier[EXECUTABLE_NAME] + literal[string] + identifier[modname] + literal[string] , identifier[description] = identifier[mod] . identifier[get_description] (), identifier[parents] = identifier[parents] , identifier[add_help] = identifier[add_help] )
def get_module_parser(mod, modname, parents=[], add_help=True):
    """
    Returns an argument parser for the sub-command's CLI.

    :param mod: the sub-command's python module
    :param modname: the string name of the python module
    :return: ArgumentParser
    """
    return argparse.ArgumentParser(usage=configuration.EXECUTABLE_NAME + ' ' + modname + ' [options]', description=mod.get_description(), parents=parents, add_help=add_help)
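A minimal call sketch for `get_module_parser`. `configuration.EXECUTABLE_NAME` is defined elsewhere in the package; the stub module below only exists to satisfy the `get_description()` contract.

class _StubModule:
    # Stand-in for a real sub-command module.
    @staticmethod
    def get_description():
        return 'Example sub-command.'

parser = get_module_parser(_StubModule, 'example')
parser.add_argument('--verbose', action='store_true')
args = parser.parse_args(['--verbose'])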
def write_gif(filename, images, duration=0.1, repeat=True, dither=False,
              nq=0, sub_rectangles=True, dispose=None):
    """ write_gif(filename, images, duration=0.1, repeat=True, dither=False,
    nq=0, sub_rectangles=True, dispose=None)

    Write an animated gif from the specified images.

    Parameters
    ----------
    filename : string
        The name of the file to write the image to.
    images : list
        Should be a list consisting of PIL images or numpy arrays.
        The latter should be between 0 and 255 for integer types, and
        between 0 and 1 for float types.
    duration : scalar or list of scalars
        The duration for all frames, or (if a list) for each frame.
    repeat : bool or integer
        The amount of loops. If True, loops infinitely.
    dither : bool
        Whether to apply dithering.
    nq : integer
        If nonzero, applies the NeuQuant quantization algorithm to create
        the color palette. This algorithm is superior, but slower than
        the standard PIL algorithm. The value of nq is the quality
        parameter. 1 represents the best quality. 10 is in general a good
        tradeoff between quality and speed. When using this option,
        better results are usually obtained when sub_rectangles is False.
    sub_rectangles : False, True, or a list of 2-element tuples
        Whether to use sub-rectangles. If True, the minimal rectangle that
        is required to update each frame is automatically detected. This
        can give significant reductions in file size, particularly if only
        a part of the image changes. One can also give a list of x-y
        coordinates if you want to do the cropping yourself. The default
        is True.
    dispose : int
        How to dispose each frame. 1 means that each frame is to be left
        in place. 2 means the background color should be restored after
        each frame. 3 means the decoder should restore the previous frame.
        If sub_rectangles==False, the default is 2, otherwise it is 1.

    """

    # Check PIL
    if PIL is None:
        raise RuntimeError("Need PIL to write animated gif files.")

    # Check images
    images = check_images(images)

    # Instantiate writer object
    gif_writer = GifWriter()
    gif_writer.transparency = False  # init transparency flag used in GifWriter functions

    # Check loops
    if repeat is False:
        loops = 1
    elif repeat is True:
        loops = 0  # zero means infinite
    else:
        loops = int(repeat)

    # Check duration
    if hasattr(duration, '__len__'):
        if len(duration) == len(images):
            duration = [d for d in duration]
        else:
            raise ValueError("len(duration) doesn't match amount of images.")
    else:
        duration = [duration for im in images]

    # Check subrectangles
    if sub_rectangles:
        images, xy, images_info = gif_writer.handle_sub_rectangles(images, sub_rectangles)
        default_dispose = 1  # Leave image in place
    else:
        # Normal mode
        xy = [(0, 0) for im in images]
        default_dispose = 2  # Restore to background color.

    # Check dispose
    if dispose is None:
        dispose = default_dispose
    if hasattr(dispose, '__len__'):
        if len(dispose) != len(images):
            raise ValueError("len(dispose) doesn't match amount of images.")
    else:
        dispose = [dispose for im in images]

    # Make images in a format that we can write easy
    images = gif_writer.convert_images_to_pil(images, dither, nq)

    # Write
    if isinstance(filename, basestring):
        fp = open(filename, 'wb')
    elif hasattr(filename, 'write'):
        fp = filename
    else:
        return
    try:
        gif_writer.write_gif_to_file(fp, images, duration, loops, xy, dispose)
    finally:
        fp.close()
def function[write_gif, parameter[filename, images, duration, repeat, dither, nq, sub_rectangles, dispose]]: constant[ write_gif(filename, images, duration=0.1, repeat=True, dither=False, nq=0, sub_rectangles=True, dispose=None) Write an animated gif from the specified images. Parameters ---------- filename : string The name of the file to write the image to. images : list Should be a list consisting of PIL images or numpy arrays. The latter should be between 0 and 255 for integer types, and between 0 and 1 for float types. duration : scalar or list of scalars The duration for all frames, or (if a list) for each frame. repeat : bool or integer The amount of loops. If True, loops infinitetely. dither : bool Whether to apply dithering nq : integer If nonzero, applies the NeuQuant quantization algorithm to create the color palette. This algorithm is superior, but slower than the standard PIL algorithm. The value of nq is the quality parameter. 1 represents the best quality. 10 is in general a good tradeoff between quality and speed. When using this option, better results are usually obtained when sub_rectangles is False. sub_rectangles : False, True, or a list of 2-element tuples Whether to use sub-rectangles. If True, the minimal rectangle that is required to update each frame is automatically detected. This can give significant reductions in file size, particularly if only a part of the image changes. One can also give a list of x-y coordinates if you want to do the cropping yourself. The default is True. dispose : int How to dispose each frame. 1 means that each frame is to be left in place. 2 means the background color should be restored after each frame. 3 means the decoder should restore the previous frame. If sub_rectangles==False, the default is 2, otherwise it is 1. ] if compare[name[PIL] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1397b20> variable[images] assign[=] call[name[check_images], parameter[name[images]]] variable[gif_writer] assign[=] call[name[GifWriter], parameter[]] name[gif_writer].transparency assign[=] constant[False] if compare[name[repeat] is constant[False]] begin[:] variable[loops] assign[=] constant[1] if call[name[hasattr], parameter[name[duration], constant[__len__]]] begin[:] if compare[call[name[len], parameter[name[duration]]] equal[==] call[name[len], parameter[name[images]]]] begin[:] variable[duration] assign[=] <ast.ListComp object at 0x7da1b1397190> if name[sub_rectangles] begin[:] <ast.Tuple object at 0x7da1b1396da0> assign[=] call[name[gif_writer].handle_sub_rectangles, parameter[name[images], name[sub_rectangles]]] variable[default_dispose] assign[=] constant[1] if compare[name[dispose] is constant[None]] begin[:] variable[dispose] assign[=] name[default_dispose] if call[name[hasattr], parameter[name[dispose], constant[__len__]]] begin[:] if compare[call[name[len], parameter[name[dispose]]] not_equal[!=] call[name[len], parameter[name[images]]]] begin[:] <ast.Raise object at 0x7da1b1335f30> variable[images] assign[=] call[name[gif_writer].convert_images_to_pil, parameter[name[images], name[dither], name[nq]]] if call[name[isinstance], parameter[name[filename], name[basestring]]] begin[:] variable[fp] assign[=] call[name[open], parameter[name[filename], constant[wb]]] <ast.Try object at 0x7da1b1335780>
keyword[def] identifier[write_gif] ( identifier[filename] , identifier[images] , identifier[duration] = literal[int] , identifier[repeat] = keyword[True] , identifier[dither] = keyword[False] , identifier[nq] = literal[int] , identifier[sub_rectangles] = keyword[True] , identifier[dispose] = keyword[None] ): literal[string] keyword[if] identifier[PIL] keyword[is] keyword[None] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[images] = identifier[check_images] ( identifier[images] ) identifier[gif_writer] = identifier[GifWriter] () identifier[gif_writer] . identifier[transparency] = keyword[False] keyword[if] identifier[repeat] keyword[is] keyword[False] : identifier[loops] = literal[int] keyword[elif] identifier[repeat] keyword[is] keyword[True] : identifier[loops] = literal[int] keyword[else] : identifier[loops] = identifier[int] ( identifier[repeat] ) keyword[if] identifier[hasattr] ( identifier[duration] , literal[string] ): keyword[if] identifier[len] ( identifier[duration] )== identifier[len] ( identifier[images] ): identifier[duration] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[duration] ] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[else] : identifier[duration] =[ identifier[duration] keyword[for] identifier[im] keyword[in] identifier[images] ] keyword[if] identifier[sub_rectangles] : identifier[images] , identifier[xy] , identifier[images_info] = identifier[gif_writer] . identifier[handle_sub_rectangles] ( identifier[images] , identifier[sub_rectangles] ) identifier[default_dispose] = literal[int] keyword[else] : identifier[xy] =[( literal[int] , literal[int] ) keyword[for] identifier[im] keyword[in] identifier[images] ] identifier[default_dispose] = literal[int] keyword[if] identifier[dispose] keyword[is] keyword[None] : identifier[dispose] = identifier[default_dispose] keyword[if] identifier[hasattr] ( identifier[dispose] , literal[string] ): keyword[if] identifier[len] ( identifier[dispose] )!= identifier[len] ( identifier[images] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[else] : identifier[dispose] =[ identifier[dispose] keyword[for] identifier[im] keyword[in] identifier[images] ] identifier[images] = identifier[gif_writer] . identifier[convert_images_to_pil] ( identifier[images] , identifier[dither] , identifier[nq] ) keyword[if] identifier[isinstance] ( identifier[filename] , identifier[basestring] ): identifier[fp] = identifier[open] ( identifier[filename] , literal[string] ) keyword[elif] identifier[hasattr] ( identifier[filename] , literal[string] ): identifier[fp] = identifier[filename] keyword[else] : keyword[return] keyword[try] : identifier[gif_writer] . identifier[write_gif_to_file] ( identifier[fp] , identifier[images] , identifier[duration] , identifier[loops] , identifier[xy] , identifier[dispose] ) keyword[finally] : identifier[fp] . identifier[close] ()
def write_gif(filename, images, duration=0.1, repeat=True, dither=False, nq=0, sub_rectangles=True, dispose=None):
    """ write_gif(filename, images, duration=0.1, repeat=True, dither=False,
    nq=0, sub_rectangles=True, dispose=None)

    Write an animated gif from the specified images.

    Parameters
    ----------
    filename : string
        The name of the file to write the image to.
    images : list
        Should be a list consisting of PIL images or numpy arrays.
        The latter should be between 0 and 255 for integer types, and
        between 0 and 1 for float types.
    duration : scalar or list of scalars
        The duration for all frames, or (if a list) for each frame.
    repeat : bool or integer
        The amount of loops. If True, loops infinitely.
    dither : bool
        Whether to apply dithering.
    nq : integer
        If nonzero, applies the NeuQuant quantization algorithm to create
        the color palette. This algorithm is superior, but slower than
        the standard PIL algorithm. The value of nq is the quality
        parameter. 1 represents the best quality. 10 is in general a good
        tradeoff between quality and speed. When using this option,
        better results are usually obtained when sub_rectangles is False.
    sub_rectangles : False, True, or a list of 2-element tuples
        Whether to use sub-rectangles. If True, the minimal rectangle that
        is required to update each frame is automatically detected. This
        can give significant reductions in file size, particularly if only
        a part of the image changes. One can also give a list of x-y
        coordinates if you want to do the cropping yourself. The default
        is True.
    dispose : int
        How to dispose each frame. 1 means that each frame is to be left
        in place. 2 means the background color should be restored after
        each frame. 3 means the decoder should restore the previous frame.
        If sub_rectangles==False, the default is 2, otherwise it is 1.

    """
    # Check PIL
    if PIL is None:
        raise RuntimeError('Need PIL to write animated gif files.') # depends on [control=['if'], data=[]]
    # Check images
    images = check_images(images)
    # Instantiate writer object
    gif_writer = GifWriter()
    gif_writer.transparency = False  # init transparency flag used in GifWriter functions
    # Check loops
    if repeat is False:
        loops = 1 # depends on [control=['if'], data=[]]
    elif repeat is True:
        loops = 0  # zero means infinite # depends on [control=['if'], data=[]]
    else:
        loops = int(repeat)
    # Check duration
    if hasattr(duration, '__len__'):
        if len(duration) == len(images):
            duration = [d for d in duration] # depends on [control=['if'], data=[]]
        else:
            raise ValueError("len(duration) doesn't match amount of images.") # depends on [control=['if'], data=[]]
    else:
        duration = [duration for im in images]
    # Check subrectangles
    if sub_rectangles:
        (images, xy, images_info) = gif_writer.handle_sub_rectangles(images, sub_rectangles)
        default_dispose = 1  # Leave image in place # depends on [control=['if'], data=[]]
    else:
        # Normal mode
        xy = [(0, 0) for im in images]
        default_dispose = 2  # Restore to background color.
    # Check dispose
    if dispose is None:
        dispose = default_dispose # depends on [control=['if'], data=['dispose']]
    if hasattr(dispose, '__len__'):
        if len(dispose) != len(images):
            raise ValueError("len(dispose) doesn't match amount of images.") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        dispose = [dispose for im in images]
    # Make images in a format that we can write easy
    images = gif_writer.convert_images_to_pil(images, dither, nq)
    # Write
    if isinstance(filename, basestring):
        fp = open(filename, 'wb') # depends on [control=['if'], data=[]]
    elif hasattr(filename, 'write'):
        fp = filename # depends on [control=['if'], data=[]]
    else:
        return
    try:
        gif_writer.write_gif_to_file(fp, images, duration, loops, xy, dispose) # depends on [control=['try'], data=[]]
    finally:
        fp.close()
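A minimal call under the documented contract: float arrays in [0, 1] are accepted directly. Note the `basestring` check marks this as Python-2-era code; on Python 3 one can pass an open file object instead of a path (which the `hasattr(filename, 'write')` branch accepts), or alias `basestring = str` first.

import numpy as np

# Two 64x64 grayscale frames, float values in [0, 1].
frames = [np.zeros((64, 64)), np.ones((64, 64))]
with open('pulse.gif', 'wb') as fp:
    write_gif(fp, frames, duration=0.5, repeat=True)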
def fastas(self, download=False):
    """
    Dict of filepaths for all fasta files associated with code.

    Parameters
    ----------
    download : bool
        If True, downloads the fasta file from the PDB.
        If False, uses the ampal Protein.fasta property.
        Defaults to False - this is definitely the recommended behaviour.

    Notes
    -----
    Calls self.mmols, and so downloads mmol files if not already present.
    See .fasta property of isambard.ampal.base_ampal.Protein for more
    information.

    Returns
    -------
    fastas_dict : dict, or None.
        Keys : int
            mmol number
        Values : str
            Filepath for the corresponding fasta file.
    """
    fastas_dict = {}
    fasta_dir = os.path.join(self.parent_dir, 'fasta')
    if not os.path.exists(fasta_dir):
        os.makedirs(fasta_dir)
    for i, mmol_file in self.mmols.items():
        mmol_name = os.path.basename(mmol_file)
        fasta_file_name = '{0}.fasta'.format(mmol_name)
        fasta_file = os.path.join(fasta_dir, fasta_file_name)
        if not os.path.exists(fasta_file):
            if download:
                pdb_url = "http://www.rcsb.org/pdb/files/fasta.txt?structureIdList={0}".format(self.code.upper())
                r = requests.get(pdb_url)
                if r.status_code == 200:
                    fasta_string = r.text
                else:
                    fasta_string = None
            else:
                a = convert_pdb_to_ampal(mmol_file)
                # take first object if AmpalContainer (i.e. NMR structure).
                if type(a) == AmpalContainer:
                    a = a[0]
                fasta_string = a.fasta
            with open(fasta_file, 'w') as foo:
                foo.write(fasta_string)
        fastas_dict[i] = fasta_file
    return fastas_dict
def function[fastas, parameter[self, download]]: constant[ Dict of filepaths for all fasta files associated with code. Parameters ---------- download : bool If True, downloads the fasta file from the PDB. If False, uses the ampal Protein.fasta property Defaults to False - this is definitely the recommended behaviour. Notes ----- Calls self.mmols, and so downloads mmol files if not already present. See .fasta property of isambard.ampal.base_ampal.Protein for more information. Returns ------- fastas_dict : dict, or None. Keys : int mmol number Values : str Filepath for the corresponding fasta file. ] variable[fastas_dict] assign[=] dictionary[[], []] variable[fasta_dir] assign[=] call[name[os].path.join, parameter[name[self].parent_dir, constant[fasta]]] if <ast.UnaryOp object at 0x7da1b2657760> begin[:] call[name[os].makedirs, parameter[name[fasta_dir]]] for taget[tuple[[<ast.Name object at 0x7da1b2649d20>, <ast.Name object at 0x7da1b2829fc0>]]] in starred[call[name[self].mmols.items, parameter[]]] begin[:] variable[mmol_name] assign[=] call[name[os].path.basename, parameter[name[mmol_file]]] variable[fasta_file_name] assign[=] call[constant[{0}.fasta].format, parameter[name[mmol_name]]] variable[fasta_file] assign[=] call[name[os].path.join, parameter[name[fasta_dir], name[fasta_file_name]]] if <ast.UnaryOp object at 0x7da1b2829ba0> begin[:] if name[download] begin[:] variable[pdb_url] assign[=] call[constant[http://www.rcsb.org/pdb/files/fasta.txt?structureIdList={0}].format, parameter[call[name[self].code.upper, parameter[]]]] variable[r] assign[=] call[name[requests].get, parameter[name[pdb_url]]] if compare[name[r].status_code equal[==] constant[200]] begin[:] variable[fasta_string] assign[=] name[r].text with call[name[open], parameter[name[fasta_file], constant[w]]] begin[:] call[name[foo].write, parameter[name[fasta_string]]] call[name[fastas_dict]][name[i]] assign[=] name[fasta_file] return[name[fastas_dict]]
keyword[def] identifier[fastas] ( identifier[self] , identifier[download] = keyword[False] ): literal[string] identifier[fastas_dict] ={} identifier[fasta_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[parent_dir] , literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[fasta_dir] ): identifier[os] . identifier[makedirs] ( identifier[fasta_dir] ) keyword[for] identifier[i] , identifier[mmol_file] keyword[in] identifier[self] . identifier[mmols] . identifier[items] (): identifier[mmol_name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[mmol_file] ) identifier[fasta_file_name] = literal[string] . identifier[format] ( identifier[mmol_name] ) identifier[fasta_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[fasta_dir] , identifier[fasta_file_name] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[fasta_file] ): keyword[if] identifier[download] : identifier[pdb_url] = literal[string] . identifier[format] ( identifier[self] . identifier[code] . identifier[upper] ()) identifier[r] = identifier[requests] . identifier[get] ( identifier[pdb_url] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : identifier[fasta_string] = identifier[r] . identifier[text] keyword[else] : identifier[fasta_string] = keyword[None] keyword[else] : identifier[a] = identifier[convert_pdb_to_ampal] ( identifier[mmol_file] ) keyword[if] identifier[type] ( identifier[a] )== identifier[AmpalContainer] : identifier[a] = identifier[a] [ literal[int] ] identifier[fasta_string] = identifier[a] . identifier[fasta] keyword[with] identifier[open] ( identifier[fasta_file] , literal[string] ) keyword[as] identifier[foo] : identifier[foo] . identifier[write] ( identifier[fasta_string] ) identifier[fastas_dict] [ identifier[i] ]= identifier[fasta_file] keyword[return] identifier[fastas_dict]
def fastas(self, download=False): """ Dict of filepaths for all fasta files associated with code. Parameters ---------- download : bool If True, downloads the fasta file from the PDB. If False, uses the ampal Protein.fasta property Defaults to False - this is definitely the recommended behaviour. Notes ----- Calls self.mmols, and so downloads mmol files if not already present. See .fasta property of isambard.ampal.base_ampal.Protein for more information. Returns ------- fastas_dict : dict, or None. Keys : int mmol number Values : str Filepath for the corresponding fasta file. """ fastas_dict = {} fasta_dir = os.path.join(self.parent_dir, 'fasta') if not os.path.exists(fasta_dir): os.makedirs(fasta_dir) # depends on [control=['if'], data=[]] for (i, mmol_file) in self.mmols.items(): mmol_name = os.path.basename(mmol_file) fasta_file_name = '{0}.fasta'.format(mmol_name) fasta_file = os.path.join(fasta_dir, fasta_file_name) if not os.path.exists(fasta_file): if download: pdb_url = 'http://www.rcsb.org/pdb/files/fasta.txt?structureIdList={0}'.format(self.code.upper()) r = requests.get(pdb_url) if r.status_code == 200: fasta_string = r.text # depends on [control=['if'], data=[]] else: fasta_string = None # depends on [control=['if'], data=[]] else: a = convert_pdb_to_ampal(mmol_file) # take first object if AmpalContainer (i.e. NMR structure). if type(a) == AmpalContainer: a = a[0] # depends on [control=['if'], data=[]] fasta_string = a.fasta with open(fasta_file, 'w') as foo: foo.write(fasta_string) # depends on [control=['with'], data=['foo']] # depends on [control=['if'], data=[]] fastas_dict[i] = fasta_file # depends on [control=['for'], data=[]] return fastas_dict
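The fastas() method above reduces to a download-or-reuse cache keyed on the PDB code: look for a local file first, and hit the RCSB endpoint only on a miss. Below is a minimal sketch of just that pattern, assuming only requests and the standard library; cached_fasta and the example code '2HT0' are hypothetical names for illustration, while the URL is the one used in the method above.

import os

import requests


def cached_fasta(code, fasta_dir='fasta'):
    """Return the path of a cached FASTA file, downloading it on a cache miss."""
    os.makedirs(fasta_dir, exist_ok=True)
    fasta_file = os.path.join(fasta_dir, '{0}.fasta'.format(code))
    if not os.path.exists(fasta_file):
        # Same endpoint used by fastas() above; the PDB code is
        # upper-cased exactly as that method does.
        pdb_url = ('http://www.rcsb.org/pdb/files/fasta.txt'
                   '?structureIdList={0}'.format(code.upper()))
        r = requests.get(pdb_url)
        if r.status_code == 200:
            with open(fasta_file, 'w') as foo:
                foo.write(r.text)
    return fasta_file


# Hypothetical usage:
# print(cached_fasta('2HT0'))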
def coarse_grain(coarse_grain): """Validate a macro coarse-graining.""" partition(coarse_grain.partition) if len(coarse_grain.partition) != len(coarse_grain.grouping): raise ValueError('output and state groupings must be the same size') for part, group in zip(coarse_grain.partition, coarse_grain.grouping): if set(range(len(part) + 1)) != set(group[0] + group[1]): # Check that all elements in the partition are in one of the two # state groupings raise ValueError('elements in output grouping {0} do not match ' 'elements in state grouping {1}'.format( part, group))
def function[coarse_grain, parameter[coarse_grain]]: constant[Validate a macro coarse-graining.] call[name[partition], parameter[name[coarse_grain].partition]] if compare[call[name[len], parameter[name[coarse_grain].partition]] not_equal[!=] call[name[len], parameter[name[coarse_grain].grouping]]] begin[:] <ast.Raise object at 0x7da20cabd270> for taget[tuple[[<ast.Name object at 0x7da20cabd7e0>, <ast.Name object at 0x7da20cabd870>]]] in starred[call[name[zip], parameter[name[coarse_grain].partition, name[coarse_grain].grouping]]] begin[:] if compare[call[name[set], parameter[call[name[range], parameter[binary_operation[call[name[len], parameter[name[part]]] + constant[1]]]]]] not_equal[!=] call[name[set], parameter[binary_operation[call[name[group]][constant[0]] + call[name[group]][constant[1]]]]]] begin[:] <ast.Raise object at 0x7da1b23478b0>
keyword[def] identifier[coarse_grain] ( identifier[coarse_grain] ): literal[string] identifier[partition] ( identifier[coarse_grain] . identifier[partition] ) keyword[if] identifier[len] ( identifier[coarse_grain] . identifier[partition] )!= identifier[len] ( identifier[coarse_grain] . identifier[grouping] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[for] identifier[part] , identifier[group] keyword[in] identifier[zip] ( identifier[coarse_grain] . identifier[partition] , identifier[coarse_grain] . identifier[grouping] ): keyword[if] identifier[set] ( identifier[range] ( identifier[len] ( identifier[part] )+ literal[int] ))!= identifier[set] ( identifier[group] [ literal[int] ]+ identifier[group] [ literal[int] ]): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[part] , identifier[group] ))
def coarse_grain(coarse_grain): """Validate a macro coarse-graining.""" partition(coarse_grain.partition) if len(coarse_grain.partition) != len(coarse_grain.grouping): raise ValueError('output and state groupings must be the same size') # depends on [control=['if'], data=[]] for (part, group) in zip(coarse_grain.partition, coarse_grain.grouping): if set(range(len(part) + 1)) != set(group[0] + group[1]): # Check that all elements in the partition are in one of the two # state groupings raise ValueError('elements in output grouping {0} do not match elements in state grouping {1}'.format(part, group)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
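The invariant that coarse_grain() enforces is easiest to see with concrete data: a part with n micro-elements has n + 1 possible output counts (0..n), and the two state groups together must cover exactly those counts. A self-contained sketch with hypothetical inputs, using plain tuples instead of pyphi objects and omitting the partition() sub-check:

def check_grouping(partition, grouping):
    """Same invariant as coarse_grain() above, without the pyphi types."""
    if len(partition) != len(grouping):
        raise ValueError('output and state groupings must be the same size')
    for part, group in zip(partition, grouping):
        # A part with n elements has n + 1 possible output counts (0..n),
        # and every count must appear in one of the two state groups.
        if set(range(len(part) + 1)) != set(group[0] + group[1]):
            raise ValueError('elements in output grouping {0} do not match '
                             'elements in state grouping {1}'.format(
                                 part, group))


# Hypothetical example: one part of two elements; output counts 0 and 1
# map to macro-state 0, and count 2 maps to macro-state 1.
check_grouping(partition=((0, 1),), grouping=(((0, 1), (2,)),))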
def register_monitor(self, devices, events, callback): """Register a callback when events happen. If this method is called, it is guaranteed to take effect before the next call to ``_notify_event`` after this method returns. This method is safe to call from within a callback that is itself called by ``notify_event``. See :meth:`AbstractDeviceAdapter.register_monitor`. """ # Ensure we don't exhaust any iterables events = list(events) devices = list(devices) for event in events: if event not in self.SUPPORTED_EVENTS: raise ArgumentError("Unknown event type {} specified".format(event), events=events) monitor_id = str(uuid.uuid4()) action = (monitor_id, "add", devices, events) self._callbacks[monitor_id] = callback if self._currently_notifying: self._deferred_adjustments.append(action) else: self._adjust_monitor_internal(*action) return monitor_id
def function[register_monitor, parameter[self, devices, events, callback]]: constant[Register a callback when events happen. If this method is called, it is guaranteed to take effect before the next call to ``_notify_event`` after this method returns. This method is safe to call from within a callback that is itself called by ``notify_event``. See :meth:`AbstractDeviceAdapter.register_monitor`. ] variable[events] assign[=] call[name[list], parameter[name[events]]] variable[devices] assign[=] call[name[list], parameter[name[devices]]] for taget[name[event]] in starred[name[events]] begin[:] if compare[name[event] <ast.NotIn object at 0x7da2590d7190> name[self].SUPPORTED_EVENTS] begin[:] <ast.Raise object at 0x7da2041d8dc0> variable[monitor_id] assign[=] call[name[str], parameter[call[name[uuid].uuid4, parameter[]]]] variable[action] assign[=] tuple[[<ast.Name object at 0x7da2041da9b0>, <ast.Constant object at 0x7da2041dba60>, <ast.Name object at 0x7da2041db460>, <ast.Name object at 0x7da2041d97e0>]] call[name[self]._callbacks][name[monitor_id]] assign[=] name[callback] if name[self]._currently_notifying begin[:] call[name[self]._deferred_adjustments.append, parameter[name[action]]] return[name[monitor_id]]
keyword[def] identifier[register_monitor] ( identifier[self] , identifier[devices] , identifier[events] , identifier[callback] ): literal[string] identifier[events] = identifier[list] ( identifier[events] ) identifier[devices] = identifier[list] ( identifier[devices] ) keyword[for] identifier[event] keyword[in] identifier[events] : keyword[if] identifier[event] keyword[not] keyword[in] identifier[self] . identifier[SUPPORTED_EVENTS] : keyword[raise] identifier[ArgumentError] ( literal[string] . identifier[format] ( identifier[event] ), identifier[events] = identifier[events] ) identifier[monitor_id] = identifier[str] ( identifier[uuid] . identifier[uuid4] ()) identifier[action] =( identifier[monitor_id] , literal[string] , identifier[devices] , identifier[events] ) identifier[self] . identifier[_callbacks] [ identifier[monitor_id] ]= identifier[callback] keyword[if] identifier[self] . identifier[_currently_notifying] : identifier[self] . identifier[_deferred_adjustments] . identifier[append] ( identifier[action] ) keyword[else] : identifier[self] . identifier[_adjust_monitor_internal] (* identifier[action] ) keyword[return] identifier[monitor_id]
def register_monitor(self, devices, events, callback): """Register a callback when events happen. If this method is called, it is guaranteed to take effect before the next call to ``_notify_event`` after this method returns. This method is safe to call from within a callback that is itself called by ``notify_event``. See :meth:`AbstractDeviceAdapter.register_monitor`. """ # Ensure we don't exhaust any iterables events = list(events) devices = list(devices) for event in events: if event not in self.SUPPORTED_EVENTS: raise ArgumentError('Unknown event type {} specified'.format(event), events=events) # depends on [control=['if'], data=['event']] # depends on [control=['for'], data=['event']] monitor_id = str(uuid.uuid4()) action = (monitor_id, 'add', devices, events) self._callbacks[monitor_id] = callback if self._currently_notifying: self._deferred_adjustments.append(action) # depends on [control=['if'], data=[]] else: self._adjust_monitor_internal(*action) return monitor_id
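The subtle part of register_monitor() is re-entrancy: a callback fired during event notification may itself register a monitor, so changes made mid-notification are queued and applied only after the notification loop finishes. A minimal, self-contained sketch of that pattern with hypothetical names (Notifier, subscribe, notify); it is not the actual AbstractDeviceAdapter code:

class Notifier:
    """Queue subscription changes made during notification; apply them after."""

    def __init__(self):
        self._callbacks = {}
        self._currently_notifying = False
        self._deferred = []

    def subscribe(self, key, callback):
        if self._currently_notifying:
            # Re-entrant call from inside a callback: defer the change.
            self._deferred.append((key, callback))
        else:
            self._callbacks[key] = callback

    def notify(self, event):
        self._currently_notifying = True
        try:
            for callback in list(self._callbacks.values()):
                callback(event)
        finally:
            self._currently_notifying = False
            # Apply every change deferred while we were notifying.
            for key, callback in self._deferred:
                self._callbacks[key] = callback
            self._deferred = []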
def is_excel_file(inputfile):
    """ Return whether the provided file is an Excel file or not.
    This checks whether xlrd can open the file as a workbook; if the
    file cannot be parsed as a workbook it is not a valid Excel file.
    """
    try:
        xlrd.open_workbook(inputfile)
    except Exception as err:
        print(err)
        return False
    return True
def function[is_excel_file, parameter[inputfile]]:
    constant[ Return whether the provided file is an Excel file or not.
    This checks whether xlrd can open the file as a workbook; if the
    file cannot be parsed as a workbook it is not a valid Excel file.
    ]
    <ast.Try object at 0x7da18f812320>
    return[constant[True]]
keyword[def] identifier[is_excel_file] ( identifier[inputfile] ): literal[string] keyword[try] : identifier[xlrd] . identifier[open_workbook] ( identifier[inputfile] ) keyword[except] identifier[Exception] keyword[as] identifier[err] : identifier[print] ( identifier[err] ) keyword[return] keyword[False] keyword[return] keyword[True]
def is_excel_file(inputfile):
    """ Return whether the provided file is an Excel file or not.
    This checks whether xlrd can open the file as a workbook; if the
    file cannot be parsed as a workbook it is not a valid Excel file.
    """
    try:
        xlrd.open_workbook(inputfile) # depends on [control=['try'], data=[]]
    except Exception as err:
        print(err)
        return False # depends on [control=['except'], data=['err']]
    return True
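is_excel_file() is a straight EAFP check: rather than sniffing bytes or extensions, it simply asks xlrd to open the workbook and treats any exception as "not Excel". A short usage sketch, assuming the function above is in scope; the file names are hypothetical:

import xlrd  # noqa: F401 -- is_excel_file() relies on xlrd being importable

# Hypothetical files: the first should parse as a workbook, the second
# should fail and print the parse error before returning False.
for path in ('linkage_map.xls', 'linkage_map.csv'):
    print(path, is_excel_file(path))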
def request_instance(vm_): ''' Request a VM from Azure. ''' compconn = get_conn(client_type='compute') # pylint: disable=invalid-name CachingTypes = getattr( compute_models, 'CachingTypes' ) # pylint: disable=invalid-name DataDisk = getattr( compute_models, 'DataDisk' ) # pylint: disable=invalid-name DiskCreateOptionTypes = getattr( compute_models, 'DiskCreateOptionTypes' ) # pylint: disable=invalid-name HardwareProfile = getattr( compute_models, 'HardwareProfile' ) # pylint: disable=invalid-name ImageReference = getattr( compute_models, 'ImageReference' ) # pylint: disable=invalid-name LinuxConfiguration = getattr( compute_models, 'LinuxConfiguration' ) # pylint: disable=invalid-name SshConfiguration = getattr( compute_models, 'SshConfiguration' ) # pylint: disable=invalid-name SshPublicKey = getattr( compute_models, 'SshPublicKey' ) # pylint: disable=invalid-name NetworkInterfaceReference = getattr( compute_models, 'NetworkInterfaceReference' ) # pylint: disable=invalid-name NetworkProfile = getattr( compute_models, 'NetworkProfile' ) # pylint: disable=invalid-name OSDisk = getattr( compute_models, 'OSDisk' ) # pylint: disable=invalid-name OSProfile = getattr( compute_models, 'OSProfile' ) # pylint: disable=invalid-name StorageProfile = getattr( compute_models, 'StorageProfile' ) # pylint: disable=invalid-name VirtualHardDisk = getattr( compute_models, 'VirtualHardDisk' ) # pylint: disable=invalid-name VirtualMachine = getattr( compute_models, 'VirtualMachine' ) # pylint: disable=invalid-name VirtualMachineSizeTypes = getattr( compute_models, 'VirtualMachineSizeTypes' ) subscription_id = config.get_cloud_config_value( 'subscription_id', get_configured_provider(), __opts__, search_global=False ) if vm_.get('driver') is None: vm_['driver'] = 'azurearm' if vm_.get('location') is None: vm_['location'] = get_location() if vm_.get('resource_group') is None: vm_['resource_group'] = config.get_cloud_config_value( 'resource_group', vm_, __opts__, search_global=True ) if vm_.get('name') is None: vm_['name'] = config.get_cloud_config_value( 'name', vm_, __opts__, search_global=True ) # pylint: disable=unused-variable iface_data, public_ips, private_ips = create_network_interface( call='action', kwargs=vm_ ) vm_['iface_id'] = iface_data['id'] disk_name = '{0}-vol0'.format(vm_['name']) vm_username = config.get_cloud_config_value( 'ssh_username', vm_, __opts__, search_global=True, default=config.get_cloud_config_value( 'win_username', vm_, __opts__, search_global=True ) ) ssh_publickeyfile_contents = None ssh_publickeyfile = config.get_cloud_config_value( 'ssh_publickeyfile', vm_, __opts__, search_global=False, default=None ) if ssh_publickeyfile is not None: try: with salt.utils.files.fopen(ssh_publickeyfile, 'r') as spkc_: ssh_publickeyfile_contents = spkc_.read() except Exception as exc: raise SaltCloudConfigError( "Failed to read ssh publickey file '{0}': " "{1}".format(ssh_publickeyfile, exc.args[-1]) ) disable_password_authentication = config.get_cloud_config_value( 'disable_password_authentication', vm_, __opts__, search_global=False, default=False ) vm_password = salt.utils.stringutils.to_str( config.get_cloud_config_value( 'ssh_password', vm_, __opts__, search_global=True, default=config.get_cloud_config_value( 'win_password', vm_, __opts__, search_global=True ) ) ) os_kwargs = {} win_installer = config.get_cloud_config_value( 'win_installer', vm_, __opts__, search_global=True ) if not win_installer and ssh_publickeyfile_contents is not None: sshpublickey = SshPublicKey( 
key_data=ssh_publickeyfile_contents, path='/home/{0}/.ssh/authorized_keys'.format(vm_username), ) sshconfiguration = SshConfiguration( public_keys=[sshpublickey], ) linuxconfiguration = LinuxConfiguration( disable_password_authentication=disable_password_authentication, ssh=sshconfiguration, ) os_kwargs['linux_configuration'] = linuxconfiguration if win_installer or (vm_password is not None and not disable_password_authentication): if not isinstance(vm_password, str): raise SaltCloudSystemExit( 'The admin password must be a string.' ) if len(vm_password) < 8 or len(vm_password) > 123: raise SaltCloudSystemExit( 'The admin password must be between 8-123 characters long.' ) complexity = 0 if any(char.isdigit() for char in vm_password): complexity += 1 if any(char.isupper() for char in vm_password): complexity += 1 if any(char.islower() for char in vm_password): complexity += 1 if any(char in string.punctuation for char in vm_password): complexity += 1 if complexity < 3: raise SaltCloudSystemExit( 'The admin password must contain at least 3 of the following types: ' 'upper, lower, digits, special characters' ) os_kwargs['admin_password'] = vm_password availability_set = config.get_cloud_config_value( 'availability_set', vm_, __opts__, search_global=False, default=None ) if availability_set is not None and isinstance(availability_set, six.string_types): availability_set = { 'id': '/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Compute/availabilitySets/{2}'.format( subscription_id, vm_['resource_group'], availability_set ) } else: availability_set = None cloud_env = _get_cloud_environment() storage_endpoint_suffix = cloud_env.suffixes.storage_endpoint if isinstance(vm_.get('volumes'), six.string_types): volumes = salt.utils.yaml.safe_load(vm_['volumes']) else: volumes = vm_.get('volumes') data_disks = None if isinstance(volumes, list): data_disks = [] else: volumes = [] lun = 0 luns = [] for volume in volumes: if isinstance(volume, six.string_types): volume = {'name': volume} volume.setdefault( 'name', volume.get( 'name', volume.get( 'name', '{0}-datadisk{1}'.format(vm_['name'], six.text_type(lun)) ) ) ) volume.setdefault( 'disk_size_gb', volume.get( 'logical_disk_size_in_gb', volume.get('size', 100) ) ) # Old kwarg was host_caching, new name is caching volume.setdefault('caching', volume.get('host_caching', 'ReadOnly')) while lun in luns: lun += 1 if lun > 15: log.error('Maximum lun count has been reached') break volume.setdefault('lun', lun) lun += 1 # The default vhd is {vm_name}-datadisk{lun}.vhd if 'media_link' in volume: volume['vhd'] = VirtualHardDisk(volume['media_link']) del volume['media_link'] elif volume.get('vhd') == 'unmanaged': volume['vhd'] = VirtualHardDisk( 'https://{0}.blob.{1}/vhds/{2}-datadisk{3}.vhd'.format( vm_['storage_account'], storage_endpoint_suffix, vm_['name'], volume['lun'], ), ) elif 'vhd' in volume: volume['vhd'] = VirtualHardDisk(volume['vhd']) if 'image' in volume: volume['create_option'] = 'from_image' elif 'attach' in volume: volume['create_option'] = 'attach' else: volume['create_option'] = 'empty' data_disks.append(DataDisk(**volume)) img_ref = None if vm_['image'].startswith('http') or vm_.get('vhd') == 'unmanaged': if vm_['image'].startswith('http'): source_image = VirtualHardDisk(vm_['image']) else: source_image = None if '|' in vm_['image']: img_pub, img_off, img_sku, img_ver = vm_['image'].split('|') img_ref = ImageReference( publisher=img_pub, offer=img_off, sku=img_sku, version=img_ver, ) elif vm_['image'].startswith('/subscriptions'): 
img_ref = ImageReference(id=vm_['image']) if win_installer: os_type = 'Windows' else: os_type = 'Linux' os_disk = OSDisk( caching=CachingTypes.none, create_option=DiskCreateOptionTypes.from_image, name=disk_name, vhd=VirtualHardDisk( 'https://{0}.blob.{1}/vhds/{2}.vhd'.format( vm_['storage_account'], storage_endpoint_suffix, disk_name, ), ), os_type=os_type, image=source_image, disk_size_gb=vm_.get('os_disk_size_gb') ) else: source_image = None os_type = None os_disk = OSDisk( create_option=DiskCreateOptionTypes.from_image, disk_size_gb=vm_.get('os_disk_size_gb') ) if '|' in vm_['image']: img_pub, img_off, img_sku, img_ver = vm_['image'].split('|') img_ref = ImageReference( publisher=img_pub, offer=img_off, sku=img_sku, version=img_ver, ) elif vm_['image'].startswith('/subscriptions'): img_ref = ImageReference(id=vm_['image']) userdata_file = config.get_cloud_config_value( 'userdata_file', vm_, __opts__, search_global=False, default=None ) userdata = config.get_cloud_config_value( 'userdata', vm_, __opts__, search_global=False, default=None ) userdata_template = config.get_cloud_config_value( 'userdata_template', vm_, __opts__, search_global=False, default=None ) if userdata_file: if os.path.exists(userdata_file): with salt.utils.files.fopen(userdata_file, 'r') as fh_: userdata = fh_.read() if userdata and userdata_template: userdata_sendkeys = config.get_cloud_config_value( 'userdata_sendkeys', vm_, __opts__, search_global=False, default=None ) if userdata_sendkeys: vm_['priv_key'], vm_['pub_key'] = salt.utils.cloud.gen_keys( config.get_cloud_config_value( 'keysize', vm_, __opts__ ) ) key_id = vm_.get('name') if 'append_domain' in vm_: key_id = '.'.join([key_id, vm_['append_domain']]) salt.utils.cloud.accept_key( __opts__['pki_dir'], vm_['pub_key'], key_id ) userdata = salt.utils.cloud.userdata_template(__opts__, vm_, userdata) custom_extension = None if userdata is not None or userdata_file is not None: try: if win_installer: publisher = 'Microsoft.Compute' virtual_machine_extension_type = 'CustomScriptExtension' type_handler_version = '1.8' if userdata_file and userdata_file.endswith('.ps1'): command_prefix = 'powershell -ExecutionPolicy Unrestricted -File ' else: command_prefix = '' else: publisher = 'Microsoft.Azure.Extensions' virtual_machine_extension_type = 'CustomScript' type_handler_version = '2.0' command_prefix = '' settings = {} if userdata: settings['commandToExecute'] = userdata elif userdata_file.startswith('http'): settings['fileUris'] = [userdata_file] settings['commandToExecute'] = command_prefix + './' + userdata_file[userdata_file.rfind('/')+1:] custom_extension = { 'resource_group': vm_['resource_group'], 'virtual_machine_name': vm_['name'], 'extension_name': vm_['name'] + '_custom_userdata_script', 'location': vm_['location'], 'publisher': publisher, 'virtual_machine_extension_type': virtual_machine_extension_type, 'type_handler_version': type_handler_version, 'auto_upgrade_minor_version': True, 'settings': settings, 'protected_settings': None } except Exception as exc: log.exception('Failed to encode userdata: %s', exc) params = VirtualMachine( location=vm_['location'], plan=None, hardware_profile=HardwareProfile( vm_size=getattr( VirtualMachineSizeTypes, vm_['size'].lower() ), ), storage_profile=StorageProfile( os_disk=os_disk, data_disks=data_disks, image_reference=img_ref, ), os_profile=OSProfile( admin_username=vm_username, computer_name=vm_['name'], **os_kwargs ), network_profile=NetworkProfile( network_interfaces=[ NetworkInterfaceReference(vm_['iface_id']), 
], ), availability_set=availability_set, ) __utils__['cloud.fire_event']( 'event', 'requesting instance', 'salt/cloud/{0}/requesting'.format(vm_['name']), args=__utils__['cloud.filter_event']( 'requesting', vm_, ['name', 'profile', 'provider', 'driver'] ), sock_dir=__opts__['sock_dir'], transport=__opts__['transport'] ) try: vm_create = compconn.virtual_machines.create_or_update( resource_group_name=vm_['resource_group'], vm_name=vm_['name'], parameters=params ) vm_create.wait() vm_result = vm_create.result() vm_result = vm_result.as_dict() if custom_extension: create_or_update_vmextension(kwargs=custom_extension) except CloudError as exc: __utils__['azurearm.log_cloud_error']('compute', exc.message) vm_result = {} return vm_result
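The password validation buried in request_instance() above (8-123 characters, at least three of the four character classes) can be lifted out and tested on its own. A minimal sketch mirroring those rules; meets_azure_password_policy is a hypothetical name and the sample passwords are illustrative:

import string


def meets_azure_password_policy(password):
    """Mirror the complexity rules enforced in request_instance() above."""
    if not isinstance(password, str):
        return False
    if len(password) < 8 or len(password) > 123:
        return False
    classes = [
        any(char.isdigit() for char in password),
        any(char.isupper() for char in password),
        any(char.islower() for char in password),
        any(char in string.punctuation for char in password),
    ]
    # At least 3 of the 4 character classes must be present.
    return sum(classes) >= 3


assert meets_azure_password_policy('Sup3r-Secret')
assert not meets_azure_password_policy('short')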
def function[request_instance, parameter[vm_]]: constant[ Request a VM from Azure. ] variable[compconn] assign[=] call[name[get_conn], parameter[]] variable[CachingTypes] assign[=] call[name[getattr], parameter[name[compute_models], constant[CachingTypes]]] variable[DataDisk] assign[=] call[name[getattr], parameter[name[compute_models], constant[DataDisk]]] variable[DiskCreateOptionTypes] assign[=] call[name[getattr], parameter[name[compute_models], constant[DiskCreateOptionTypes]]] variable[HardwareProfile] assign[=] call[name[getattr], parameter[name[compute_models], constant[HardwareProfile]]] variable[ImageReference] assign[=] call[name[getattr], parameter[name[compute_models], constant[ImageReference]]] variable[LinuxConfiguration] assign[=] call[name[getattr], parameter[name[compute_models], constant[LinuxConfiguration]]] variable[SshConfiguration] assign[=] call[name[getattr], parameter[name[compute_models], constant[SshConfiguration]]] variable[SshPublicKey] assign[=] call[name[getattr], parameter[name[compute_models], constant[SshPublicKey]]] variable[NetworkInterfaceReference] assign[=] call[name[getattr], parameter[name[compute_models], constant[NetworkInterfaceReference]]] variable[NetworkProfile] assign[=] call[name[getattr], parameter[name[compute_models], constant[NetworkProfile]]] variable[OSDisk] assign[=] call[name[getattr], parameter[name[compute_models], constant[OSDisk]]] variable[OSProfile] assign[=] call[name[getattr], parameter[name[compute_models], constant[OSProfile]]] variable[StorageProfile] assign[=] call[name[getattr], parameter[name[compute_models], constant[StorageProfile]]] variable[VirtualHardDisk] assign[=] call[name[getattr], parameter[name[compute_models], constant[VirtualHardDisk]]] variable[VirtualMachine] assign[=] call[name[getattr], parameter[name[compute_models], constant[VirtualMachine]]] variable[VirtualMachineSizeTypes] assign[=] call[name[getattr], parameter[name[compute_models], constant[VirtualMachineSizeTypes]]] variable[subscription_id] assign[=] call[name[config].get_cloud_config_value, parameter[constant[subscription_id], call[name[get_configured_provider], parameter[]], name[__opts__]]] if compare[call[name[vm_].get, parameter[constant[driver]]] is constant[None]] begin[:] call[name[vm_]][constant[driver]] assign[=] constant[azurearm] if compare[call[name[vm_].get, parameter[constant[location]]] is constant[None]] begin[:] call[name[vm_]][constant[location]] assign[=] call[name[get_location], parameter[]] if compare[call[name[vm_].get, parameter[constant[resource_group]]] is constant[None]] begin[:] call[name[vm_]][constant[resource_group]] assign[=] call[name[config].get_cloud_config_value, parameter[constant[resource_group], name[vm_], name[__opts__]]] if compare[call[name[vm_].get, parameter[constant[name]]] is constant[None]] begin[:] call[name[vm_]][constant[name]] assign[=] call[name[config].get_cloud_config_value, parameter[constant[name], name[vm_], name[__opts__]]] <ast.Tuple object at 0x7da1b1e318a0> assign[=] call[name[create_network_interface], parameter[]] call[name[vm_]][constant[iface_id]] assign[=] call[name[iface_data]][constant[id]] variable[disk_name] assign[=] call[constant[{0}-vol0].format, parameter[call[name[vm_]][constant[name]]]] variable[vm_username] assign[=] call[name[config].get_cloud_config_value, parameter[constant[ssh_username], name[vm_], name[__opts__]]] variable[ssh_publickeyfile_contents] assign[=] constant[None] variable[ssh_publickeyfile] assign[=] call[name[config].get_cloud_config_value, 
parameter[constant[ssh_publickeyfile], name[vm_], name[__opts__]]] if compare[name[ssh_publickeyfile] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b1e30cd0> variable[disable_password_authentication] assign[=] call[name[config].get_cloud_config_value, parameter[constant[disable_password_authentication], name[vm_], name[__opts__]]] variable[vm_password] assign[=] call[name[salt].utils.stringutils.to_str, parameter[call[name[config].get_cloud_config_value, parameter[constant[ssh_password], name[vm_], name[__opts__]]]]] variable[os_kwargs] assign[=] dictionary[[], []] variable[win_installer] assign[=] call[name[config].get_cloud_config_value, parameter[constant[win_installer], name[vm_], name[__opts__]]] if <ast.BoolOp object at 0x7da18ede4130> begin[:] variable[sshpublickey] assign[=] call[name[SshPublicKey], parameter[]] variable[sshconfiguration] assign[=] call[name[SshConfiguration], parameter[]] variable[linuxconfiguration] assign[=] call[name[LinuxConfiguration], parameter[]] call[name[os_kwargs]][constant[linux_configuration]] assign[=] name[linuxconfiguration] if <ast.BoolOp object at 0x7da18ede6cb0> begin[:] if <ast.UnaryOp object at 0x7da18ede45e0> begin[:] <ast.Raise object at 0x7da18ede4070> if <ast.BoolOp object at 0x7da18ede7970> begin[:] <ast.Raise object at 0x7da18ede6b60> variable[complexity] assign[=] constant[0] if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18ede78e0>]] begin[:] <ast.AugAssign object at 0x7da18ede6e00> if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18ede5870>]] begin[:] <ast.AugAssign object at 0x7da18ede4040> if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18ede5600>]] begin[:] <ast.AugAssign object at 0x7da18ede5180> if call[name[any], parameter[<ast.GeneratorExp object at 0x7da18ede5750>]] begin[:] <ast.AugAssign object at 0x7da18ede4ee0> if compare[name[complexity] less[<] constant[3]] begin[:] <ast.Raise object at 0x7da18ede57e0> call[name[os_kwargs]][constant[admin_password]] assign[=] name[vm_password] variable[availability_set] assign[=] call[name[config].get_cloud_config_value, parameter[constant[availability_set], name[vm_], name[__opts__]]] if <ast.BoolOp object at 0x7da18ede5810> begin[:] variable[availability_set] assign[=] dictionary[[<ast.Constant object at 0x7da18ede7430>], [<ast.Call object at 0x7da18ede6830>]] variable[cloud_env] assign[=] call[name[_get_cloud_environment], parameter[]] variable[storage_endpoint_suffix] assign[=] name[cloud_env].suffixes.storage_endpoint if call[name[isinstance], parameter[call[name[vm_].get, parameter[constant[volumes]]], name[six].string_types]] begin[:] variable[volumes] assign[=] call[name[salt].utils.yaml.safe_load, parameter[call[name[vm_]][constant[volumes]]]] variable[data_disks] assign[=] constant[None] if call[name[isinstance], parameter[name[volumes], name[list]]] begin[:] variable[data_disks] assign[=] list[[]] variable[lun] assign[=] constant[0] variable[luns] assign[=] list[[]] for taget[name[volume]] in starred[name[volumes]] begin[:] if call[name[isinstance], parameter[name[volume], name[six].string_types]] begin[:] variable[volume] assign[=] dictionary[[<ast.Constant object at 0x7da18ede5c90>], [<ast.Name object at 0x7da18ede69b0>]] call[name[volume].setdefault, parameter[constant[name], call[name[volume].get, parameter[constant[name], call[name[volume].get, parameter[constant[name], call[constant[{0}-datadisk{1}].format, parameter[call[name[vm_]][constant[name]], call[name[six].text_type, parameter[name[lun]]]]]]]]]]] 
call[name[volume].setdefault, parameter[constant[disk_size_gb], call[name[volume].get, parameter[constant[logical_disk_size_in_gb], call[name[volume].get, parameter[constant[size], constant[100]]]]]]] call[name[volume].setdefault, parameter[constant[caching], call[name[volume].get, parameter[constant[host_caching], constant[ReadOnly]]]]] while compare[name[lun] in name[luns]] begin[:] <ast.AugAssign object at 0x7da18ede48b0> if compare[name[lun] greater[>] constant[15]] begin[:] call[name[log].error, parameter[constant[Maximum lun count has been reached]]] break call[name[volume].setdefault, parameter[constant[lun], name[lun]]] <ast.AugAssign object at 0x7da1b2109240> if compare[constant[media_link] in name[volume]] begin[:] call[name[volume]][constant[vhd]] assign[=] call[name[VirtualHardDisk], parameter[call[name[volume]][constant[media_link]]]] <ast.Delete object at 0x7da1b210b130> if compare[constant[image] in name[volume]] begin[:] call[name[volume]][constant[create_option]] assign[=] constant[from_image] call[name[data_disks].append, parameter[call[name[DataDisk], parameter[]]]] variable[img_ref] assign[=] constant[None] if <ast.BoolOp object at 0x7da1b210ace0> begin[:] if call[call[name[vm_]][constant[image]].startswith, parameter[constant[http]]] begin[:] variable[source_image] assign[=] call[name[VirtualHardDisk], parameter[call[name[vm_]][constant[image]]]] if name[win_installer] begin[:] variable[os_type] assign[=] constant[Windows] variable[os_disk] assign[=] call[name[OSDisk], parameter[]] variable[userdata_file] assign[=] call[name[config].get_cloud_config_value, parameter[constant[userdata_file], name[vm_], name[__opts__]]] variable[userdata] assign[=] call[name[config].get_cloud_config_value, parameter[constant[userdata], name[vm_], name[__opts__]]] variable[userdata_template] assign[=] call[name[config].get_cloud_config_value, parameter[constant[userdata_template], name[vm_], name[__opts__]]] if name[userdata_file] begin[:] if call[name[os].path.exists, parameter[name[userdata_file]]] begin[:] with call[name[salt].utils.files.fopen, parameter[name[userdata_file], constant[r]]] begin[:] variable[userdata] assign[=] call[name[fh_].read, parameter[]] if <ast.BoolOp object at 0x7da1b21fbfd0> begin[:] variable[userdata_sendkeys] assign[=] call[name[config].get_cloud_config_value, parameter[constant[userdata_sendkeys], name[vm_], name[__opts__]]] if name[userdata_sendkeys] begin[:] <ast.Tuple object at 0x7da1b21f8b80> assign[=] call[name[salt].utils.cloud.gen_keys, parameter[call[name[config].get_cloud_config_value, parameter[constant[keysize], name[vm_], name[__opts__]]]]] variable[key_id] assign[=] call[name[vm_].get, parameter[constant[name]]] if compare[constant[append_domain] in name[vm_]] begin[:] variable[key_id] assign[=] call[constant[.].join, parameter[list[[<ast.Name object at 0x7da1b21f96c0>, <ast.Subscript object at 0x7da1b21f9450>]]]] call[name[salt].utils.cloud.accept_key, parameter[call[name[__opts__]][constant[pki_dir]], call[name[vm_]][constant[pub_key]], name[key_id]]] variable[userdata] assign[=] call[name[salt].utils.cloud.userdata_template, parameter[name[__opts__], name[vm_], name[userdata]]] variable[custom_extension] assign[=] constant[None] if <ast.BoolOp object at 0x7da1b21fa320> begin[:] <ast.Try object at 0x7da1b21fa530> variable[params] assign[=] call[name[VirtualMachine], parameter[]] call[call[name[__utils__]][constant[cloud.fire_event]], parameter[constant[event], constant[requesting instance], call[constant[salt/cloud/{0}/requesting].format, 
parameter[call[name[vm_]][constant[name]]]]]] <ast.Try object at 0x7da1b20042b0> return[name[vm_result]]
keyword[def] identifier[request_instance] ( identifier[vm_] ): literal[string] identifier[compconn] = identifier[get_conn] ( identifier[client_type] = literal[string] ) identifier[CachingTypes] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[DataDisk] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[DiskCreateOptionTypes] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[HardwareProfile] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[ImageReference] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[LinuxConfiguration] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[SshConfiguration] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[SshPublicKey] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[NetworkInterfaceReference] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[NetworkProfile] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[OSDisk] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[OSProfile] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[StorageProfile] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[VirtualHardDisk] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[VirtualMachine] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[VirtualMachineSizeTypes] = identifier[getattr] ( identifier[compute_models] , literal[string] ) identifier[subscription_id] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[get_configured_provider] (), identifier[__opts__] , identifier[search_global] = keyword[False] ) keyword[if] identifier[vm_] . identifier[get] ( literal[string] ) keyword[is] keyword[None] : identifier[vm_] [ literal[string] ]= literal[string] keyword[if] identifier[vm_] . identifier[get] ( literal[string] ) keyword[is] keyword[None] : identifier[vm_] [ literal[string] ]= identifier[get_location] () keyword[if] identifier[vm_] . identifier[get] ( literal[string] ) keyword[is] keyword[None] : identifier[vm_] [ literal[string] ]= identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[True] ) keyword[if] identifier[vm_] . identifier[get] ( literal[string] ) keyword[is] keyword[None] : identifier[vm_] [ literal[string] ]= identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[True] ) identifier[iface_data] , identifier[public_ips] , identifier[private_ips] = identifier[create_network_interface] ( identifier[call] = literal[string] , identifier[kwargs] = identifier[vm_] ) identifier[vm_] [ literal[string] ]= identifier[iface_data] [ literal[string] ] identifier[disk_name] = literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ]) identifier[vm_username] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[True] , identifier[default] = identifier[config] . 
identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[True] ) ) identifier[ssh_publickeyfile_contents] = keyword[None] identifier[ssh_publickeyfile] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] , identifier[default] = keyword[None] ) keyword[if] identifier[ssh_publickeyfile] keyword[is] keyword[not] keyword[None] : keyword[try] : keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[ssh_publickeyfile] , literal[string] ) keyword[as] identifier[spkc_] : identifier[ssh_publickeyfile_contents] = identifier[spkc_] . identifier[read] () keyword[except] identifier[Exception] keyword[as] identifier[exc] : keyword[raise] identifier[SaltCloudConfigError] ( literal[string] literal[string] . identifier[format] ( identifier[ssh_publickeyfile] , identifier[exc] . identifier[args] [- literal[int] ]) ) identifier[disable_password_authentication] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] , identifier[default] = keyword[False] ) identifier[vm_password] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[True] , identifier[default] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[True] ) ) ) identifier[os_kwargs] ={} identifier[win_installer] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[True] ) keyword[if] keyword[not] identifier[win_installer] keyword[and] identifier[ssh_publickeyfile_contents] keyword[is] keyword[not] keyword[None] : identifier[sshpublickey] = identifier[SshPublicKey] ( identifier[key_data] = identifier[ssh_publickeyfile_contents] , identifier[path] = literal[string] . identifier[format] ( identifier[vm_username] ), ) identifier[sshconfiguration] = identifier[SshConfiguration] ( identifier[public_keys] =[ identifier[sshpublickey] ], ) identifier[linuxconfiguration] = identifier[LinuxConfiguration] ( identifier[disable_password_authentication] = identifier[disable_password_authentication] , identifier[ssh] = identifier[sshconfiguration] , ) identifier[os_kwargs] [ literal[string] ]= identifier[linuxconfiguration] keyword[if] identifier[win_installer] keyword[or] ( identifier[vm_password] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[disable_password_authentication] ): keyword[if] keyword[not] identifier[isinstance] ( identifier[vm_password] , identifier[str] ): keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] ) keyword[if] identifier[len] ( identifier[vm_password] )< literal[int] keyword[or] identifier[len] ( identifier[vm_password] )> literal[int] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] ) identifier[complexity] = literal[int] keyword[if] identifier[any] ( identifier[char] . identifier[isdigit] () keyword[for] identifier[char] keyword[in] identifier[vm_password] ): identifier[complexity] += literal[int] keyword[if] identifier[any] ( identifier[char] . 
identifier[isupper] () keyword[for] identifier[char] keyword[in] identifier[vm_password] ): identifier[complexity] += literal[int] keyword[if] identifier[any] ( identifier[char] . identifier[islower] () keyword[for] identifier[char] keyword[in] identifier[vm_password] ): identifier[complexity] += literal[int] keyword[if] identifier[any] ( identifier[char] keyword[in] identifier[string] . identifier[punctuation] keyword[for] identifier[char] keyword[in] identifier[vm_password] ): identifier[complexity] += literal[int] keyword[if] identifier[complexity] < literal[int] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] literal[string] ) identifier[os_kwargs] [ literal[string] ]= identifier[vm_password] identifier[availability_set] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] , identifier[default] = keyword[None] ) keyword[if] identifier[availability_set] keyword[is] keyword[not] keyword[None] keyword[and] identifier[isinstance] ( identifier[availability_set] , identifier[six] . identifier[string_types] ): identifier[availability_set] ={ literal[string] : literal[string] . identifier[format] ( identifier[subscription_id] , identifier[vm_] [ literal[string] ], identifier[availability_set] ) } keyword[else] : identifier[availability_set] = keyword[None] identifier[cloud_env] = identifier[_get_cloud_environment] () identifier[storage_endpoint_suffix] = identifier[cloud_env] . identifier[suffixes] . identifier[storage_endpoint] keyword[if] identifier[isinstance] ( identifier[vm_] . identifier[get] ( literal[string] ), identifier[six] . identifier[string_types] ): identifier[volumes] = identifier[salt] . identifier[utils] . identifier[yaml] . identifier[safe_load] ( identifier[vm_] [ literal[string] ]) keyword[else] : identifier[volumes] = identifier[vm_] . identifier[get] ( literal[string] ) identifier[data_disks] = keyword[None] keyword[if] identifier[isinstance] ( identifier[volumes] , identifier[list] ): identifier[data_disks] =[] keyword[else] : identifier[volumes] =[] identifier[lun] = literal[int] identifier[luns] =[] keyword[for] identifier[volume] keyword[in] identifier[volumes] : keyword[if] identifier[isinstance] ( identifier[volume] , identifier[six] . identifier[string_types] ): identifier[volume] ={ literal[string] : identifier[volume] } identifier[volume] . identifier[setdefault] ( literal[string] , identifier[volume] . identifier[get] ( literal[string] , identifier[volume] . identifier[get] ( literal[string] , literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ], identifier[six] . identifier[text_type] ( identifier[lun] )) ) ) ) identifier[volume] . identifier[setdefault] ( literal[string] , identifier[volume] . identifier[get] ( literal[string] , identifier[volume] . identifier[get] ( literal[string] , literal[int] ) ) ) identifier[volume] . identifier[setdefault] ( literal[string] , identifier[volume] . identifier[get] ( literal[string] , literal[string] )) keyword[while] identifier[lun] keyword[in] identifier[luns] : identifier[lun] += literal[int] keyword[if] identifier[lun] > literal[int] : identifier[log] . identifier[error] ( literal[string] ) keyword[break] identifier[volume] . 
identifier[setdefault] ( literal[string] , identifier[lun] ) identifier[lun] += literal[int] keyword[if] literal[string] keyword[in] identifier[volume] : identifier[volume] [ literal[string] ]= identifier[VirtualHardDisk] ( identifier[volume] [ literal[string] ]) keyword[del] identifier[volume] [ literal[string] ] keyword[elif] identifier[volume] . identifier[get] ( literal[string] )== literal[string] : identifier[volume] [ literal[string] ]= identifier[VirtualHardDisk] ( literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ], identifier[storage_endpoint_suffix] , identifier[vm_] [ literal[string] ], identifier[volume] [ literal[string] ], ), ) keyword[elif] literal[string] keyword[in] identifier[volume] : identifier[volume] [ literal[string] ]= identifier[VirtualHardDisk] ( identifier[volume] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[volume] : identifier[volume] [ literal[string] ]= literal[string] keyword[elif] literal[string] keyword[in] identifier[volume] : identifier[volume] [ literal[string] ]= literal[string] keyword[else] : identifier[volume] [ literal[string] ]= literal[string] identifier[data_disks] . identifier[append] ( identifier[DataDisk] (** identifier[volume] )) identifier[img_ref] = keyword[None] keyword[if] identifier[vm_] [ literal[string] ]. identifier[startswith] ( literal[string] ) keyword[or] identifier[vm_] . identifier[get] ( literal[string] )== literal[string] : keyword[if] identifier[vm_] [ literal[string] ]. identifier[startswith] ( literal[string] ): identifier[source_image] = identifier[VirtualHardDisk] ( identifier[vm_] [ literal[string] ]) keyword[else] : identifier[source_image] = keyword[None] keyword[if] literal[string] keyword[in] identifier[vm_] [ literal[string] ]: identifier[img_pub] , identifier[img_off] , identifier[img_sku] , identifier[img_ver] = identifier[vm_] [ literal[string] ]. identifier[split] ( literal[string] ) identifier[img_ref] = identifier[ImageReference] ( identifier[publisher] = identifier[img_pub] , identifier[offer] = identifier[img_off] , identifier[sku] = identifier[img_sku] , identifier[version] = identifier[img_ver] , ) keyword[elif] identifier[vm_] [ literal[string] ]. identifier[startswith] ( literal[string] ): identifier[img_ref] = identifier[ImageReference] ( identifier[id] = identifier[vm_] [ literal[string] ]) keyword[if] identifier[win_installer] : identifier[os_type] = literal[string] keyword[else] : identifier[os_type] = literal[string] identifier[os_disk] = identifier[OSDisk] ( identifier[caching] = identifier[CachingTypes] . identifier[none] , identifier[create_option] = identifier[DiskCreateOptionTypes] . identifier[from_image] , identifier[name] = identifier[disk_name] , identifier[vhd] = identifier[VirtualHardDisk] ( literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ], identifier[storage_endpoint_suffix] , identifier[disk_name] , ), ), identifier[os_type] = identifier[os_type] , identifier[image] = identifier[source_image] , identifier[disk_size_gb] = identifier[vm_] . identifier[get] ( literal[string] ) ) keyword[else] : identifier[source_image] = keyword[None] identifier[os_type] = keyword[None] identifier[os_disk] = identifier[OSDisk] ( identifier[create_option] = identifier[DiskCreateOptionTypes] . identifier[from_image] , identifier[disk_size_gb] = identifier[vm_] . 
identifier[get] ( literal[string] ) ) keyword[if] literal[string] keyword[in] identifier[vm_] [ literal[string] ]: identifier[img_pub] , identifier[img_off] , identifier[img_sku] , identifier[img_ver] = identifier[vm_] [ literal[string] ]. identifier[split] ( literal[string] ) identifier[img_ref] = identifier[ImageReference] ( identifier[publisher] = identifier[img_pub] , identifier[offer] = identifier[img_off] , identifier[sku] = identifier[img_sku] , identifier[version] = identifier[img_ver] , ) keyword[elif] identifier[vm_] [ literal[string] ]. identifier[startswith] ( literal[string] ): identifier[img_ref] = identifier[ImageReference] ( identifier[id] = identifier[vm_] [ literal[string] ]) identifier[userdata_file] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] , identifier[default] = keyword[None] ) identifier[userdata] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] , identifier[default] = keyword[None] ) identifier[userdata_template] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] , identifier[default] = keyword[None] ) keyword[if] identifier[userdata_file] : keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[userdata_file] ): keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[userdata_file] , literal[string] ) keyword[as] identifier[fh_] : identifier[userdata] = identifier[fh_] . identifier[read] () keyword[if] identifier[userdata] keyword[and] identifier[userdata_template] : identifier[userdata_sendkeys] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[search_global] = keyword[False] , identifier[default] = keyword[None] ) keyword[if] identifier[userdata_sendkeys] : identifier[vm_] [ literal[string] ], identifier[vm_] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[cloud] . identifier[gen_keys] ( identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] ) ) identifier[key_id] = identifier[vm_] . identifier[get] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[vm_] : identifier[key_id] = literal[string] . identifier[join] ([ identifier[key_id] , identifier[vm_] [ literal[string] ]]) identifier[salt] . identifier[utils] . identifier[cloud] . identifier[accept_key] ( identifier[__opts__] [ literal[string] ], identifier[vm_] [ literal[string] ], identifier[key_id] ) identifier[userdata] = identifier[salt] . identifier[utils] . identifier[cloud] . identifier[userdata_template] ( identifier[__opts__] , identifier[vm_] , identifier[userdata] ) identifier[custom_extension] = keyword[None] keyword[if] identifier[userdata] keyword[is] keyword[not] keyword[None] keyword[or] identifier[userdata_file] keyword[is] keyword[not] keyword[None] : keyword[try] : keyword[if] identifier[win_installer] : identifier[publisher] = literal[string] identifier[virtual_machine_extension_type] = literal[string] identifier[type_handler_version] = literal[string] keyword[if] identifier[userdata_file] keyword[and] identifier[userdata_file] . 
identifier[endswith] ( literal[string] ): identifier[command_prefix] = literal[string] keyword[else] : identifier[command_prefix] = literal[string] keyword[else] : identifier[publisher] = literal[string] identifier[virtual_machine_extension_type] = literal[string] identifier[type_handler_version] = literal[string] identifier[command_prefix] = literal[string] identifier[settings] ={} keyword[if] identifier[userdata] : identifier[settings] [ literal[string] ]= identifier[userdata] keyword[elif] identifier[userdata_file] . identifier[startswith] ( literal[string] ): identifier[settings] [ literal[string] ]=[ identifier[userdata_file] ] identifier[settings] [ literal[string] ]= identifier[command_prefix] + literal[string] + identifier[userdata_file] [ identifier[userdata_file] . identifier[rfind] ( literal[string] )+ literal[int] :] identifier[custom_extension] ={ literal[string] : identifier[vm_] [ literal[string] ], literal[string] : identifier[vm_] [ literal[string] ], literal[string] : identifier[vm_] [ literal[string] ]+ literal[string] , literal[string] : identifier[vm_] [ literal[string] ], literal[string] : identifier[publisher] , literal[string] : identifier[virtual_machine_extension_type] , literal[string] : identifier[type_handler_version] , literal[string] : keyword[True] , literal[string] : identifier[settings] , literal[string] : keyword[None] } keyword[except] identifier[Exception] keyword[as] identifier[exc] : identifier[log] . identifier[exception] ( literal[string] , identifier[exc] ) identifier[params] = identifier[VirtualMachine] ( identifier[location] = identifier[vm_] [ literal[string] ], identifier[plan] = keyword[None] , identifier[hardware_profile] = identifier[HardwareProfile] ( identifier[vm_size] = identifier[getattr] ( identifier[VirtualMachineSizeTypes] , identifier[vm_] [ literal[string] ]. identifier[lower] () ), ), identifier[storage_profile] = identifier[StorageProfile] ( identifier[os_disk] = identifier[os_disk] , identifier[data_disks] = identifier[data_disks] , identifier[image_reference] = identifier[img_ref] , ), identifier[os_profile] = identifier[OSProfile] ( identifier[admin_username] = identifier[vm_username] , identifier[computer_name] = identifier[vm_] [ literal[string] ], ** identifier[os_kwargs] ), identifier[network_profile] = identifier[NetworkProfile] ( identifier[network_interfaces] =[ identifier[NetworkInterfaceReference] ( identifier[vm_] [ literal[string] ]), ], ), identifier[availability_set] = identifier[availability_set] , ) identifier[__utils__] [ literal[string] ]( literal[string] , literal[string] , literal[string] . identifier[format] ( identifier[vm_] [ literal[string] ]), identifier[args] = identifier[__utils__] [ literal[string] ]( literal[string] , identifier[vm_] , [ literal[string] , literal[string] , literal[string] , literal[string] ] ), identifier[sock_dir] = identifier[__opts__] [ literal[string] ], identifier[transport] = identifier[__opts__] [ literal[string] ] ) keyword[try] : identifier[vm_create] = identifier[compconn] . identifier[virtual_machines] . identifier[create_or_update] ( identifier[resource_group_name] = identifier[vm_] [ literal[string] ], identifier[vm_name] = identifier[vm_] [ literal[string] ], identifier[parameters] = identifier[params] ) identifier[vm_create] . identifier[wait] () identifier[vm_result] = identifier[vm_create] . identifier[result] () identifier[vm_result] = identifier[vm_result] . 
identifier[as_dict] () keyword[if] identifier[custom_extension] : identifier[create_or_update_vmextension] ( identifier[kwargs] = identifier[custom_extension] ) keyword[except] identifier[CloudError] keyword[as] identifier[exc] : identifier[__utils__] [ literal[string] ]( literal[string] , identifier[exc] . identifier[message] ) identifier[vm_result] ={} keyword[return] identifier[vm_result]
def request_instance(vm_): """ Request a VM from Azure. """ compconn = get_conn(client_type='compute') # pylint: disable=invalid-name CachingTypes = getattr(compute_models, 'CachingTypes') # pylint: disable=invalid-name DataDisk = getattr(compute_models, 'DataDisk') # pylint: disable=invalid-name DiskCreateOptionTypes = getattr(compute_models, 'DiskCreateOptionTypes') # pylint: disable=invalid-name HardwareProfile = getattr(compute_models, 'HardwareProfile') # pylint: disable=invalid-name ImageReference = getattr(compute_models, 'ImageReference') # pylint: disable=invalid-name LinuxConfiguration = getattr(compute_models, 'LinuxConfiguration') # pylint: disable=invalid-name SshConfiguration = getattr(compute_models, 'SshConfiguration') # pylint: disable=invalid-name SshPublicKey = getattr(compute_models, 'SshPublicKey') # pylint: disable=invalid-name NetworkInterfaceReference = getattr(compute_models, 'NetworkInterfaceReference') # pylint: disable=invalid-name NetworkProfile = getattr(compute_models, 'NetworkProfile') # pylint: disable=invalid-name OSDisk = getattr(compute_models, 'OSDisk') # pylint: disable=invalid-name OSProfile = getattr(compute_models, 'OSProfile') # pylint: disable=invalid-name StorageProfile = getattr(compute_models, 'StorageProfile') # pylint: disable=invalid-name VirtualHardDisk = getattr(compute_models, 'VirtualHardDisk') # pylint: disable=invalid-name VirtualMachine = getattr(compute_models, 'VirtualMachine') # pylint: disable=invalid-name VirtualMachineSizeTypes = getattr(compute_models, 'VirtualMachineSizeTypes') subscription_id = config.get_cloud_config_value('subscription_id', get_configured_provider(), __opts__, search_global=False) if vm_.get('driver') is None: vm_['driver'] = 'azurearm' # depends on [control=['if'], data=[]] if vm_.get('location') is None: vm_['location'] = get_location() # depends on [control=['if'], data=[]] if vm_.get('resource_group') is None: vm_['resource_group'] = config.get_cloud_config_value('resource_group', vm_, __opts__, search_global=True) # depends on [control=['if'], data=[]] if vm_.get('name') is None: vm_['name'] = config.get_cloud_config_value('name', vm_, __opts__, search_global=True) # depends on [control=['if'], data=[]] # pylint: disable=unused-variable (iface_data, public_ips, private_ips) = create_network_interface(call='action', kwargs=vm_) vm_['iface_id'] = iface_data['id'] disk_name = '{0}-vol0'.format(vm_['name']) vm_username = config.get_cloud_config_value('ssh_username', vm_, __opts__, search_global=True, default=config.get_cloud_config_value('win_username', vm_, __opts__, search_global=True)) ssh_publickeyfile_contents = None ssh_publickeyfile = config.get_cloud_config_value('ssh_publickeyfile', vm_, __opts__, search_global=False, default=None) if ssh_publickeyfile is not None: try: with salt.utils.files.fopen(ssh_publickeyfile, 'r') as spkc_: ssh_publickeyfile_contents = spkc_.read() # depends on [control=['with'], data=['spkc_']] # depends on [control=['try'], data=[]] except Exception as exc: raise SaltCloudConfigError("Failed to read ssh publickey file '{0}': {1}".format(ssh_publickeyfile, exc.args[-1])) # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=['ssh_publickeyfile']] disable_password_authentication = config.get_cloud_config_value('disable_password_authentication', vm_, __opts__, search_global=False, default=False) vm_password = salt.utils.stringutils.to_str(config.get_cloud_config_value('ssh_password', vm_, __opts__, search_global=True, 
default=config.get_cloud_config_value('win_password', vm_, __opts__, search_global=True))) os_kwargs = {} win_installer = config.get_cloud_config_value('win_installer', vm_, __opts__, search_global=True) if not win_installer and ssh_publickeyfile_contents is not None: sshpublickey = SshPublicKey(key_data=ssh_publickeyfile_contents, path='/home/{0}/.ssh/authorized_keys'.format(vm_username)) sshconfiguration = SshConfiguration(public_keys=[sshpublickey]) linuxconfiguration = LinuxConfiguration(disable_password_authentication=disable_password_authentication, ssh=sshconfiguration) os_kwargs['linux_configuration'] = linuxconfiguration # depends on [control=['if'], data=[]] if win_installer or (vm_password is not None and (not disable_password_authentication)): if not isinstance(vm_password, str): raise SaltCloudSystemExit('The admin password must be a string.') # depends on [control=['if'], data=[]] if len(vm_password) < 8 or len(vm_password) > 123: raise SaltCloudSystemExit('The admin password must be between 8-123 characters long.') # depends on [control=['if'], data=[]] complexity = 0 if any((char.isdigit() for char in vm_password)): complexity += 1 # depends on [control=['if'], data=[]] if any((char.isupper() for char in vm_password)): complexity += 1 # depends on [control=['if'], data=[]] if any((char.islower() for char in vm_password)): complexity += 1 # depends on [control=['if'], data=[]] if any((char in string.punctuation for char in vm_password)): complexity += 1 # depends on [control=['if'], data=[]] if complexity < 3: raise SaltCloudSystemExit('The admin password must contain at least 3 of the following types: upper, lower, digits, special characters') # depends on [control=['if'], data=[]] os_kwargs['admin_password'] = vm_password # depends on [control=['if'], data=[]] availability_set = config.get_cloud_config_value('availability_set', vm_, __opts__, search_global=False, default=None) if availability_set is not None and isinstance(availability_set, six.string_types): availability_set = {'id': '/subscriptions/{0}/resourceGroups/{1}/providers/Microsoft.Compute/availabilitySets/{2}'.format(subscription_id, vm_['resource_group'], availability_set)} # depends on [control=['if'], data=[]] else: availability_set = None cloud_env = _get_cloud_environment() storage_endpoint_suffix = cloud_env.suffixes.storage_endpoint if isinstance(vm_.get('volumes'), six.string_types): volumes = salt.utils.yaml.safe_load(vm_['volumes']) # depends on [control=['if'], data=[]] else: volumes = vm_.get('volumes') data_disks = None if isinstance(volumes, list): data_disks = [] # depends on [control=['if'], data=[]] else: volumes = [] lun = 0 luns = [] for volume in volumes: if isinstance(volume, six.string_types): volume = {'name': volume} # depends on [control=['if'], data=[]] volume.setdefault('name', volume.get('name', volume.get('name', '{0}-datadisk{1}'.format(vm_['name'], six.text_type(lun))))) volume.setdefault('disk_size_gb', volume.get('logical_disk_size_in_gb', volume.get('size', 100))) # Old kwarg was host_caching, new name is caching volume.setdefault('caching', volume.get('host_caching', 'ReadOnly')) while lun in luns: lun += 1 if lun > 15: log.error('Maximum lun count has been reached') break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['lun']] volume.setdefault('lun', lun) lun += 1 # The default vhd is {vm_name}-datadisk{lun}.vhd if 'media_link' in volume: volume['vhd'] = VirtualHardDisk(volume['media_link']) del volume['media_link'] # depends on [control=['if'], 
data=['volume']] elif volume.get('vhd') == 'unmanaged': volume['vhd'] = VirtualHardDisk('https://{0}.blob.{1}/vhds/{2}-datadisk{3}.vhd'.format(vm_['storage_account'], storage_endpoint_suffix, vm_['name'], volume['lun'])) # depends on [control=['if'], data=[]] elif 'vhd' in volume: volume['vhd'] = VirtualHardDisk(volume['vhd']) # depends on [control=['if'], data=['volume']] if 'image' in volume: volume['create_option'] = 'from_image' # depends on [control=['if'], data=['volume']] elif 'attach' in volume: volume['create_option'] = 'attach' # depends on [control=['if'], data=['volume']] else: volume['create_option'] = 'empty' data_disks.append(DataDisk(**volume)) # depends on [control=['for'], data=['volume']] img_ref = None if vm_['image'].startswith('http') or vm_.get('vhd') == 'unmanaged': if vm_['image'].startswith('http'): source_image = VirtualHardDisk(vm_['image']) # depends on [control=['if'], data=[]] else: source_image = None if '|' in vm_['image']: (img_pub, img_off, img_sku, img_ver) = vm_['image'].split('|') img_ref = ImageReference(publisher=img_pub, offer=img_off, sku=img_sku, version=img_ver) # depends on [control=['if'], data=[]] elif vm_['image'].startswith('/subscriptions'): img_ref = ImageReference(id=vm_['image']) # depends on [control=['if'], data=[]] if win_installer: os_type = 'Windows' # depends on [control=['if'], data=[]] else: os_type = 'Linux' os_disk = OSDisk(caching=CachingTypes.none, create_option=DiskCreateOptionTypes.from_image, name=disk_name, vhd=VirtualHardDisk('https://{0}.blob.{1}/vhds/{2}.vhd'.format(vm_['storage_account'], storage_endpoint_suffix, disk_name)), os_type=os_type, image=source_image, disk_size_gb=vm_.get('os_disk_size_gb')) # depends on [control=['if'], data=[]] else: source_image = None os_type = None os_disk = OSDisk(create_option=DiskCreateOptionTypes.from_image, disk_size_gb=vm_.get('os_disk_size_gb')) if '|' in vm_['image']: (img_pub, img_off, img_sku, img_ver) = vm_['image'].split('|') img_ref = ImageReference(publisher=img_pub, offer=img_off, sku=img_sku, version=img_ver) # depends on [control=['if'], data=[]] elif vm_['image'].startswith('/subscriptions'): img_ref = ImageReference(id=vm_['image']) # depends on [control=['if'], data=[]] userdata_file = config.get_cloud_config_value('userdata_file', vm_, __opts__, search_global=False, default=None) userdata = config.get_cloud_config_value('userdata', vm_, __opts__, search_global=False, default=None) userdata_template = config.get_cloud_config_value('userdata_template', vm_, __opts__, search_global=False, default=None) if userdata_file: if os.path.exists(userdata_file): with salt.utils.files.fopen(userdata_file, 'r') as fh_: userdata = fh_.read() # depends on [control=['with'], data=['fh_']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if userdata and userdata_template: userdata_sendkeys = config.get_cloud_config_value('userdata_sendkeys', vm_, __opts__, search_global=False, default=None) if userdata_sendkeys: (vm_['priv_key'], vm_['pub_key']) = salt.utils.cloud.gen_keys(config.get_cloud_config_value('keysize', vm_, __opts__)) key_id = vm_.get('name') if 'append_domain' in vm_: key_id = '.'.join([key_id, vm_['append_domain']]) # depends on [control=['if'], data=['vm_']] salt.utils.cloud.accept_key(__opts__['pki_dir'], vm_['pub_key'], key_id) # depends on [control=['if'], data=[]] userdata = salt.utils.cloud.userdata_template(__opts__, vm_, userdata) # depends on [control=['if'], data=[]] custom_extension = None if userdata is not None or userdata_file 
is not None: try: if win_installer: publisher = 'Microsoft.Compute' virtual_machine_extension_type = 'CustomScriptExtension' type_handler_version = '1.8' if userdata_file and userdata_file.endswith('.ps1'): command_prefix = 'powershell -ExecutionPolicy Unrestricted -File ' # depends on [control=['if'], data=[]] else: command_prefix = '' # depends on [control=['if'], data=[]] else: publisher = 'Microsoft.Azure.Extensions' virtual_machine_extension_type = 'CustomScript' type_handler_version = '2.0' command_prefix = '' settings = {} if userdata: settings['commandToExecute'] = userdata # depends on [control=['if'], data=[]] elif userdata_file.startswith('http'): settings['fileUris'] = [userdata_file] settings['commandToExecute'] = command_prefix + './' + userdata_file[userdata_file.rfind('/') + 1:] # depends on [control=['if'], data=[]] custom_extension = {'resource_group': vm_['resource_group'], 'virtual_machine_name': vm_['name'], 'extension_name': vm_['name'] + '_custom_userdata_script', 'location': vm_['location'], 'publisher': publisher, 'virtual_machine_extension_type': virtual_machine_extension_type, 'type_handler_version': type_handler_version, 'auto_upgrade_minor_version': True, 'settings': settings, 'protected_settings': None} # depends on [control=['try'], data=[]] except Exception as exc: log.exception('Failed to encode userdata: %s', exc) # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=[]] params = VirtualMachine(location=vm_['location'], plan=None, hardware_profile=HardwareProfile(vm_size=getattr(VirtualMachineSizeTypes, vm_['size'].lower())), storage_profile=StorageProfile(os_disk=os_disk, data_disks=data_disks, image_reference=img_ref), os_profile=OSProfile(admin_username=vm_username, computer_name=vm_['name'], **os_kwargs), network_profile=NetworkProfile(network_interfaces=[NetworkInterfaceReference(vm_['iface_id'])]), availability_set=availability_set) __utils__['cloud.fire_event']('event', 'requesting instance', 'salt/cloud/{0}/requesting'.format(vm_['name']), args=__utils__['cloud.filter_event']('requesting', vm_, ['name', 'profile', 'provider', 'driver']), sock_dir=__opts__['sock_dir'], transport=__opts__['transport']) try: vm_create = compconn.virtual_machines.create_or_update(resource_group_name=vm_['resource_group'], vm_name=vm_['name'], parameters=params) vm_create.wait() vm_result = vm_create.result() vm_result = vm_result.as_dict() if custom_extension: create_or_update_vmextension(kwargs=custom_extension) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except CloudError as exc: __utils__['azurearm.log_cloud_error']('compute', exc.message) vm_result = {} # depends on [control=['except'], data=['exc']] return vm_result
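A minimal, self-contained sketch of the admin-password rule that request_instance enforces above (length 8-123 and at least three of the four character classes); the helper name password_meets_policy is hypothetical.

import string

def password_meets_policy(pw):
    # mirrors request_instance: length bounds plus a 3-of-4 character-class check
    if not 8 <= len(pw) <= 123:
        return False
    classes = (
        any(c.isdigit() for c in pw),              # digits
        any(c.isupper() for c in pw),              # upper case
        any(c.islower() for c in pw),              # lower case
        any(c in string.punctuation for c in pw),  # special characters
    )
    return sum(classes) >= 3

assert password_meets_policy('Sup3r-Secret')
assert not password_meets_policy('alllowercase')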
def config_from_url(u, **kwargs):

    """
    Returns a dict containing zmq configuration arguments parsed
    from an xbahn url

    Arguments:

        - u (urlparse.urlparse result)

    Returns:

        dict:

            - id (str): connection index key
            - typ_str (str): string representation of zmq socket type
            - typ (int): zmq socket type (PUB, SUB, REQ, REP, PUSH, PULL)
            - topic (str): subscription topic
            - url (str): url to use with zmq's bind function
    """

    path = u.path.lstrip("/").split("/")

    if len(path) > 2 or not path:
        raise AssertionError("zmq url format: zmq://<host>:<port>/<pub|sub>/<topic>")

    typ = path[0].upper()
    try:
        topic = path[1]
    except IndexError:
        topic = ''

    param = dict(urllib.parse.parse_qsl(u.query))

    #FIXME: should come from schema, maybe zmq+tcp:// ?
    transport = param.get("transport", "tcp")

    _id = "%s-%s-%s-%s" % (typ, topic, transport, u.netloc)

    if kwargs.get("prefix") is not None:
        _id = "%s-%s" % (kwargs.get("prefix"), _id)

    return {
        "id" : _id,
        "typ_str" : typ,
        "typ" : getattr(zmq, typ),
        "topic" : topic,
        "transport" : transport,
        "url" : "%s://%s" % (transport, u.netloc)
    }
def function[config_from_url, parameter[u]]: constant[ Returns dict containing zmq configuration arguments parsed from xbahn url Arguments: - u (urlparse.urlparse result) Returns: dict: - id (str): connection index key - typ_str (str): string representation of zmq socket type - typ (int): zmq socket type (PUB, SUB, REQ, REP, PUSH, PULL) - topic (str): subscription topic - url (str): url to use with zmq's bind function ] variable[path] assign[=] call[call[name[u].path.lstrip, parameter[constant[/]]].split, parameter[constant[/]]] if <ast.BoolOp object at 0x7da1b15b1090> begin[:] <ast.Raise object at 0x7da1b15b3c10> variable[typ] assign[=] call[call[name[path]][constant[0]].upper, parameter[]] <ast.Try object at 0x7da1b15b2470> variable[param] assign[=] call[name[dict], parameter[call[name[urllib].parse.parse_qsl, parameter[name[u].query]]]] variable[transport] assign[=] call[name[param].get, parameter[constant[transport], constant[tcp]]] variable[_id] assign[=] binary_operation[constant[%s-%s-%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b15b1750>, <ast.Name object at 0x7da1b15b2710>, <ast.Name object at 0x7da1b15b11e0>, <ast.Attribute object at 0x7da1b15b1330>]]] if compare[call[name[kwargs].get, parameter[constant[prefix]]] is_not constant[None]] begin[:] variable[_id] assign[=] binary_operation[constant[%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b15b24d0>, <ast.Name object at 0x7da1b15b3fd0>]]] return[dictionary[[<ast.Constant object at 0x7da1b15b1960>, <ast.Constant object at 0x7da1b15b25f0>, <ast.Constant object at 0x7da1b15b1a80>, <ast.Constant object at 0x7da1b15b1ed0>, <ast.Constant object at 0x7da1b15b3280>, <ast.Constant object at 0x7da1b15b14b0>], [<ast.Name object at 0x7da1b15b1270>, <ast.Name object at 0x7da1b15b3850>, <ast.Call object at 0x7da1b15b32e0>, <ast.Name object at 0x7da1b1351db0>, <ast.Name object at 0x7da1b1351d80>, <ast.BinOp object at 0x7da1b1352260>]]]
keyword[def] identifier[config_from_url] ( identifier[u] ,** identifier[kwargs] ): literal[string] identifier[path] = identifier[u] . identifier[path] . identifier[lstrip] ( literal[string] ). identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[path] )> literal[int] keyword[or] keyword[not] identifier[path] : keyword[raise] identifier[AssertionError] ( literal[string] ) identifier[typ] = identifier[path] [ literal[int] ]. identifier[upper] () keyword[try] : identifier[topic] = identifier[path] [ literal[int] ] keyword[except] identifier[IndexError] keyword[as] identifier[_] : identifier[topic] = literal[string] identifier[param] = identifier[dict] ( identifier[urllib] . identifier[parse] . identifier[parse_qsl] ( identifier[u] . identifier[query] )) identifier[transport] = identifier[param] . identifier[get] ( literal[string] , literal[string] ) identifier[_id] = literal[string] %( identifier[typ] , identifier[topic] , identifier[transport] , identifier[u] . identifier[netloc] ) keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ) keyword[is] keyword[not] keyword[None] : identifier[_id] = literal[string] %( identifier[kwargs] . identifier[get] ( literal[string] ), identifier[_id] ) keyword[return] { literal[string] : identifier[_id] , literal[string] : identifier[typ] , literal[string] : identifier[getattr] ( identifier[zmq] , identifier[typ] ), literal[string] : identifier[topic] , literal[string] : identifier[transport] , literal[string] : literal[string] %( identifier[transport] , identifier[u] . identifier[netloc] ) }
def config_from_url(u, **kwargs): """ Returns dict containing zmq configuration arguments parsed from xbahn url Arguments: - u (urlparse.urlparse result) Returns: dict: - id (str): connection index key - typ_str (str): string representation of zmq socket type - typ (int): zmq socket type (PUB, SUB, REQ, REP, PUSH, PULL) - topic (str): subscription topic - url (str): url to use with zmq's bind function """ path = u.path.lstrip('/').split('/') if len(path) > 2 or not path: raise AssertionError('zmq url format: zmq://<host>:<port>/<pub|sub>/<topic>') # depends on [control=['if'], data=[]] typ = path[0].upper() try: topic = path[1] # depends on [control=['try'], data=[]] except IndexError as _: topic = '' # depends on [control=['except'], data=[]] param = dict(urllib.parse.parse_qsl(u.query)) #FIXME: should come from schema, maybe zmq+tcp:// ? transport = param.get('transport', 'tcp') _id = '%s-%s-%s-%s' % (typ, topic, transport, u.netloc) if kwargs.get('prefix') is not None: _id = '%s-%s' % (kwargs.get('prefix'), _id) # depends on [control=['if'], data=[]] return {'id': _id, 'typ_str': typ, 'typ': getattr(zmq, typ), 'topic': topic, 'transport': transport, 'url': '%s://%s' % (transport, u.netloc)}
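A hedged usage sketch for config_from_url; it assumes pyzmq is installed (the function resolves the socket type via getattr(zmq, typ)) and that the URL follows the zmq://<host>:<port>/<pub|sub>/<topic> shape documented by the assertion.

import urllib.parse
import zmq  # required so getattr(zmq, 'SUB') resolves inside config_from_url

u = urllib.parse.urlparse('zmq://127.0.0.1:5555/sub/weather?transport=tcp')
cfg = config_from_url(u, prefix='worker')
# cfg['id']  -> 'worker-SUB-weather-tcp-127.0.0.1:5555'
# cfg['typ'] -> zmq.SUB
# cfg['url'] -> 'tcp://127.0.0.1:5555'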
def request_permission(cls, fine=True):
    """ Requests the permission and returns an async result that resolves
    to a boolean indicating whether the permission was granted.

    """
    app = AndroidApplication.instance()
    permission = (cls.ACCESS_FINE_PERMISSION
                  if fine else cls.ACCESS_COARSE_PERMISSION)
    f = app.create_future()

    def on_result(perms):
        app.set_future_result(f, perms[permission])

    app.request_permissions([permission]).then(on_result)

    return f
def function[request_permission, parameter[cls, fine]]: constant[ Requests permission and returns an async result that returns a boolean indicating if the permission was granted or denied. ] variable[app] assign[=] call[name[AndroidApplication].instance, parameter[]] variable[permission] assign[=] <ast.IfExp object at 0x7da18c4cd6f0> variable[f] assign[=] call[name[app].create_future, parameter[]] def function[on_result, parameter[perms]]: call[name[app].set_future_result, parameter[name[f], call[name[perms]][name[permission]]]] call[call[name[app].request_permissions, parameter[list[[<ast.Name object at 0x7da1b1b7ef20>]]]].then, parameter[name[on_result]]] return[name[f]]
keyword[def] identifier[request_permission] ( identifier[cls] , identifier[fine] = keyword[True] ): literal[string] identifier[app] = identifier[AndroidApplication] . identifier[instance] () identifier[permission] =( identifier[cls] . identifier[ACCESS_FINE_PERMISSION] keyword[if] identifier[fine] keyword[else] identifier[cls] . identifier[ACCESS_COARSE_PERMISSION] ) identifier[f] = identifier[app] . identifier[create_future] () keyword[def] identifier[on_result] ( identifier[perms] ): identifier[app] . identifier[set_future_result] ( identifier[f] , identifier[perms] [ identifier[permission] ]) identifier[app] . identifier[request_permissions] ([ identifier[permission] ]). identifier[then] ( identifier[on_result] ) keyword[return] identifier[f]
def request_permission(cls, fine=True): """ Requests permission and returns an async result that returns a boolean indicating if the permission was granted or denied. """ app = AndroidApplication.instance() permission = cls.ACCESS_FINE_PERMISSION if fine else cls.ACCESS_COARSE_PERMISSION f = app.create_future() def on_result(perms): app.set_future_result(f, perms[permission]) app.request_permissions([permission]).then(on_result) return f
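A hypothetical call site for request_permission; it assumes an enaml-native AndroidApplication event loop is running and that the returned future supports the same .then() chaining the method itself uses internally.

def on_permission(granted):
    print('fine location granted' if granted else 'fine location denied')

# LocationManager is a stand-in name for whichever class defines request_permission
LocationManager.request_permission(fine=True).then(on_permission)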
def is_isomorphic_to(self, other):
    """
    Returns True if all fields of the other struct are isomorphic to this
    struct's fields
    """
    return (isinstance(other, self.__class__)
            and len(self.fields) == len(other.fields)
            and all(a.is_isomorphic_to(b)
                    for a, b in zip(self.fields, other.fields)))
def function[is_isomorphic_to, parameter[self, other]]: constant[ Returns true if all fields of other struct are isomorphic to this struct's fields ] return[<ast.BoolOp object at 0x7da1b00cb160>]
keyword[def] identifier[is_isomorphic_to] ( identifier[self] , identifier[other] ): literal[string] keyword[return] ( identifier[isinstance] ( identifier[other] , identifier[self] . identifier[__class__] ) keyword[and] identifier[len] ( identifier[self] . identifier[fields] )== identifier[len] ( identifier[other] . identifier[fields] ) keyword[and] identifier[all] ( identifier[a] . identifier[is_isomorphic_to] ( identifier[b] ) keyword[for] identifier[a] , identifier[b] keyword[in] identifier[zip] ( identifier[self] . identifier[fields] , identifier[other] . identifier[fields] )))
def is_isomorphic_to(self, other): """ Returns true if all fields of other struct are isomorphic to this struct's fields """ return isinstance(other, self.__class__) and len(self.fields) == len(other.fields) and all((a.is_isomorphic_to(b) for (a, b) in zip(self.fields, other.fields)))
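A toy illustration of the recursive shape comparison; Leaf and Struct are hypothetical stand-ins for the real field classes, with Struct reusing the method body verbatim.

class Leaf:
    def __init__(self, typ):
        self.typ = typ

    def is_isomorphic_to(self, other):
        # leaves compare by declared type only
        return isinstance(other, Leaf) and self.typ == other.typ


class Struct:
    def __init__(self, fields):
        self.fields = fields

    def is_isomorphic_to(self, other):
        return (isinstance(other, self.__class__)
                and len(self.fields) == len(other.fields)
                and all(a.is_isomorphic_to(b)
                        for a, b in zip(self.fields, other.fields)))


assert Struct([Leaf(int), Leaf(str)]).is_isomorphic_to(Struct([Leaf(int), Leaf(str)]))
assert not Struct([Leaf(int)]).is_isomorphic_to(Struct([Leaf(str)]))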
def configure_widget_for_editing(self, widget):
    """ A widget has to be added to the editor; it is configured here
        in order to be conformant to the editor
    """
    if 'editor_varname' not in widget.attributes:
        return
    widget.onclick.do(self.on_widget_selection)

    #setup of the on_dropped function of the widget in order to manage the dragNdrop
    widget.__class__.on_dropped = on_dropped

    #drag properties
    #widget.style['resize'] = 'both'
    widget.style['overflow'] = 'auto'
    widget.attributes['draggable'] = 'true'

    widget.attributes['tabindex'] = str(self.tabindex)
    #if not 'position' in widget.style.keys():
    #    widget.style['position'] = 'absolute'
    #if not 'left' in widget.style.keys():
    #    widget.style['left'] = '1px'
    #if not 'top' in widget.style.keys():
    #    widget.style['top'] = '1px'
    self.tabindex += 1
def function[configure_widget_for_editing, parameter[self, widget]]: constant[ A widget have to be added to the editor, it is configured here in order to be conformant to the editor ] if <ast.UnaryOp object at 0x7da207f01c30> begin[:] return[None] call[name[widget].onclick.do, parameter[name[self].on_widget_selection]] name[widget].__class__.on_dropped assign[=] name[on_dropped] call[name[widget].style][constant[overflow]] assign[=] constant[auto] call[name[widget].attributes][constant[draggable]] assign[=] constant[true] call[name[widget].attributes][constant[tabindex]] assign[=] call[name[str], parameter[name[self].tabindex]] <ast.AugAssign object at 0x7da207f00130>
keyword[def] identifier[configure_widget_for_editing] ( identifier[self] , identifier[widget] ): literal[string] keyword[if] keyword[not] literal[string] keyword[in] identifier[widget] . identifier[attributes] : keyword[return] identifier[widget] . identifier[onclick] . identifier[do] ( identifier[self] . identifier[on_widget_selection] ) identifier[widget] . identifier[__class__] . identifier[on_dropped] = identifier[on_dropped] identifier[widget] . identifier[style] [ literal[string] ]= literal[string] identifier[widget] . identifier[attributes] [ literal[string] ]= literal[string] identifier[widget] . identifier[attributes] [ literal[string] ]= identifier[str] ( identifier[self] . identifier[tabindex] ) identifier[self] . identifier[tabindex] += literal[int]
def configure_widget_for_editing(self, widget): """ A widget have to be added to the editor, it is configured here in order to be conformant to the editor """ if not 'editor_varname' in widget.attributes: return # depends on [control=['if'], data=[]] widget.onclick.do(self.on_widget_selection) #setup of the on_dropped function of the widget in order to manage the dragNdrop widget.__class__.on_dropped = on_dropped #drag properties #widget.style['resize'] = 'both' widget.style['overflow'] = 'auto' widget.attributes['draggable'] = 'true' widget.attributes['tabindex'] = str(self.tabindex) #if not 'position' in widget.style.keys(): # widget.style['position'] = 'absolute' #if not 'left' in widget.style.keys(): # widget.style['left'] = '1px' #if not 'top' in widget.style.keys(): # widget.style['top'] = '1px' self.tabindex += 1
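A hedged sketch of driving the method from the editor side; it assumes a remi-style container whose children mapping holds child widget instances (plain text nodes are skipped).

def configure_all(editor, container):
    # prepare every descendant widget for selection and drag/drop editing
    for child in container.children.values():
        if not hasattr(child, 'attributes'):
            continue  # skip plain text nodes
        editor.configure_widget_for_editing(child)
        configure_all(editor, child)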
def check_parallel_run(self):  # pragma: no cover, not with unit tests...
    """Check (in the pid file) if there isn't already a daemon running.
    If yes and do_replace: kill it.
    Keep in self.fpid the File object to the pid file. Will be used by writepid.

    :return: None
    """
    # TODO: handle another daemon running on nt
    if os.name == 'nt':  # pragma: no cover, not currently tested with Windows...
        logger.warning("The parallel daemon check is not available on Windows")
        self.__open_pidfile(write=True)
        return

    # First open the pid file in open mode
    self.__open_pidfile()
    try:
        pid_var = self.fpid.readline().strip(' \r\n')
        if pid_var:
            pid = int(pid_var)
            logger.info("Found an existing pid (%s): '%s'", self.pid_filename, pid_var)
        else:
            logger.debug("Did not find an existing pid: %s", self.pid_filename)
            return
    except (IOError, ValueError):
        logger.warning("PID file is empty or has invalid content: %s", self.pid_filename)
        return

    if pid == os.getpid():
        self.pid = pid
        return

    try:
        logger.debug("Testing if the process is running: '%s'", pid)
        os.kill(pid, 0)
    except OSError:
        # consider any exception as a stale pid file.
        # this includes:
        # * PermissionError when a process with the same pid exists but is executed by another user
        # * ProcessLookupError: [Errno 3] No such process
        self.pre_log.append(("DEBUG", "No former instance to replace"))
        logger.info("A stale pid file exists, reusing the same file")
        return

    if not self.do_replace:
        self.exit_on_error("A valid pid file still exists (pid=%s) and "
                           "I am not allowed to replace it. Exiting!" % pid, exit_code=3)

    self.pre_log.append(("DEBUG", "Replacing former instance: %d" % pid))
    try:
        pgid = os.getpgid(pid)
        # SIGQUIT to terminate and dump core
        os.killpg(pgid, signal.SIGQUIT)
    except os.error as err:
        if err.errno != errno.ESRCH:
            raise

    self.fpid.close()
    # TODO: give the previous instance some time to finish?
    time.sleep(1)
    # we must also reopen the pid file in write mode
    # because the previous instance should have deleted it!!
    self.__open_pidfile(write=True)
def function[check_parallel_run, parameter[self]]: constant[Check (in pid file) if there isn't already a daemon running. If yes and do_replace: kill it. Keep in self.fpid the File object to the pid file. Will be used by writepid. :return: None ] if compare[name[os].name equal[==] constant[nt]] begin[:] call[name[logger].warning, parameter[constant[The parallel daemon check is not available on Windows]]] call[name[self].__open_pidfile, parameter[]] return[None] call[name[self].__open_pidfile, parameter[]] <ast.Try object at 0x7da2045670a0> if compare[name[pid] equal[==] call[name[os].getpid, parameter[]]] begin[:] name[self].pid assign[=] name[pid] return[None] <ast.Try object at 0x7da204566a70> if <ast.UnaryOp object at 0x7da204564b80> begin[:] call[name[self].exit_on_error, parameter[binary_operation[constant[A valid pid file still exists (pid=%s) and I am not allowed to replace. Exiting!] <ast.Mod object at 0x7da2590d6920> name[pid]]]] call[name[self].pre_log.append, parameter[tuple[[<ast.Constant object at 0x7da204567e20>, <ast.BinOp object at 0x7da2045664d0>]]]] <ast.Try object at 0x7da2045648e0> call[name[self].fpid.close, parameter[]] call[name[time].sleep, parameter[constant[1]]] call[name[self].__open_pidfile, parameter[]]
keyword[def] identifier[check_parallel_run] ( identifier[self] ): literal[string] keyword[if] identifier[os] . identifier[name] == literal[string] : identifier[logger] . identifier[warning] ( literal[string] ) identifier[self] . identifier[__open_pidfile] ( identifier[write] = keyword[True] ) keyword[return] identifier[self] . identifier[__open_pidfile] () keyword[try] : identifier[pid_var] = identifier[self] . identifier[fpid] . identifier[readline] (). identifier[strip] ( literal[string] ) keyword[if] identifier[pid_var] : identifier[pid] = identifier[int] ( identifier[pid_var] ) identifier[logger] . identifier[info] ( literal[string] , identifier[self] . identifier[pid_filename] , identifier[pid_var] ) keyword[else] : identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[pid_filename] ) keyword[return] keyword[except] ( identifier[IOError] , identifier[ValueError] ) keyword[as] identifier[err] : identifier[logger] . identifier[warning] ( literal[string] , identifier[self] . identifier[pid_filename] ) keyword[return] keyword[if] identifier[pid] == identifier[os] . identifier[getpid] (): identifier[self] . identifier[pid] = identifier[pid] keyword[return] keyword[try] : identifier[logger] . identifier[debug] ( literal[string] , identifier[pid] ) identifier[os] . identifier[kill] ( identifier[pid] , literal[int] ) keyword[except] identifier[OSError] : identifier[self] . identifier[pre_log] . identifier[append] (( literal[string] , literal[string] )) identifier[logger] . identifier[info] ( literal[string] ) keyword[return] keyword[if] keyword[not] identifier[self] . identifier[do_replace] : identifier[self] . identifier[exit_on_error] ( literal[string] literal[string] % identifier[pid] , identifier[exit_code] = literal[int] ) identifier[self] . identifier[pre_log] . identifier[append] (( literal[string] , literal[string] % identifier[pid] )) keyword[try] : identifier[pgid] = identifier[os] . identifier[getpgid] ( identifier[pid] ) identifier[os] . identifier[killpg] ( identifier[pgid] , identifier[signal] . identifier[SIGQUIT] ) keyword[except] identifier[os] . identifier[error] keyword[as] identifier[err] : keyword[if] identifier[err] . identifier[errno] != identifier[errno] . identifier[ESRCH] : keyword[raise] identifier[self] . identifier[fpid] . identifier[close] () identifier[time] . identifier[sleep] ( literal[int] ) identifier[self] . identifier[__open_pidfile] ( identifier[write] = keyword[True] )
def check_parallel_run(self): # pragma: no cover, not with unit tests... "Check (in pid file) if there isn't already a daemon running.\n If yes and do_replace: kill it.\n Keep in self.fpid the File object to the pid file. Will be used by writepid.\n\n :return: None\n " # TODO: other daemon run on nt if os.name == 'nt': # pragma: no cover, not currently tested with Windows... logger.warning('The parallel daemon check is not available on Windows') self.__open_pidfile(write=True) return # depends on [control=['if'], data=[]] # First open the pid file in open mode self.__open_pidfile() try: pid_var = self.fpid.readline().strip(' \r\n') if pid_var: pid = int(pid_var) logger.info("Found an existing pid (%s): '%s'", self.pid_filename, pid_var) # depends on [control=['if'], data=[]] else: logger.debug('Not found an existing pid: %s', self.pid_filename) return # depends on [control=['try'], data=[]] except (IOError, ValueError) as err: logger.warning('PID file is empty or has an invalid content: %s', self.pid_filename) return # depends on [control=['except'], data=[]] if pid == os.getpid(): self.pid = pid return # depends on [control=['if'], data=['pid']] try: logger.debug("Testing if the process is running: '%s'", pid) os.kill(pid, 0) # depends on [control=['try'], data=[]] except OSError: # consider any exception as a stale pid file. # this includes : # * PermissionError when a process with same pid exists but is executed by another user # * ProcessLookupError: [Errno 3] No such process self.pre_log.append(('DEBUG', 'No former instance to replace')) logger.info('A stale pid file exists, reusing the same file') return # depends on [control=['except'], data=[]] if not self.do_replace: self.exit_on_error('A valid pid file still exists (pid=%s) and I am not allowed to replace. Exiting!' % pid, exit_code=3) # depends on [control=['if'], data=[]] self.pre_log.append(('DEBUG', 'Replacing former instance: %d' % pid)) try: pgid = os.getpgid(pid) # SIGQUIT to terminate and dump core os.killpg(pgid, signal.SIGQUIT) # depends on [control=['try'], data=[]] except os.error as err: if err.errno != errno.ESRCH: raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['err']] self.fpid.close() # TODO: give some time to wait that previous instance finishes? time.sleep(1) # we must also reopen the pid file in write mode # because the previous instance should have deleted it!! self.__open_pidfile(write=True)
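The liveness probe used above, isolated into a helper: signal 0 performs the existence and permission checks without delivering a signal, and any OSError is treated as a stale pid, matching the method's logic.

import os

def pid_is_running(pid):
    try:
        os.kill(pid, 0)
    except OSError:
        # ProcessLookupError (no such process) or PermissionError (another
        # user's process); check_parallel_run treats both as a stale pid file
        return False
    return True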
def setShowGrid( self, state ): """ Sets whether or not this delegate should draw its grid lines. :param state | <bool> """ delegate = self.itemDelegate() if ( isinstance(delegate, XTreeWidgetDelegate) ): delegate.setShowGrid(state)
def function[setShowGrid, parameter[self, state]]: constant[ Sets whether or not this delegate should draw its grid lines. :param state | <bool> ] variable[delegate] assign[=] call[name[self].itemDelegate, parameter[]] if call[name[isinstance], parameter[name[delegate], name[XTreeWidgetDelegate]]] begin[:] call[name[delegate].setShowGrid, parameter[name[state]]]
keyword[def] identifier[setShowGrid] ( identifier[self] , identifier[state] ): literal[string] identifier[delegate] = identifier[self] . identifier[itemDelegate] () keyword[if] ( identifier[isinstance] ( identifier[delegate] , identifier[XTreeWidgetDelegate] )): identifier[delegate] . identifier[setShowGrid] ( identifier[state] )
def setShowGrid(self, state): """ Sets whether or not this delegate should draw its grid lines. :param state | <bool> """ delegate = self.itemDelegate() if isinstance(delegate, XTreeWidgetDelegate): delegate.setShowGrid(state) # depends on [control=['if'], data=[]]
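Hypothetical usage on an XTreeWidget-derived view; the call is a silent no-op when the installed item delegate is not an XTreeWidgetDelegate.

tree = XTreeWidget()     # assumes the projexui widget class is in scope
tree.setShowGrid(False)  # hide the delegate's grid lines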
def collect_split_adjustments(self, adjustments_for_sid, requested_qtr_data, dates, sid, sid_idx, sid_estimates, split_adjusted_asof_idx, pre_adjustments, post_adjustments, requested_split_adjusted_columns): """ Collect split adjustments for future quarters. Re-apply adjustments that would be overwritten by overwrites. Merge split adjustments with overwrites into the given dictionary of splits for the given sid. Parameters ---------- adjustments_for_sid : dict[str -> dict[int -> list]] The dictionary of adjustments to which splits need to be added. Initially it contains only overwrites. requested_qtr_data : pd.DataFrame The requested quarter data for each calendar date per sid. dates : pd.DatetimeIndex The calendar dates for which estimates data is requested. sid : int The sid for which adjustments need to be collected. sid_idx : int The index of `sid` in the adjusted array. sid_estimates : pd.DataFrame The raw estimates data for the given sid. split_adjusted_asof_idx : int The index in `dates` as-of which the data is split adjusted. pre_adjustments : tuple(list(float), list(int), pd.DatetimeIndex) The adjustment values and indexes in `dates` for adjustments that happened before the split-asof-date. post_adjustments : tuple(list(float), list(int), pd.DatetimeIndex) The adjustment values, indexes in `dates`, and timestamps for adjustments that happened after the split-asof-date. requested_split_adjusted_columns : list of str List of requested split adjusted column names. """ (pre_adjustments_dict, post_adjustments_dict) = self._collect_adjustments( requested_qtr_data, sid, sid_idx, sid_estimates, split_adjusted_asof_idx, pre_adjustments, post_adjustments, requested_split_adjusted_columns, ) for column_name in requested_split_adjusted_columns: for overwrite_ts in adjustments_for_sid[column_name]: # We need to cumulatively re-apply all adjustments up to the # split-adjusted-asof-date. We might not have any # pre-adjustments, so we should check for that. if overwrite_ts <= split_adjusted_asof_idx \ and pre_adjustments_dict: for split_ts in pre_adjustments_dict[column_name]: # The split has to have occurred during the span of # the overwrite. if split_ts < overwrite_ts: # Create new adjustments here so that we can # re-apply all applicable adjustments to ONLY # the dates being overwritten. adjustments_for_sid[ column_name ][overwrite_ts].extend([ Float64Multiply( 0, overwrite_ts - 1, sid_idx, sid_idx, adjustment.value ) for adjustment in pre_adjustments_dict[ column_name ][split_ts] ]) # After the split-adjusted-asof-date, we need to re-apply all # adjustments that occur after that date and within the # bounds of the overwrite. They need to be applied starting # from the first date and until an end date. The end date is # the date of the newest information we get about # `requested_quarter` that is >= `split_ts`, or if there is no # new knowledge before `overwrite_ts`, then it is the date # before `overwrite_ts`. else: # Overwrites happen at the first index of a new quarter, # so determine here which quarter that is. 
requested_quarter = requested_qtr_data[ SHIFTED_NORMALIZED_QTRS, sid ].iloc[overwrite_ts] for adjustment_value, date_index, timestamp in zip( *post_adjustments ): if split_adjusted_asof_idx < date_index < overwrite_ts: # Assume the entire overwrite contains stale data upper_bound = overwrite_ts - 1 end_idx = self.determine_end_idx_for_adjustment( timestamp, dates, upper_bound, requested_quarter, sid_estimates ) adjustments_for_sid[ column_name ][overwrite_ts].append( Float64Multiply( 0, end_idx, sid_idx, sid_idx, adjustment_value ) ) self.merge_split_adjustments_with_overwrites( pre_adjustments_dict, post_adjustments_dict, adjustments_for_sid, requested_split_adjusted_columns )
def function[collect_split_adjustments, parameter[self, adjustments_for_sid, requested_qtr_data, dates, sid, sid_idx, sid_estimates, split_adjusted_asof_idx, pre_adjustments, post_adjustments, requested_split_adjusted_columns]]: constant[ Collect split adjustments for future quarters. Re-apply adjustments that would be overwritten by overwrites. Merge split adjustments with overwrites into the given dictionary of splits for the given sid. Parameters ---------- adjustments_for_sid : dict[str -> dict[int -> list]] The dictionary of adjustments to which splits need to be added. Initially it contains only overwrites. requested_qtr_data : pd.DataFrame The requested quarter data for each calendar date per sid. dates : pd.DatetimeIndex The calendar dates for which estimates data is requested. sid : int The sid for which adjustments need to be collected. sid_idx : int The index of `sid` in the adjusted array. sid_estimates : pd.DataFrame The raw estimates data for the given sid. split_adjusted_asof_idx : int The index in `dates` as-of which the data is split adjusted. pre_adjustments : tuple(list(float), list(int), pd.DatetimeIndex) The adjustment values and indexes in `dates` for adjustments that happened before the split-asof-date. post_adjustments : tuple(list(float), list(int), pd.DatetimeIndex) The adjustment values, indexes in `dates`, and timestamps for adjustments that happened after the split-asof-date. requested_split_adjusted_columns : list of str List of requested split adjusted column names. ] <ast.Tuple object at 0x7da1b2025990> assign[=] call[name[self]._collect_adjustments, parameter[name[requested_qtr_data], name[sid], name[sid_idx], name[sid_estimates], name[split_adjusted_asof_idx], name[pre_adjustments], name[post_adjustments], name[requested_split_adjusted_columns]]] for taget[name[column_name]] in starred[name[requested_split_adjusted_columns]] begin[:] for taget[name[overwrite_ts]] in starred[call[name[adjustments_for_sid]][name[column_name]]] begin[:] if <ast.BoolOp object at 0x7da1b20066e0> begin[:] for taget[name[split_ts]] in starred[call[name[pre_adjustments_dict]][name[column_name]]] begin[:] if compare[name[split_ts] less[<] name[overwrite_ts]] begin[:] call[call[call[name[adjustments_for_sid]][name[column_name]]][name[overwrite_ts]].extend, parameter[<ast.ListComp object at 0x7da1b20069b0>]] call[name[self].merge_split_adjustments_with_overwrites, parameter[name[pre_adjustments_dict], name[post_adjustments_dict], name[adjustments_for_sid], name[requested_split_adjusted_columns]]]
keyword[def] identifier[collect_split_adjustments] ( identifier[self] , identifier[adjustments_for_sid] , identifier[requested_qtr_data] , identifier[dates] , identifier[sid] , identifier[sid_idx] , identifier[sid_estimates] , identifier[split_adjusted_asof_idx] , identifier[pre_adjustments] , identifier[post_adjustments] , identifier[requested_split_adjusted_columns] ): literal[string] ( identifier[pre_adjustments_dict] , identifier[post_adjustments_dict] )= identifier[self] . identifier[_collect_adjustments] ( identifier[requested_qtr_data] , identifier[sid] , identifier[sid_idx] , identifier[sid_estimates] , identifier[split_adjusted_asof_idx] , identifier[pre_adjustments] , identifier[post_adjustments] , identifier[requested_split_adjusted_columns] , ) keyword[for] identifier[column_name] keyword[in] identifier[requested_split_adjusted_columns] : keyword[for] identifier[overwrite_ts] keyword[in] identifier[adjustments_for_sid] [ identifier[column_name] ]: keyword[if] identifier[overwrite_ts] <= identifier[split_adjusted_asof_idx] keyword[and] identifier[pre_adjustments_dict] : keyword[for] identifier[split_ts] keyword[in] identifier[pre_adjustments_dict] [ identifier[column_name] ]: keyword[if] identifier[split_ts] < identifier[overwrite_ts] : identifier[adjustments_for_sid] [ identifier[column_name] ][ identifier[overwrite_ts] ]. identifier[extend] ([ identifier[Float64Multiply] ( literal[int] , identifier[overwrite_ts] - literal[int] , identifier[sid_idx] , identifier[sid_idx] , identifier[adjustment] . identifier[value] ) keyword[for] identifier[adjustment] keyword[in] identifier[pre_adjustments_dict] [ identifier[column_name] ][ identifier[split_ts] ] ]) keyword[else] : identifier[requested_quarter] = identifier[requested_qtr_data] [ identifier[SHIFTED_NORMALIZED_QTRS] , identifier[sid] ]. identifier[iloc] [ identifier[overwrite_ts] ] keyword[for] identifier[adjustment_value] , identifier[date_index] , identifier[timestamp] keyword[in] identifier[zip] ( * identifier[post_adjustments] ): keyword[if] identifier[split_adjusted_asof_idx] < identifier[date_index] < identifier[overwrite_ts] : identifier[upper_bound] = identifier[overwrite_ts] - literal[int] identifier[end_idx] = identifier[self] . identifier[determine_end_idx_for_adjustment] ( identifier[timestamp] , identifier[dates] , identifier[upper_bound] , identifier[requested_quarter] , identifier[sid_estimates] ) identifier[adjustments_for_sid] [ identifier[column_name] ][ identifier[overwrite_ts] ]. identifier[append] ( identifier[Float64Multiply] ( literal[int] , identifier[end_idx] , identifier[sid_idx] , identifier[sid_idx] , identifier[adjustment_value] ) ) identifier[self] . identifier[merge_split_adjustments_with_overwrites] ( identifier[pre_adjustments_dict] , identifier[post_adjustments_dict] , identifier[adjustments_for_sid] , identifier[requested_split_adjusted_columns] )
def collect_split_adjustments(self, adjustments_for_sid, requested_qtr_data, dates, sid, sid_idx, sid_estimates, split_adjusted_asof_idx, pre_adjustments, post_adjustments, requested_split_adjusted_columns): """ Collect split adjustments for future quarters. Re-apply adjustments that would be overwritten by overwrites. Merge split adjustments with overwrites into the given dictionary of splits for the given sid. Parameters ---------- adjustments_for_sid : dict[str -> dict[int -> list]] The dictionary of adjustments to which splits need to be added. Initially it contains only overwrites. requested_qtr_data : pd.DataFrame The requested quarter data for each calendar date per sid. dates : pd.DatetimeIndex The calendar dates for which estimates data is requested. sid : int The sid for which adjustments need to be collected. sid_idx : int The index of `sid` in the adjusted array. sid_estimates : pd.DataFrame The raw estimates data for the given sid. split_adjusted_asof_idx : int The index in `dates` as-of which the data is split adjusted. pre_adjustments : tuple(list(float), list(int), pd.DatetimeIndex) The adjustment values and indexes in `dates` for adjustments that happened before the split-asof-date. post_adjustments : tuple(list(float), list(int), pd.DatetimeIndex) The adjustment values, indexes in `dates`, and timestamps for adjustments that happened after the split-asof-date. requested_split_adjusted_columns : list of str List of requested split adjusted column names. """ (pre_adjustments_dict, post_adjustments_dict) = self._collect_adjustments(requested_qtr_data, sid, sid_idx, sid_estimates, split_adjusted_asof_idx, pre_adjustments, post_adjustments, requested_split_adjusted_columns) for column_name in requested_split_adjusted_columns: for overwrite_ts in adjustments_for_sid[column_name]: # We need to cumulatively re-apply all adjustments up to the # split-adjusted-asof-date. We might not have any # pre-adjustments, so we should check for that. if overwrite_ts <= split_adjusted_asof_idx and pre_adjustments_dict: for split_ts in pre_adjustments_dict[column_name]: # The split has to have occurred during the span of # the overwrite. if split_ts < overwrite_ts: # Create new adjustments here so that we can # re-apply all applicable adjustments to ONLY # the dates being overwritten. adjustments_for_sid[column_name][overwrite_ts].extend([Float64Multiply(0, overwrite_ts - 1, sid_idx, sid_idx, adjustment.value) for adjustment in pre_adjustments_dict[column_name][split_ts]]) # depends on [control=['if'], data=['split_ts', 'overwrite_ts']] # depends on [control=['for'], data=['split_ts']] # depends on [control=['if'], data=[]] else: # After the split-adjusted-asof-date, we need to re-apply all # adjustments that occur after that date and within the # bounds of the overwrite. They need to be applied starting # from the first date and until an end date. The end date is # the date of the newest information we get about # `requested_quarter` that is >= `split_ts`, or if there is no # new knowledge before `overwrite_ts`, then it is the date # before `overwrite_ts`. # Overwrites happen at the first index of a new quarter, # so determine here which quarter that is. 
requested_quarter = requested_qtr_data[SHIFTED_NORMALIZED_QTRS, sid].iloc[overwrite_ts] for (adjustment_value, date_index, timestamp) in zip(*post_adjustments): if split_adjusted_asof_idx < date_index < overwrite_ts: # Assume the entire overwrite contains stale data upper_bound = overwrite_ts - 1 end_idx = self.determine_end_idx_for_adjustment(timestamp, dates, upper_bound, requested_quarter, sid_estimates) adjustments_for_sid[column_name][overwrite_ts].append(Float64Multiply(0, end_idx, sid_idx, sid_idx, adjustment_value)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['overwrite_ts']] # depends on [control=['for'], data=['column_name']] self.merge_split_adjustments_with_overwrites(pre_adjustments_dict, post_adjustments_dict, adjustments_for_sid, requested_split_adjusted_columns)
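An isolated illustration of the adjustment object built above; the import path is an assumption about zipline's layout, and Float64Multiply scales rows first_row..last_row of the sid's single column by value.

from zipline.lib.adjustment import Float64Multiply  # import path is an assumption

sid_idx = 3         # column of the sid in the adjusted array
overwrite_ts = 10   # first row of the overwritten window
split_ratio = 0.5   # hypothetical 2-for-1 split

# re-apply the split to every row the overwrite would otherwise clobber
adj = Float64Multiply(0, overwrite_ts - 1, sid_idx, sid_idx, split_ratio)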
def handle_document(self, item_session: ItemSession, filename: str) -> Actions: '''Process a successful document response. Returns: A value from :class:`.hook.Actions`. ''' self._waiter.reset() action = self.handle_response(item_session) if action == Actions.NORMAL: self._statistics.increment(item_session.response.body.size()) item_session.set_status(Status.done, filename=filename) return action
def function[handle_document, parameter[self, item_session, filename]]: constant[Process a successful document response. Returns: A value from :class:`.hook.Actions`. ] call[name[self]._waiter.reset, parameter[]] variable[action] assign[=] call[name[self].handle_response, parameter[name[item_session]]] if compare[name[action] equal[==] name[Actions].NORMAL] begin[:] call[name[self]._statistics.increment, parameter[call[name[item_session].response.body.size, parameter[]]]] call[name[item_session].set_status, parameter[name[Status].done]] return[name[action]]
keyword[def] identifier[handle_document] ( identifier[self] , identifier[item_session] : identifier[ItemSession] , identifier[filename] : identifier[str] )-> identifier[Actions] : literal[string] identifier[self] . identifier[_waiter] . identifier[reset] () identifier[action] = identifier[self] . identifier[handle_response] ( identifier[item_session] ) keyword[if] identifier[action] == identifier[Actions] . identifier[NORMAL] : identifier[self] . identifier[_statistics] . identifier[increment] ( identifier[item_session] . identifier[response] . identifier[body] . identifier[size] ()) identifier[item_session] . identifier[set_status] ( identifier[Status] . identifier[done] , identifier[filename] = identifier[filename] ) keyword[return] identifier[action]
def handle_document(self, item_session: ItemSession, filename: str) -> Actions: """Process a successful document response. Returns: A value from :class:`.hook.Actions`. """ self._waiter.reset() action = self.handle_response(item_session) if action == Actions.NORMAL: self._statistics.increment(item_session.response.body.size()) item_session.set_status(Status.done, filename=filename) # depends on [control=['if'], data=[]] return action
def removeAssociation(self, server_url, handle): """Remove an association if it exists. Do nothing if it does not. (str, str) -> bool """ assoc = self.getAssociation(server_url, handle) if assoc is None: return 0 else: filename = self.getAssociationFilename(server_url, handle) return _removeIfPresent(filename)
def function[removeAssociation, parameter[self, server_url, handle]]: constant[Remove an association if it exists. Do nothing if it does not. (str, str) -> bool ] variable[assoc] assign[=] call[name[self].getAssociation, parameter[name[server_url], name[handle]]] if compare[name[assoc] is constant[None]] begin[:] return[constant[0]]
keyword[def] identifier[removeAssociation] ( identifier[self] , identifier[server_url] , identifier[handle] ): literal[string] identifier[assoc] = identifier[self] . identifier[getAssociation] ( identifier[server_url] , identifier[handle] ) keyword[if] identifier[assoc] keyword[is] keyword[None] : keyword[return] literal[int] keyword[else] : identifier[filename] = identifier[self] . identifier[getAssociationFilename] ( identifier[server_url] , identifier[handle] ) keyword[return] identifier[_removeIfPresent] ( identifier[filename] )
def removeAssociation(self, server_url, handle): """Remove an association if it exists. Do nothing if it does not. (str, str) -> bool """ assoc = self.getAssociation(server_url, handle) if assoc is None: return 0 # depends on [control=['if'], data=[]] else: filename = self.getAssociationFilename(server_url, handle) return _removeIfPresent(filename)
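Hypothetical usage against a file-based OpenID store instance (store and assoc_handle are placeholders); the return value is truthy only when an association file was actually deleted.

removed = store.removeAssociation('https://example.com/openid', assoc_handle)
if not removed:
    pass  # nothing was stored for that (server_url, handle) pair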
def item(*args, **kwargs): ''' .. versionadded:: 0.16.2 Return one or more pillar entries from the :ref:`in-memory pillar data <pillar-in-memory>`. delimiter Delimiter used to traverse nested dictionaries. .. note:: This is different from :py:func:`pillar.get <salt.modules.pillar.get>` in that no default value can be specified. :py:func:`pillar.get <salt.modules.pillar.get>` should probably still be used in most cases to retrieve nested pillar values, as it is a bit more flexible. One reason to use this function instead of :py:func:`pillar.get <salt.modules.pillar.get>` however is when it is desirable to retrieve the values of more than one key, since :py:func:`pillar.get <salt.modules.pillar.get>` can only retrieve one key at a time. .. versionadded:: 2015.8.0 pillarenv If specified, this function will query the master to generate fresh pillar data on the fly, specifically from the requested pillar environment. Note that this can produce different pillar data than executing this function without an environment, as its normal behavior is just to return a value from minion's pillar data in memory (which can be sourced from more than one pillar environment). Using this argument will not affect the pillar data in memory. It will however be slightly slower and use more resources on the master due to the need for the master to generate and send the minion fresh pillar data. This tradeoff in performance however allows for the use case where pillar data is desired only from a single environment. .. versionadded:: 2017.7.6,2018.3.1 saltenv Included only for compatibility with :conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored. .. versionadded:: 2017.7.6,2018.3.1 CLI Examples: .. code-block:: bash salt '*' pillar.item foo salt '*' pillar.item foo:bar salt '*' pillar.item foo bar baz ''' ret = {} default = kwargs.get('default', '') delimiter = kwargs.get('delimiter', DEFAULT_TARGET_DELIM) pillarenv = kwargs.get('pillarenv', None) saltenv = kwargs.get('saltenv', None) pillar_dict = __pillar__ \ if all(x is None for x in (saltenv, pillarenv)) \ else items(saltenv=saltenv, pillarenv=pillarenv) try: for arg in args: ret[arg] = salt.utils.data.traverse_dict_and_list( pillar_dict, arg, default, delimiter) except KeyError: pass return ret
def function[item, parameter[]]: constant[ .. versionadded:: 0.16.2 Return one or more pillar entries from the :ref:`in-memory pillar data <pillar-in-memory>`. delimiter Delimiter used to traverse nested dictionaries. .. note:: This is different from :py:func:`pillar.get <salt.modules.pillar.get>` in that no default value can be specified. :py:func:`pillar.get <salt.modules.pillar.get>` should probably still be used in most cases to retrieve nested pillar values, as it is a bit more flexible. One reason to use this function instead of :py:func:`pillar.get <salt.modules.pillar.get>` however is when it is desirable to retrieve the values of more than one key, since :py:func:`pillar.get <salt.modules.pillar.get>` can only retrieve one key at a time. .. versionadded:: 2015.8.0 pillarenv If specified, this function will query the master to generate fresh pillar data on the fly, specifically from the requested pillar environment. Note that this can produce different pillar data than executing this function without an environment, as its normal behavior is just to return a value from minion's pillar data in memory (which can be sourced from more than one pillar environment). Using this argument will not affect the pillar data in memory. It will however be slightly slower and use more resources on the master due to the need for the master to generate and send the minion fresh pillar data. This tradeoff in performance however allows for the use case where pillar data is desired only from a single environment. .. versionadded:: 2017.7.6,2018.3.1 saltenv Included only for compatibility with :conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored. .. versionadded:: 2017.7.6,2018.3.1 CLI Examples: .. code-block:: bash salt '*' pillar.item foo salt '*' pillar.item foo:bar salt '*' pillar.item foo bar baz ] variable[ret] assign[=] dictionary[[], []] variable[default] assign[=] call[name[kwargs].get, parameter[constant[default], constant[]]] variable[delimiter] assign[=] call[name[kwargs].get, parameter[constant[delimiter], name[DEFAULT_TARGET_DELIM]]] variable[pillarenv] assign[=] call[name[kwargs].get, parameter[constant[pillarenv], constant[None]]] variable[saltenv] assign[=] call[name[kwargs].get, parameter[constant[saltenv], constant[None]]] variable[pillar_dict] assign[=] <ast.IfExp object at 0x7da1b2347130> <ast.Try object at 0x7da1b2347be0> return[name[ret]]
keyword[def] identifier[item] (* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[ret] ={} identifier[default] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ) identifier[delimiter] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[DEFAULT_TARGET_DELIM] ) identifier[pillarenv] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) identifier[saltenv] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) identifier[pillar_dict] = identifier[__pillar__] keyword[if] identifier[all] ( identifier[x] keyword[is] keyword[None] keyword[for] identifier[x] keyword[in] ( identifier[saltenv] , identifier[pillarenv] )) keyword[else] identifier[items] ( identifier[saltenv] = identifier[saltenv] , identifier[pillarenv] = identifier[pillarenv] ) keyword[try] : keyword[for] identifier[arg] keyword[in] identifier[args] : identifier[ret] [ identifier[arg] ]= identifier[salt] . identifier[utils] . identifier[data] . identifier[traverse_dict_and_list] ( identifier[pillar_dict] , identifier[arg] , identifier[default] , identifier[delimiter] ) keyword[except] identifier[KeyError] : keyword[pass] keyword[return] identifier[ret]
def item(*args, **kwargs): """ .. versionadded:: 0.16.2 Return one or more pillar entries from the :ref:`in-memory pillar data <pillar-in-memory>`. delimiter Delimiter used to traverse nested dictionaries. .. note:: This is different from :py:func:`pillar.get <salt.modules.pillar.get>` in that no default value can be specified. :py:func:`pillar.get <salt.modules.pillar.get>` should probably still be used in most cases to retrieve nested pillar values, as it is a bit more flexible. One reason to use this function instead of :py:func:`pillar.get <salt.modules.pillar.get>` however is when it is desirable to retrieve the values of more than one key, since :py:func:`pillar.get <salt.modules.pillar.get>` can only retrieve one key at a time. .. versionadded:: 2015.8.0 pillarenv If specified, this function will query the master to generate fresh pillar data on the fly, specifically from the requested pillar environment. Note that this can produce different pillar data than executing this function without an environment, as its normal behavior is just to return a value from minion's pillar data in memory (which can be sourced from more than one pillar environment). Using this argument will not affect the pillar data in memory. It will however be slightly slower and use more resources on the master due to the need for the master to generate and send the minion fresh pillar data. This tradeoff in performance however allows for the use case where pillar data is desired only from a single environment. .. versionadded:: 2017.7.6,2018.3.1 saltenv Included only for compatibility with :conf_minion:`pillarenv_from_saltenv`, and is otherwise ignored. .. versionadded:: 2017.7.6,2018.3.1 CLI Examples: .. code-block:: bash salt '*' pillar.item foo salt '*' pillar.item foo:bar salt '*' pillar.item foo bar baz """ ret = {} default = kwargs.get('default', '') delimiter = kwargs.get('delimiter', DEFAULT_TARGET_DELIM) pillarenv = kwargs.get('pillarenv', None) saltenv = kwargs.get('saltenv', None) pillar_dict = __pillar__ if all((x is None for x in (saltenv, pillarenv))) else items(saltenv=saltenv, pillarenv=pillarenv) try: for arg in args: ret[arg] = salt.utils.data.traverse_dict_and_list(pillar_dict, arg, default, delimiter) # depends on [control=['for'], data=['arg']] # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] return ret
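The nested lookup that item() delegates to, shown standalone; this assumes a host with Salt installed so that salt.utils.data is importable.

import salt.utils.data

pillar = {'apache': {'conf': {'port': 8080}}}
value = salt.utils.data.traverse_dict_and_list(pillar, 'apache:conf:port', '', ':')
# value == 8080; the '' default is returned when the path does not resolve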
def map_names(lang="en"): """This resource returns an dictionary of the localized map names for the specified language. Only maps with events are listed - if you need a list of all maps, use ``maps.json`` instead. :param lang: The language to query the names for. :return: the response is a dictionary where the key is the map id and the value is the name of the map in the specified language. """ cache_name = "map_names.%s.json" % lang data = get_cached("map_names.json", cache_name, params=dict(lang=lang)) return dict([(item["id"], item["name"]) for item in data])
def function[map_names, parameter[lang]]: constant[This resource returns a dictionary of the localized map names for the specified language. Only maps with events are listed - if you need a list of all maps, use ``maps.json`` instead. :param lang: The language to query the names for. :return: the response is a dictionary where the key is the map id and the value is the name of the map in the specified language. ] variable[cache_name] assign[=] binary_operation[constant[map_names.%s.json] <ast.Mod object at 0x7da2590d6920> name[lang]] variable[data] assign[=] call[name[get_cached], parameter[constant[map_names.json], name[cache_name]]] return[call[name[dict], parameter[<ast.ListComp object at 0x7da204623430>]]]
keyword[def] identifier[map_names] ( identifier[lang] = literal[string] ): literal[string] identifier[cache_name] = literal[string] % identifier[lang] identifier[data] = identifier[get_cached] ( literal[string] , identifier[cache_name] , identifier[params] = identifier[dict] ( identifier[lang] = identifier[lang] )) keyword[return] identifier[dict] ([( identifier[item] [ literal[string] ], identifier[item] [ literal[string] ]) keyword[for] identifier[item] keyword[in] identifier[data] ])
def map_names(lang='en'):
    """This resource returns a dictionary of the localized map names for
    the specified language. Only maps with events are listed - if you need a
    list of all maps, use ``maps.json`` instead.

    :param lang: The language to query the names for.
    :return: the response is a dictionary where the key is the map id and
             the value is the name of the map in the specified language.

    """
    cache_name = 'map_names.%s.json' % lang
    data = get_cached('map_names.json', cache_name, params=dict(lang=lang))
    return dict([(item['id'], item['name']) for item in data])
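The id-to-name transformation at the end of map_names is easy to check in isolation. A minimal sketch, assuming a made-up payload in place of the cached API response (get_cached itself is not reproduced here, and the map names are illustrative):

# Hypothetical sample of the decoded map_names.json payload.
sample_data = [
    {"id": "15", "name": "Queensdale"},
    {"id": "17", "name": "Harathi Hinterlands"},
]

# Same dict() construction as the return statement above.
name_by_id = dict([(item["id"], item["name"]) for item in sample_data])
assert name_by_id["15"] == "Queensdale"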
def msg2usernames(msg, processor=None, legacy=False, **config): """ Return a set of FAS usernames associated with a message. """ return processor.usernames(msg, **config)
def function[msg2usernames, parameter[msg, processor, legacy]]: constant[ Return a set of FAS usernames associated with a message. ] return[call[name[processor].usernames, parameter[name[msg]]]]
keyword[def] identifier[msg2usernames] ( identifier[msg] , identifier[processor] = keyword[None] , identifier[legacy] = keyword[False] ,** identifier[config] ): literal[string] keyword[return] identifier[processor] . identifier[usernames] ( identifier[msg] ,** identifier[config] )
def msg2usernames(msg, processor=None, legacy=False, **config): """ Return a set of FAS usernames associated with a message. """ return processor.usernames(msg, **config)
def index(request, obj_id=None): """Handles a request based on method and calls the appropriate function""" if request.method == 'GET': return get(request, obj_id) elif request.method == 'POST': return post(request) elif request.method == 'PUT': getPutData(request) return put(request, obj_id) elif request.method == 'DELETE': getPutData(request) return delete(request, obj_id)
def function[index, parameter[request, obj_id]]: constant[Handles a request based on method and calls the appropriate function] if compare[name[request].method equal[==] constant[GET]] begin[:] return[call[name[get], parameter[name[request], name[obj_id]]]]
keyword[def] identifier[index] ( identifier[request] , identifier[obj_id] = keyword[None] ): literal[string] keyword[if] identifier[request] . identifier[method] == literal[string] : keyword[return] identifier[get] ( identifier[request] , identifier[obj_id] ) keyword[elif] identifier[request] . identifier[method] == literal[string] : keyword[return] identifier[post] ( identifier[request] ) keyword[elif] identifier[request] . identifier[method] == literal[string] : identifier[getPutData] ( identifier[request] ) keyword[return] identifier[put] ( identifier[request] , identifier[obj_id] ) keyword[elif] identifier[request] . identifier[method] == literal[string] : identifier[getPutData] ( identifier[request] ) keyword[return] identifier[delete] ( identifier[request] , identifier[obj_id] )
def index(request, obj_id=None): """Handles a request based on method and calls the appropriate function""" if request.method == 'GET': return get(request, obj_id) # depends on [control=['if'], data=[]] elif request.method == 'POST': return post(request) # depends on [control=['if'], data=[]] elif request.method == 'PUT': getPutData(request) return put(request, obj_id) # depends on [control=['if'], data=[]] elif request.method == 'DELETE': getPutData(request) return delete(request, obj_id) # depends on [control=['if'], data=[]]
def del_doc(self, doc):
    """
    Delete a document
    """
    if not self.index_writer:
        self.index_writer = self.index.writer()
    if not self.label_guesser_updater:
        self.label_guesser_updater = self.label_guesser.get_updater()
    logger.info("Removing doc from the index: %s" % doc)
    if isinstance(doc, str):
        # annoying case: the doc is only known by its docid string, so we
        # can't know which labels were on it and can't roll back the
        # label guesser training ...
        # (this check must come before any doc.docid access, which would
        # raise AttributeError on a plain string)
        self._docs_by_id.pop(doc, None)
        self._delete_doc_from_index(self.index_writer, doc)
        return
    if doc.docid in self._docs_by_id:
        self._docs_by_id.pop(doc.docid)
    self._delete_doc_from_index(self.index_writer, doc.docid)
    self.label_guesser_updater.del_doc(doc)
def function[del_doc, parameter[self, doc]]: constant[ Delete a document ] if <ast.UnaryOp object at 0x7da20c76f250> begin[:] name[self].index_writer assign[=] call[name[self].index.writer, parameter[]] if <ast.UnaryOp object at 0x7da20c76f9d0> begin[:] name[self].label_guesser_updater assign[=] call[name[self].label_guesser.get_updater, parameter[]] call[name[logger].info, parameter[binary_operation[constant[Removing doc from the index: %s] <ast.Mod object at 0x7da2590d6920> name[doc]]]] if call[name[isinstance], parameter[name[doc], name[str]]] begin[:] call[name[self]._docs_by_id.pop, parameter[name[doc], constant[None]]] call[name[self]._delete_doc_from_index, parameter[name[self].index_writer, name[doc]]] return[None] if compare[name[doc].docid in name[self]._docs_by_id] begin[:] call[name[self]._docs_by_id.pop, parameter[name[doc].docid]] call[name[self]._delete_doc_from_index, parameter[name[self].index_writer, name[doc].docid]] call[name[self].label_guesser_updater.del_doc, parameter[name[doc]]]
keyword[def] identifier[del_doc] ( identifier[self] , identifier[doc] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[index_writer] : identifier[self] . identifier[index_writer] = identifier[self] . identifier[index] . identifier[writer] () keyword[if] keyword[not] identifier[self] . identifier[label_guesser_updater] : identifier[self] . identifier[label_guesser_updater] = identifier[self] . identifier[label_guesser] . identifier[get_updater] () identifier[logger] . identifier[info] ( literal[string] % identifier[doc] ) keyword[if] identifier[isinstance] ( identifier[doc] , identifier[str] ): identifier[self] . identifier[_docs_by_id] . identifier[pop] ( identifier[doc] , keyword[None] ) identifier[self] . identifier[_delete_doc_from_index] ( identifier[self] . identifier[index_writer] , identifier[doc] ) keyword[return] keyword[if] identifier[doc] . identifier[docid] keyword[in] identifier[self] . identifier[_docs_by_id] : identifier[self] . identifier[_docs_by_id] . identifier[pop] ( identifier[doc] . identifier[docid] ) identifier[self] . identifier[_delete_doc_from_index] ( identifier[self] . identifier[index_writer] , identifier[doc] . identifier[docid] ) identifier[self] . identifier[label_guesser_updater] . identifier[del_doc] ( identifier[doc] )
def del_doc(self, doc):
    """
    Delete a document
    """
    if not self.index_writer:
        self.index_writer = self.index.writer() # depends on [control=['if'], data=[]]
    if not self.label_guesser_updater:
        self.label_guesser_updater = self.label_guesser.get_updater() # depends on [control=['if'], data=[]]
    logger.info('Removing doc from the index: %s' % doc)
    if isinstance(doc, str):
        # annoying case: the doc is only known by its docid string, so we
        # can't know which labels were on it and can't roll back the
        # label guesser training ...
        # (this check must come before any doc.docid access, which would
        # raise AttributeError on a plain string)
        self._docs_by_id.pop(doc, None)
        self._delete_doc_from_index(self.index_writer, doc)
        return # depends on [control=['if'], data=[]]
    if doc.docid in self._docs_by_id:
        self._docs_by_id.pop(doc.docid) # depends on [control=['if'], data=[]]
    self._delete_doc_from_index(self.index_writer, doc.docid)
    self.label_guesser_updater.del_doc(doc)
def disable_user(): """ Disables a user in the data store .. example:: $ curl http://localhost:5000/disable_user -X POST \ -H "Authorization: Bearer <your_token>" \ -d '{"username":"Walter"}' """ req = flask.request.get_json(force=True) usr = User.query.filter_by(username=req.get('username', None)).one() usr.is_active = False db.session.commit() return flask.jsonify(message='disabled user {}'.format(usr.username))
def function[disable_user, parameter[]]: constant[ Disables a user in the data store .. example:: $ curl http://localhost:5000/disable_user -X POST -H "Authorization: Bearer <your_token>" -d '{"username":"Walter"}' ] variable[req] assign[=] call[name[flask].request.get_json, parameter[]] variable[usr] assign[=] call[call[name[User].query.filter_by, parameter[]].one, parameter[]] name[usr].is_active assign[=] constant[False] call[name[db].session.commit, parameter[]] return[call[name[flask].jsonify, parameter[]]]
keyword[def] identifier[disable_user] (): literal[string] identifier[req] = identifier[flask] . identifier[request] . identifier[get_json] ( identifier[force] = keyword[True] ) identifier[usr] = identifier[User] . identifier[query] . identifier[filter_by] ( identifier[username] = identifier[req] . identifier[get] ( literal[string] , keyword[None] )). identifier[one] () identifier[usr] . identifier[is_active] = keyword[False] identifier[db] . identifier[session] . identifier[commit] () keyword[return] identifier[flask] . identifier[jsonify] ( identifier[message] = literal[string] . identifier[format] ( identifier[usr] . identifier[username] ))
def disable_user(): """ Disables a user in the data store .. example:: $ curl http://localhost:5000/disable_user -X POST -H "Authorization: Bearer <your_token>" -d '{"username":"Walter"}' """ req = flask.request.get_json(force=True) usr = User.query.filter_by(username=req.get('username', None)).one() usr.is_active = False db.session.commit() return flask.jsonify(message='disabled user {}'.format(usr.username))
def abort(self): """ Abort an initiated SASL authentication process. The expected result state is ``failure``. """ if self._state == SASLState.INITIAL: raise RuntimeError("SASL authentication hasn't started yet") if self._state == SASLState.SUCCESS_SIMULATE_CHALLENGE: raise RuntimeError("SASL message exchange already over") try: return (yield from self.interface.abort()) finally: self._state = SASLState.FAILURE
def function[abort, parameter[self]]: constant[ Abort an initiated SASL authentication process. The expected result state is ``failure``. ] if compare[name[self]._state equal[==] name[SASLState].INITIAL] begin[:] <ast.Raise object at 0x7da18f722e60> if compare[name[self]._state equal[==] name[SASLState].SUCCESS_SIMULATE_CHALLENGE] begin[:] <ast.Raise object at 0x7da18f722cb0> <ast.Try object at 0x7da18f722ef0>
keyword[def] identifier[abort] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_state] == identifier[SASLState] . identifier[INITIAL] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[if] identifier[self] . identifier[_state] == identifier[SASLState] . identifier[SUCCESS_SIMULATE_CHALLENGE] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[try] : keyword[return] ( keyword[yield] keyword[from] identifier[self] . identifier[interface] . identifier[abort] ()) keyword[finally] : identifier[self] . identifier[_state] = identifier[SASLState] . identifier[FAILURE]
def abort(self): """ Abort an initiated SASL authentication process. The expected result state is ``failure``. """ if self._state == SASLState.INITIAL: raise RuntimeError("SASL authentication hasn't started yet") # depends on [control=['if'], data=[]] if self._state == SASLState.SUCCESS_SIMULATE_CHALLENGE: raise RuntimeError('SASL message exchange already over') # depends on [control=['if'], data=[]] try: return (yield from self.interface.abort()) # depends on [control=['try'], data=[]] finally: self._state = SASLState.FAILURE
def find_period(y, Tapprox): """ Find oscillation period of y(t). Parameter Tapprox is the approximate period. The code finds the time between 0.7 * Tapprox and 1.3 * Tapprox where y(t)[1] = d/dt theta(t) vanishes. This is the period. """ def dtheta_dt(t): """ vanishes when dtheta/dt = 0 """ return y(t)[1] return gv.root.refine(dtheta_dt, (0.7 * Tapprox, 1.3 * Tapprox))
def function[find_period, parameter[y, Tapprox]]: constant[ Find oscillation period of y(t). Parameter Tapprox is the approximate period. The code finds the time between 0.7 * Tapprox and 1.3 * Tapprox where y(t)[1] = d/dt theta(t) vanishes. This is the period. ] def function[dtheta_dt, parameter[t]]: constant[ vanishes when dtheta/dt = 0 ] return[call[call[name[y], parameter[name[t]]]][constant[1]]] return[call[name[gv].root.refine, parameter[name[dtheta_dt], tuple[[<ast.BinOp object at 0x7da20c992b90>, <ast.BinOp object at 0x7da20c991d20>]]]]]
keyword[def] identifier[find_period] ( identifier[y] , identifier[Tapprox] ): literal[string] keyword[def] identifier[dtheta_dt] ( identifier[t] ): literal[string] keyword[return] identifier[y] ( identifier[t] )[ literal[int] ] keyword[return] identifier[gv] . identifier[root] . identifier[refine] ( identifier[dtheta_dt] ,( literal[int] * identifier[Tapprox] , literal[int] * identifier[Tapprox] ))
def find_period(y, Tapprox): """ Find oscillation period of y(t). Parameter Tapprox is the approximate period. The code finds the time between 0.7 * Tapprox and 1.3 * Tapprox where y(t)[1] = d/dt theta(t) vanishes. This is the period. """ def dtheta_dt(t): """ vanishes when dtheta/dt = 0 """ return y(t)[1] return gv.root.refine(dtheta_dt, (0.7 * Tapprox, 1.3 * Tapprox))
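The bracketing idea in find_period, looking for the zero of dtheta/dt inside (0.7 * Tapprox, 1.3 * Tapprox), does not depend on gvar's root finder. A minimal sketch of the same search using scipy's brentq on a toy oscillator (the signal and the guess are illustrative):

import numpy as np
from scipy.optimize import brentq

def dtheta_dt(t):
    # Toy angular velocity: vanishes at t = 0, 1, 2, ..., so the sought
    # zero crossing is at t = 1.0.
    return -np.sin(np.pi * t)

Tapprox = 1.1  # rough guess, deliberately a bit off
T = brentq(dtheta_dt, 0.7 * Tapprox, 1.3 * Tapprox)  # same bracket as above
assert abs(T - 1.0) < 1e-9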
def one(self): """Return a tensor of all ones. Examples -------- >>> space = odl.rn(3) >>> x = space.one() >>> x rn(3).element([ 1., 1., 1.]) """ return self.element(np.ones(self.shape, dtype=self.dtype, order=self.default_order))
def function[one, parameter[self]]: constant[Return a tensor of all ones. Examples -------- >>> space = odl.rn(3) >>> x = space.one() >>> x rn(3).element([ 1., 1., 1.]) ] return[call[name[self].element, parameter[call[name[np].ones, parameter[name[self].shape]]]]]
keyword[def] identifier[one] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[element] ( identifier[np] . identifier[ones] ( identifier[self] . identifier[shape] , identifier[dtype] = identifier[self] . identifier[dtype] , identifier[order] = identifier[self] . identifier[default_order] ))
def one(self): """Return a tensor of all ones. Examples -------- >>> space = odl.rn(3) >>> x = space.one() >>> x rn(3).element([ 1., 1., 1.]) """ return self.element(np.ones(self.shape, dtype=self.dtype, order=self.default_order))
def _SetHeader(self, new_values): """Sets header of table to the given tuple. Args: new_values: Tuple of new header values. """ row = self.row_class() row.row = 0 for v in new_values: row[v] = v self._table[0] = row
def function[_SetHeader, parameter[self, new_values]]: constant[Sets header of table to the given tuple. Args: new_values: Tuple of new header values. ] variable[row] assign[=] call[name[self].row_class, parameter[]] name[row].row assign[=] constant[0] for taget[name[v]] in starred[name[new_values]] begin[:] call[name[row]][name[v]] assign[=] name[v] call[name[self]._table][constant[0]] assign[=] name[row]
keyword[def] identifier[_SetHeader] ( identifier[self] , identifier[new_values] ): literal[string] identifier[row] = identifier[self] . identifier[row_class] () identifier[row] . identifier[row] = literal[int] keyword[for] identifier[v] keyword[in] identifier[new_values] : identifier[row] [ identifier[v] ]= identifier[v] identifier[self] . identifier[_table] [ literal[int] ]= identifier[row]
def _SetHeader(self, new_values): """Sets header of table to the given tuple. Args: new_values: Tuple of new header values. """ row = self.row_class() row.row = 0 for v in new_values: row[v] = v # depends on [control=['for'], data=['v']] self._table[0] = row
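What the loop in _SetHeader builds is an identity mapping from each header value to itself, stored as row 0 of the table. A stand-in sketch with a plain dict in place of self.row_class() (the header values are illustrative):

# The loop above produces a row shaped like this identity mapping.
new_values = ("Interface", "Status", "Protocol")
row = {v: v for v in new_values}
assert row["Status"] == "Status"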
def post_build(self, pkt, pay):
    """ Implements the swap-bytes functionality when building.

    This is based on a copy of the Packet.self_build default method.
    The goal is to affect only the CAN layer data and keep the
    underlying layers (e.g. LinuxCooked) unchanged.
    """
    if conf.contribs['CAN']['swap-bytes']:
        return CAN.inv_endianness(pkt) + pay
    return pkt + pay
def function[post_build, parameter[self, pkt, pay]]: constant[ Implements the swap-bytes functionality when building. This is based on a copy of the Packet.self_build default method. The goal is to affect only the CAN layer data and keep the underlying layers (e.g. LinuxCooked) unchanged. ] if call[call[name[conf].contribs][constant[CAN]]][constant[swap-bytes]] begin[:] return[binary_operation[call[name[CAN].inv_endianness, parameter[name[pkt]]] + name[pay]]] return[binary_operation[name[pkt] + name[pay]]]
keyword[def] identifier[post_build] ( identifier[self] , identifier[pkt] , identifier[pay] ): literal[string] keyword[if] identifier[conf] . identifier[contribs] [ literal[string] ][ literal[string] ]: keyword[return] identifier[CAN] . identifier[inv_endianness] ( identifier[pkt] )+ identifier[pay] keyword[return] identifier[pkt] + identifier[pay]
def post_build(self, pkt, pay):
    """ Implements the swap-bytes functionality when building.

    This is based on a copy of the Packet.self_build default method.
    The goal is to affect only the CAN layer data and keep the
    underlying layers (e.g. LinuxCooked) unchanged.
    """
    if conf.contribs['CAN']['swap-bytes']:
        return CAN.inv_endianness(pkt) + pay # depends on [control=['if'], data=[]]
    return pkt + pay
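CAN.inv_endianness is not shown in this snippet. As an assumption, swapping the byte order of a fixed-width field usually reduces to a struct repack like the sketch below (the 4-byte width and layout are illustrative, not scapy's actual CAN frame format):

import struct

def swap_u32_endianness(raw: bytes) -> bytes:
    # Read the first four bytes as big-endian and re-emit them
    # little-endian, leaving the rest of the frame untouched.
    (value,) = struct.unpack(">I", raw[:4])
    return struct.pack("<I", value) + raw[4:]

assert swap_u32_endianness(b"\x00\x00\x01\x23payload") == b"\x23\x01\x00\x00payload"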
def projective_measurement_constraints(*parties):
    """Return a set of constraints that define projective measurements.

    :param parties: Measurements of different parties.
    :type parties: list or tuple of list of list of
                   :class:`sympy.physics.quantum.operator.HermitianOperator`.

    :returns: substitutions containing idempotency, orthogonality and
              commutation relations.
    """
    substitutions = {}
    # Idempotency and orthogonality of projectors
    if isinstance(parties[0][0][0], list):
        parties = parties[0]
    for party in parties:
        for measurement in party:
            for projector1 in measurement:
                for projector2 in measurement:
                    if projector1 == projector2:
                        substitutions[projector1**2] = projector1
                    else:
                        substitutions[projector1*projector2] = 0
                        substitutions[projector2*projector1] = 0
    # Projectors commute between parties in a partition
    for n1 in range(len(parties)):
        for n2 in range(n1+1, len(parties)):
            for measurement1 in parties[n1]:
                for measurement2 in parties[n2]:
                    for projector1 in measurement1:
                        for projector2 in measurement2:
                            substitutions[projector2*projector1] = \
                                projector1*projector2
    return substitutions
def function[projective_measurement_constraints, parameter[]]: constant[Return a set of constraints that define projective measurements. :param parties: Measurements of different parties. :type parties: list or tuple of list of list of :class:`sympy.physics.quantum.operator.HermitianOperator`. :returns: substitutions containing idempotency, orthogonality and commutation relations. ] variable[substitutions] assign[=] dictionary[[], []] if call[name[isinstance], parameter[call[call[call[name[parties]][constant[0]]][constant[0]]][constant[0]], name[list]]] begin[:] variable[parties] assign[=] call[name[parties]][constant[0]] for taget[name[party]] in starred[name[parties]] begin[:] for taget[name[measurement]] in starred[name[party]] begin[:] for taget[name[projector1]] in starred[name[measurement]] begin[:] for taget[name[projector2]] in starred[name[measurement]] begin[:] if compare[name[projector1] equal[==] name[projector2]] begin[:] call[name[substitutions]][binary_operation[name[projector1] ** constant[2]]] assign[=] name[projector1] for taget[name[n1]] in starred[call[name[range], parameter[call[name[len], parameter[name[parties]]]]]] begin[:] for taget[name[n2]] in starred[call[name[range], parameter[binary_operation[name[n1] + constant[1]], call[name[len], parameter[name[parties]]]]]] begin[:] for taget[name[measurement1]] in starred[call[name[parties]][name[n1]]] begin[:] for taget[name[measurement2]] in starred[call[name[parties]][name[n2]]] begin[:] for taget[name[projector1]] in starred[name[measurement1]] begin[:] for taget[name[projector2]] in starred[name[measurement2]] begin[:] call[name[substitutions]][binary_operation[name[projector2] * name[projector1]]] assign[=] binary_operation[name[projector1] * name[projector2]] return[name[substitutions]]
keyword[def] identifier[projective_measurement_constraints] (* identifier[parties] ): literal[string] identifier[substitutions] ={} keyword[if] identifier[isinstance] ( identifier[parties] [ literal[int] ][ literal[int] ][ literal[int] ], identifier[list] ): identifier[parties] = identifier[parties] [ literal[int] ] keyword[for] identifier[party] keyword[in] identifier[parties] : keyword[for] identifier[measurement] keyword[in] identifier[party] : keyword[for] identifier[projector1] keyword[in] identifier[measurement] : keyword[for] identifier[projector2] keyword[in] identifier[measurement] : keyword[if] identifier[projector1] == identifier[projector2] : identifier[substitutions] [ identifier[projector1] ** literal[int] ]= identifier[projector1] keyword[else] : identifier[substitutions] [ identifier[projector1] * identifier[projector2] ]= literal[int] identifier[substitutions] [ identifier[projector2] * identifier[projector1] ]= literal[int] keyword[for] identifier[n1] keyword[in] identifier[range] ( identifier[len] ( identifier[parties] )): keyword[for] identifier[n2] keyword[in] identifier[range] ( identifier[n1] + literal[int] , identifier[len] ( identifier[parties] )): keyword[for] identifier[measurement1] keyword[in] identifier[parties] [ identifier[n1] ]: keyword[for] identifier[measurement2] keyword[in] identifier[parties] [ identifier[n2] ]: keyword[for] identifier[projector1] keyword[in] identifier[measurement1] : keyword[for] identifier[projector2] keyword[in] identifier[measurement2] : identifier[substitutions] [ identifier[projector2] * identifier[projector1] ]= identifier[projector1] * identifier[projector2] keyword[return] identifier[substitutions]
def projective_measurement_constraints(*parties):
    """Return a set of constraints that define projective measurements.

    :param parties: Measurements of different parties.
    :type parties: list or tuple of list of list of
                   :class:`sympy.physics.quantum.operator.HermitianOperator`.

    :returns: substitutions containing idempotency, orthogonality and
              commutation relations.
    """
    substitutions = {}
    # Idempotency and orthogonality of projectors
    if isinstance(parties[0][0][0], list):
        parties = parties[0] # depends on [control=['if'], data=[]]
    for party in parties:
        for measurement in party:
            for projector1 in measurement:
                for projector2 in measurement:
                    if projector1 == projector2:
                        substitutions[projector1 ** 2] = projector1 # depends on [control=['if'], data=['projector1']]
                    else:
                        substitutions[projector1 * projector2] = 0
                        substitutions[projector2 * projector1] = 0 # depends on [control=['for'], data=['projector2']] # depends on [control=['for'], data=['projector1']] # depends on [control=['for'], data=['measurement']] # depends on [control=['for'], data=['party']]
    # Projectors commute between parties in a partition
    for n1 in range(len(parties)):
        for n2 in range(n1 + 1, len(parties)):
            for measurement1 in parties[n1]:
                for measurement2 in parties[n2]:
                    for projector1 in measurement1:
                        for projector2 in measurement2:
                            substitutions[projector2 * projector1] = projector1 * projector2 # depends on [control=['for'], data=['projector2']] # depends on [control=['for'], data=['projector1']] # depends on [control=['for'], data=['measurement2']] # depends on [control=['for'], data=['measurement1']] # depends on [control=['for'], data=['n2']] # depends on [control=['for'], data=['n1']]
    return substitutions
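A minimal usage sketch for projective_measurement_constraints, assuming one two-outcome measurement per party (the operator names are illustrative):

from sympy.physics.quantum.operator import HermitianOperator

A = [[HermitianOperator('A00'), HermitianOperator('A01')]]
B = [[HermitianOperator('B00'), HermitianOperator('B01')]]

subs = projective_measurement_constraints(A, B)
# Idempotency within a measurement ...
assert subs[A[0][0] ** 2] == A[0][0]
# ... orthogonality between distinct outcomes ...
assert subs[A[0][0] * A[0][1]] == 0
# ... and cross-party products rewritten into a canonical order.
assert subs[B[0][0] * A[0][0]] == A[0][0] * B[0][0]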
def propertyContainer(self, ulBuffer):
    """retrieves the property container of a buffer."""
    fn = self.function_table.propertyContainer
    result = fn(ulBuffer)
    return result
def function[propertyContainer, parameter[self, ulBuffer]]: constant[retrieves the property container of a buffer.] variable[fn] assign[=] name[self].function_table.propertyContainer variable[result] assign[=] call[name[fn], parameter[name[ulBuffer]]] return[name[result]]
keyword[def] identifier[propertyContainer] ( identifier[self] , identifier[ulBuffer] ): literal[string] identifier[fn] = identifier[self] . identifier[function_table] . identifier[propertyContainer] identifier[result] = identifier[fn] ( identifier[ulBuffer] ) keyword[return] identifier[result]
def propertyContainer(self, ulBuffer):
    """retrieves the property container of a buffer.""" 
    fn = self.function_table.propertyContainer
    result = fn(ulBuffer)
    return result
def hashmodel(model, library=None):
    '''Calculate the Hash id of metaclass ``meta``'''
    library = library or 'python-stdnet'
    meta = model._meta
    sha = hashlib.sha1(to_bytes('{0}({1})'.format(library, meta)))
    hash = sha.hexdigest()[:8]
    meta.hash = hash
    if hash in _model_dict:
        raise KeyError('Model "{0}" already in hash table. Rename your '
                       'model or the module containing the '
                       'model.'.format(meta))
    _model_dict[hash] = model
def function[hashmodel, parameter[model, library]]: constant[Calculate the Hash id of metaclass ``meta``] variable[library] assign[=] <ast.BoolOp object at 0x7da1b0e6fc70> variable[meta] assign[=] name[model]._meta variable[sha] assign[=] call[name[hashlib].sha1, parameter[call[name[to_bytes], parameter[call[constant[{0}({1})].format, parameter[name[library], name[meta]]]]]]] variable[hash] assign[=] call[call[name[sha].hexdigest, parameter[]]][<ast.Slice object at 0x7da1b0e6f7f0>] name[meta].hash assign[=] name[hash] if compare[name[hash] in name[_model_dict]] begin[:] <ast.Raise object at 0x7da1b0eaddb0> call[name[_model_dict]][name[hash]] assign[=] name[model]
keyword[def] identifier[hashmodel] ( identifier[model] , identifier[library] = keyword[None] ): literal[string] identifier[library] = identifier[library] keyword[or] literal[string] identifier[meta] = identifier[model] . identifier[_meta] identifier[sha] = identifier[hashlib] . identifier[sha1] ( identifier[to_bytes] ( literal[string] . identifier[format] ( identifier[library] , identifier[meta] ))) identifier[hash] = identifier[sha] . identifier[hexdigest] ()[: literal[int] ] identifier[meta] . identifier[hash] = identifier[hash] keyword[if] identifier[hash] keyword[in] identifier[_model_dict] : keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[meta] )) identifier[_model_dict] [ identifier[hash] ]= identifier[model]
def hashmodel(model, library=None): """Calculate the Hash id of metaclass ``meta``""" library = library or 'python-stdnet' meta = model._meta sha = hashlib.sha1(to_bytes('{0}({1})'.format(library, meta))) hash = sha.hexdigest()[:8] meta.hash = hash if hash in _model_dict: raise KeyError('Model "{0}" already in hash table. Rename your model or the module containing the model.'.format(meta)) # depends on [control=['if'], data=[]] _model_dict[hash] = model
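How the 8-character model id falls out of sha1, shown standalone (the formatted payload is an illustrative stand-in for the '{library}({meta})' string built above):

import hashlib

payload = 'python-stdnet(MyModel)'.encode('utf-8')
short_hash = hashlib.sha1(payload).hexdigest()[:8]
print(short_hash)  # a stable 8-hex-digit id; a collision raises KeyError above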
def enrich(self, sample, ontologyClass, path, callback=None, output='application/json'): """ Class Enrichment Service from: /analyzer/enrichment Arguments: sample: A list of CURIEs for nodes whose attributes are to be tested for enrichment. For example, a list of genes. ontologyClass: CURIE for parent ontology class for the attribute to be tested. For example, GO biological process path: A path expression that connects sample nodes to attribute class nodes callback: Name of the JSONP callback ('fn' by default). Supplying this parameter or requesting a javascript media type will cause a JSONP response to be rendered. outputs: application/json """ kwargs = {'sample':sample, 'ontologyClass':ontologyClass, 'path':path, 'callback':callback} kwargs = {k:dumps(v) if type(v) is dict else v for k, v in kwargs.items()} param_rest = self._make_rest('path', **kwargs) url = self._basePath + ('/analyzer/enrichment').format(**kwargs) requests_params = {k:v for k, v in kwargs.items() if k != 'path'} return self._get('GET', url, requests_params, output)
def function[enrich, parameter[self, sample, ontologyClass, path, callback, output]]: constant[ Class Enrichment Service from: /analyzer/enrichment Arguments: sample: A list of CURIEs for nodes whose attributes are to be tested for enrichment. For example, a list of genes. ontologyClass: CURIE for parent ontology class for the attribute to be tested. For example, GO biological process path: A path expression that connects sample nodes to attribute class nodes callback: Name of the JSONP callback ('fn' by default). Supplying this parameter or requesting a javascript media type will cause a JSONP response to be rendered. outputs: application/json ] variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da20e956410>, <ast.Constant object at 0x7da20e955780>, <ast.Constant object at 0x7da20e955c60>, <ast.Constant object at 0x7da20e954a60>], [<ast.Name object at 0x7da20e9550c0>, <ast.Name object at 0x7da20e954580>, <ast.Name object at 0x7da20e955db0>, <ast.Name object at 0x7da20e9557e0>]] variable[kwargs] assign[=] <ast.DictComp object at 0x7da20e954d00> variable[param_rest] assign[=] call[name[self]._make_rest, parameter[constant[path]]] variable[url] assign[=] binary_operation[name[self]._basePath + call[constant[/analyzer/enrichment].format, parameter[]]] variable[requests_params] assign[=] <ast.DictComp object at 0x7da20e956200> return[call[name[self]._get, parameter[constant[GET], name[url], name[requests_params], name[output]]]]
keyword[def] identifier[enrich] ( identifier[self] , identifier[sample] , identifier[ontologyClass] , identifier[path] , identifier[callback] = keyword[None] , identifier[output] = literal[string] ): literal[string] identifier[kwargs] ={ literal[string] : identifier[sample] , literal[string] : identifier[ontologyClass] , literal[string] : identifier[path] , literal[string] : identifier[callback] } identifier[kwargs] ={ identifier[k] : identifier[dumps] ( identifier[v] ) keyword[if] identifier[type] ( identifier[v] ) keyword[is] identifier[dict] keyword[else] identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] ()} identifier[param_rest] = identifier[self] . identifier[_make_rest] ( literal[string] ,** identifier[kwargs] ) identifier[url] = identifier[self] . identifier[_basePath] +( literal[string] ). identifier[format] (** identifier[kwargs] ) identifier[requests_params] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] () keyword[if] identifier[k] != literal[string] } keyword[return] identifier[self] . identifier[_get] ( literal[string] , identifier[url] , identifier[requests_params] , identifier[output] )
def enrich(self, sample, ontologyClass, path, callback=None, output='application/json'): """ Class Enrichment Service from: /analyzer/enrichment Arguments: sample: A list of CURIEs for nodes whose attributes are to be tested for enrichment. For example, a list of genes. ontologyClass: CURIE for parent ontology class for the attribute to be tested. For example, GO biological process path: A path expression that connects sample nodes to attribute class nodes callback: Name of the JSONP callback ('fn' by default). Supplying this parameter or requesting a javascript media type will cause a JSONP response to be rendered. outputs: application/json """ kwargs = {'sample': sample, 'ontologyClass': ontologyClass, 'path': path, 'callback': callback} kwargs = {k: dumps(v) if type(v) is dict else v for (k, v) in kwargs.items()} param_rest = self._make_rest('path', **kwargs) url = self._basePath + '/analyzer/enrichment'.format(**kwargs) requests_params = {k: v for (k, v) in kwargs.items() if k != 'path'} return self._get('GET', url, requests_params, output)
def unpack_message(buffer):
    """Unpack the whole buffer, including header pack.

    Args:
        buffer (bytes): Bytes representation of an openflow message.

    Returns:
        object: Instance of an openflow message.

    """
    hdr_size = Header().get_size()
    hdr_buff, msg_buff = buffer[:hdr_size], buffer[hdr_size:]
    header = Header()
    header.unpack(hdr_buff)
    message = new_message_from_header(header)
    message.unpack(msg_buff)
    return message
def function[unpack_message, parameter[buffer]]: constant[Unpack the whole buffer, including header pack. Args: buffer (bytes): Bytes representation of an openflow message. Returns: object: Instance of an openflow message. ] variable[hdr_size] assign[=] call[call[name[Header], parameter[]].get_size, parameter[]] <ast.Tuple object at 0x7da20e9b0670> assign[=] tuple[[<ast.Subscript object at 0x7da20e9b0a00>, <ast.Subscript object at 0x7da20e9b1c90>]] variable[header] assign[=] call[name[Header], parameter[]] call[name[header].unpack, parameter[name[hdr_buff]]] variable[message] assign[=] call[name[new_message_from_header], parameter[name[header]]] call[name[message].unpack, parameter[name[msg_buff]]] return[name[message]]
keyword[def] identifier[unpack_message] ( identifier[buffer] ): literal[string] identifier[hdr_size] = identifier[Header] (). identifier[get_size] () identifier[hdr_buff] , identifier[msg_buff] = identifier[buffer] [: identifier[hdr_size] ], identifier[buffer] [ identifier[hdr_size] :] identifier[header] = identifier[Header] () identifier[header] . identifier[unpack] ( identifier[hdr_buff] ) identifier[message] = identifier[new_message_from_header] ( identifier[header] ) identifier[message] . identifier[unpack] ( identifier[msg_buff] ) keyword[return] identifier[message]
def unpack_message(buffer): """Unpack the whole buffer, including header pack. Args: buffer (bytes): Bytes representation of an openflow message. Returns: object: Instance of an openflow message. """ hdr_size = Header().get_size() (hdr_buff, msg_buff) = (buffer[:hdr_size], buffer[hdr_size:]) header = Header() header.unpack(hdr_buff) message = new_message_from_header(header) message.unpack(msg_buff) return message
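Header and new_message_from_header belong to the surrounding OpenFlow library, so they are not reproduced here. The generic split-then-parse pattern the function uses can be sketched with struct alone (the 8-byte version/type/length/xid layout is the standard OpenFlow header):

import struct

def split_header(buffer: bytes):
    # Peel the fixed-size header off the front, then hand back the body.
    hdr_size = struct.calcsize("!BBHI")  # 8 bytes
    version, msg_type, length, xid = struct.unpack("!BBHI", buffer[:hdr_size])
    return (version, msg_type, length, xid), buffer[hdr_size:]

header, body = split_header(b"\x04\x00\x00\x08\x00\x00\x00\x01")
assert header == (4, 0, 8, 1) and body == b""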
def delete_async(self, url, name, callback=None, params=None, headers=None): """ Asynchronous DELETE request with the process pool. """ if not name: name = '' params = params or {} headers = headers or {} endpoint = self._build_endpoint_url(url, name) self._authenticate(params, headers) process_pool.apply_async(make_delete_request, args=(endpoint, params, headers), callback=callback)
def function[delete_async, parameter[self, url, name, callback, params, headers]]: constant[ Asynchronous DELETE request with the process pool. ] if <ast.UnaryOp object at 0x7da204963070> begin[:] variable[name] assign[=] constant[] variable[params] assign[=] <ast.BoolOp object at 0x7da204963bb0> variable[headers] assign[=] <ast.BoolOp object at 0x7da204961570> variable[endpoint] assign[=] call[name[self]._build_endpoint_url, parameter[name[url], name[name]]] call[name[self]._authenticate, parameter[name[params], name[headers]]] call[name[process_pool].apply_async, parameter[name[make_delete_request]]]
keyword[def] identifier[delete_async] ( identifier[self] , identifier[url] , identifier[name] , identifier[callback] = keyword[None] , identifier[params] = keyword[None] , identifier[headers] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[name] : identifier[name] = literal[string] identifier[params] = identifier[params] keyword[or] {} identifier[headers] = identifier[headers] keyword[or] {} identifier[endpoint] = identifier[self] . identifier[_build_endpoint_url] ( identifier[url] , identifier[name] ) identifier[self] . identifier[_authenticate] ( identifier[params] , identifier[headers] ) identifier[process_pool] . identifier[apply_async] ( identifier[make_delete_request] , identifier[args] =( identifier[endpoint] , identifier[params] , identifier[headers] ), identifier[callback] = identifier[callback] )
def delete_async(self, url, name, callback=None, params=None, headers=None): """ Asynchronous DELETE request with the process pool. """ if not name: name = '' # depends on [control=['if'], data=[]] params = params or {} headers = headers or {} endpoint = self._build_endpoint_url(url, name) self._authenticate(params, headers) process_pool.apply_async(make_delete_request, args=(endpoint, params, headers), callback=callback)
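process_pool above is assumed to be a multiprocessing-style pool. The apply_async call pattern with a result callback looks like this standalone sketch (the worker body and URL are illustrative; both functions must live at module level so they can be pickled):

from multiprocessing import Pool

def make_delete_request(endpoint, params, headers):
    # Stand-in worker: the real one would issue the HTTP DELETE here.
    return ('DELETE', endpoint)

def on_done(result):
    print('finished:', result)

if __name__ == '__main__':
    with Pool(processes=2) as pool:
        res = pool.apply_async(
            make_delete_request,
            args=('https://example.firebaseio.com/users/1.json', {}, {}),
            callback=on_done)
        res.wait()  # the callback has already run once this returns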
def add_samples(self, samples: Iterable[Sample]) -> None: """Add samples in an iterable to this :class:`SampleSheet`.""" for sample in samples: self.add_sample(sample)
def function[add_samples, parameter[self, samples]]: constant[Add samples in an iterable to this :class:`SampleSheet`.] for taget[name[sample]] in starred[name[samples]] begin[:] call[name[self].add_sample, parameter[name[sample]]]
keyword[def] identifier[add_samples] ( identifier[self] , identifier[samples] : identifier[Iterable] [ identifier[Sample] ])-> keyword[None] : literal[string] keyword[for] identifier[sample] keyword[in] identifier[samples] : identifier[self] . identifier[add_sample] ( identifier[sample] )
def add_samples(self, samples: Iterable[Sample]) -> None: """Add samples in an iterable to this :class:`SampleSheet`.""" for sample in samples: self.add_sample(sample) # depends on [control=['for'], data=['sample']]
def indexSearch(self, indexes): """Filters the data by a list of indexes. Args: indexes (list of int): List of index numbers to return. Returns: list: A list containing all indexes with filtered data. Matches will be `True`, the remaining items will be `False`. If the dataFrame is empty, an empty list will be returned. """ if not self._dataFrame.empty: filter0 = self._dataFrame.index == -9999 for index in indexes: filter1 = self._dataFrame.index == index filter0 = np.logical_or(filter0, filter1) return filter0 else: return []
def function[indexSearch, parameter[self, indexes]]: constant[Filters the data by a list of indexes. Args: indexes (list of int): List of index numbers to return. Returns: list: A list containing all indexes with filtered data. Matches will be `True`, the remaining items will be `False`. If the dataFrame is empty, an empty list will be returned. ] if <ast.UnaryOp object at 0x7da1b0772080> begin[:] variable[filter0] assign[=] compare[name[self]._dataFrame.index equal[==] <ast.UnaryOp object at 0x7da1b0770a30>] for taget[name[index]] in starred[name[indexes]] begin[:] variable[filter1] assign[=] compare[name[self]._dataFrame.index equal[==] name[index]] variable[filter0] assign[=] call[name[np].logical_or, parameter[name[filter0], name[filter1]]] return[name[filter0]]
keyword[def] identifier[indexSearch] ( identifier[self] , identifier[indexes] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_dataFrame] . identifier[empty] : identifier[filter0] = identifier[self] . identifier[_dataFrame] . identifier[index] ==- literal[int] keyword[for] identifier[index] keyword[in] identifier[indexes] : identifier[filter1] = identifier[self] . identifier[_dataFrame] . identifier[index] == identifier[index] identifier[filter0] = identifier[np] . identifier[logical_or] ( identifier[filter0] , identifier[filter1] ) keyword[return] identifier[filter0] keyword[else] : keyword[return] []
def indexSearch(self, indexes): """Filters the data by a list of indexes. Args: indexes (list of int): List of index numbers to return. Returns: list: A list containing all indexes with filtered data. Matches will be `True`, the remaining items will be `False`. If the dataFrame is empty, an empty list will be returned. """ if not self._dataFrame.empty: filter0 = self._dataFrame.index == -9999 for index in indexes: filter1 = self._dataFrame.index == index filter0 = np.logical_or(filter0, filter1) # depends on [control=['for'], data=['index']] return filter0 # depends on [control=['if'], data=[]] else: return []
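The loop above seeds filter0 with index == -9999 only to get an all-False mask, then ORs in one comparison per requested index. numpy's isin builds the same mask in one call; a comparison sketch with a tiny frame:

import numpy as np
import pandas as pd

df = pd.DataFrame({'a': [10, 20, 30]}, index=[0, 1, 2])
wanted = [0, 2]

mask = np.isin(df.index, wanted)  # equivalent of the logical_or loop above
assert list(mask) == [True, False, True]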
def get_txn(self, txn_name, txn_uri):

    '''
    Retrieves known transaction and adds to self.txns.

    TODO:
        Perhaps this should send a keep-alive request as well?  Obviously still needed, and would reset timer.

    Args:
        txn_name (str): local, human name for transaction
        txn_uri (str, rdflib.term.URIRef): uri of the transaction. e.g. http://localhost:8080/rest/txn:123456789

    Return:
        (Transaction) local instance of transactions from self.txns[txn_name]
    '''

    # parse uri
    txn_uri = self.parse_uri(txn_uri)

    # request the transaction
    txn_response = self.api.http_request('GET', txn_uri, data=None, headers=None)

    # if 200, transaction exists
    if txn_response.status_code == 200:
        logger.debug("transaction found: %s" % txn_uri)

        # init new Transaction, and pass Expires header
        txn = Transaction(
            self, # pass the repository
            txn_name,
            txn_uri,
            expires = None)

        # append to self
        self.txns[txn_name] = txn

        # return
        return txn

    # if 404 or 410, transaction does not exist
    elif txn_response.status_code in [404, 410]:
        logger.debug("transaction does not exist: %s" % txn_uri)
        return False

    else:
        raise Exception('HTTP %s, could not retrieve transaction' % txn_response.status_code)
def function[get_txn, parameter[self, txn_name, txn_uri]]: constant[ Retrieves known transaction and adds to self.txns. TODO: Perhaps this should send a keep-alive request as well? Obviously still needed, and would reset timer. Args: txn_name (str): local, human name for transaction txn_uri (str, rdflib.term.URIRef): uri of the transaction. e.g. http://localhost:8080/rest/txn:123456789 Return: (Transaction) local instance of transactions from self.txns[txn_name] ] variable[txn_uri] assign[=] call[name[self].parse_uri, parameter[name[txn_uri]]] variable[txn_response] assign[=] call[name[self].api.http_request, parameter[constant[GET], name[txn_uri]]] if compare[name[txn_response].status_code equal[==] constant[200]] begin[:] call[name[logger].debug, parameter[binary_operation[constant[transaction found: %s] <ast.Mod object at 0x7da2590d6920> name[txn_uri]]]] variable[txn] assign[=] call[name[Transaction], parameter[name[self], name[txn_name], name[txn_uri]]] call[name[self].txns][name[txn_name]] assign[=] name[txn] return[name[txn]]
keyword[def] identifier[get_txn] ( identifier[self] , identifier[txn_name] , identifier[txn_uri] ): literal[string] identifier[txn_uri] = identifier[self] . identifier[parse_uri] ( identifier[txn_uri] ) identifier[txn_response] = identifier[self] . identifier[api] . identifier[http_request] ( literal[string] , identifier[txn_uri] , identifier[data] = keyword[None] , identifier[headers] = keyword[None] ) keyword[if] identifier[txn_response] . identifier[status_code] == literal[int] : identifier[logger] . identifier[debug] ( literal[string] % identifier[txn_uri] ) identifier[txn] = identifier[Transaction] ( identifier[self] , identifier[txn_name] , identifier[txn_uri] , identifier[expires] = keyword[None] ) identifier[self] . identifier[txns] [ identifier[txn_name] ]= identifier[txn] keyword[return] identifier[txn] keyword[elif] identifier[txn_response] . identifier[status_code] keyword[in] [ literal[int] , literal[int] ]: identifier[logger] . identifier[debug] ( literal[string] % identifier[txn_uri] ) keyword[return] keyword[False] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] % identifier[txn_response] . identifier[status_code] )
def get_txn(self, txn_name, txn_uri):
    """
    Retrieves known transaction and adds to self.txns.

    TODO:
    Perhaps this should send a keep-alive request as well? Obviously still needed, and would reset timer.

    Args:
        txn_name (str): local, human name for transaction
        txn_uri (str, rdflib.term.URIRef): uri of the transaction. e.g. http://localhost:8080/rest/txn:123456789

    Return:
        (Transaction) local instance of transactions from self.txns[txn_name]
    """
    # parse uri
    txn_uri = self.parse_uri(txn_uri)
    # request the transaction
    txn_response = self.api.http_request('GET', txn_uri, data=None, headers=None)
    # if 200, transaction exists
    if txn_response.status_code == 200:
        logger.debug('transaction found: %s' % txn_uri)
        # init new Transaction, and pass Expires header
        # pass the repository
        txn = Transaction(self, txn_name, txn_uri, expires=None)
        # append to self
        self.txns[txn_name] = txn
        # return
        return txn # depends on [control=['if'], data=[]]
    # if 404 or 410, transaction does not exist
    elif txn_response.status_code in [404, 410]:
        logger.debug('transaction does not exist: %s' % txn_uri)
        return False # depends on [control=['if'], data=[]]
    else:
        raise Exception('HTTP %s, could not retrieve transaction' % txn_response.status_code)
def add_helpingmaterials(config, helping_materials_file, helping_type): """Add helping materials to a project.""" res = _add_helpingmaterials(config, helping_materials_file, helping_type) click.echo(res)
def function[add_helpingmaterials, parameter[config, helping_materials_file, helping_type]]: constant[Add helping materials to a project.] variable[res] assign[=] call[name[_add_helpingmaterials], parameter[name[config], name[helping_materials_file], name[helping_type]]] call[name[click].echo, parameter[name[res]]]
keyword[def] identifier[add_helpingmaterials] ( identifier[config] , identifier[helping_materials_file] , identifier[helping_type] ): literal[string] identifier[res] = identifier[_add_helpingmaterials] ( identifier[config] , identifier[helping_materials_file] , identifier[helping_type] ) identifier[click] . identifier[echo] ( identifier[res] )
def add_helpingmaterials(config, helping_materials_file, helping_type): """Add helping materials to a project.""" res = _add_helpingmaterials(config, helping_materials_file, helping_type) click.echo(res)
def reduce_stack(array3D, z_function):
    """Return 2D array projection of the input 3D array.

    The input function is applied to the z-line at each (x, y) position.

    :param array3D: 3D numpy.array
    :param z_function: function to use for the projection (e.g. :func:`max`)
    """
    xmax, ymax, _ = array3D.shape
    projection = np.zeros((xmax, ymax), dtype=array3D.dtype)
    for x in range(xmax):
        for y in range(ymax):
            projection[x, y] = z_function(array3D[x, y, :])
    return projection
def function[reduce_stack, parameter[array3D, z_function]]: constant[Return 2D array projection of the input 3D array. The input function is applied to the z-line at each (x, y) position. :param array3D: 3D numpy.array :param z_function: function to use for the projection (e.g. :func:`max`) ] <ast.Tuple object at 0x7da1b1f20400> assign[=] name[array3D].shape variable[projection] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b1f20100>, <ast.Name object at 0x7da1b1f21600>]]]] for taget[name[x]] in starred[call[name[range], parameter[name[xmax]]]] begin[:] for taget[name[y]] in starred[call[name[range], parameter[name[ymax]]]] begin[:] call[name[projection]][tuple[[<ast.Name object at 0x7da1b1ff2110>, <ast.Name object at 0x7da1b1ff10c0>]]] assign[=] call[name[z_function], parameter[call[name[array3D]][tuple[[<ast.Name object at 0x7da1b1ff0be0>, <ast.Name object at 0x7da1b1ff0280>, <ast.Slice object at 0x7da1b1ff0e50>]]]]] return[name[projection]]
keyword[def] identifier[reduce_stack] ( identifier[array3D] , identifier[z_function] ): literal[string] identifier[xmax] , identifier[ymax] , identifier[_] = identifier[array3D] . identifier[shape] identifier[projection] = identifier[np] . identifier[zeros] (( identifier[xmax] , identifier[ymax] ), identifier[dtype] = identifier[array3D] . identifier[dtype] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[xmax] ): keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[ymax] ): identifier[projection] [ identifier[x] , identifier[y] ]= identifier[z_function] ( identifier[array3D] [ identifier[x] , identifier[y] ,:]) keyword[return] identifier[projection]
def reduce_stack(array3D, z_function):
    """Return 2D array projection of the input 3D array.

    The input function is applied to the z-line at each (x, y) position.

    :param array3D: 3D numpy.array
    :param z_function: function to use for the projection (e.g. :func:`max`)
    """
    (xmax, ymax, _) = array3D.shape
    projection = np.zeros((xmax, ymax), dtype=array3D.dtype)
    for x in range(xmax):
        for y in range(ymax):
            projection[x, y] = z_function(array3D[x, y, :]) # depends on [control=['for'], data=['y']] # depends on [control=['for'], data=['x']]
    return projection
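When z_function is a plain reducer such as max, the projection is also available without the Python loops. A comparison sketch against numpy's axis reduction:

import numpy as np

stack = np.arange(24).reshape(2, 3, 4)

proj_loop = reduce_stack(stack, max)   # element by element, as above
proj_vec = stack.max(axis=2)           # vectorized equivalent
assert (proj_loop == proj_vec).all()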
def _Open(self, path_spec=None, mode='rb'): """Opens the file-like object defined by path specification. Args: path_spec (Optional[PathSpec]): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid. """ if not self._file_object_set_in_init and not path_spec: raise ValueError('Missing path specification.') if self._file_object_set_in_init: return self._file_object = self._OpenFileObject(path_spec) if not self._file_object: raise IOError('Unable to open missing file-like object.')
def function[_Open, parameter[self, path_spec, mode]]: constant[Opens the file-like object defined by path specification. Args: path_spec (Optional[PathSpec]): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid. ] if <ast.BoolOp object at 0x7da1b064e530> begin[:] <ast.Raise object at 0x7da1b064c2b0> if name[self]._file_object_set_in_init begin[:] return[None] name[self]._file_object assign[=] call[name[self]._OpenFileObject, parameter[name[path_spec]]] if <ast.UnaryOp object at 0x7da1b0847610> begin[:] <ast.Raise object at 0x7da1b0846440>
keyword[def] identifier[_Open] ( identifier[self] , identifier[path_spec] = keyword[None] , identifier[mode] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_file_object_set_in_init] keyword[and] keyword[not] identifier[path_spec] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[self] . identifier[_file_object_set_in_init] : keyword[return] identifier[self] . identifier[_file_object] = identifier[self] . identifier[_OpenFileObject] ( identifier[path_spec] ) keyword[if] keyword[not] identifier[self] . identifier[_file_object] : keyword[raise] identifier[IOError] ( literal[string] )
def _Open(self, path_spec=None, mode='rb'): """Opens the file-like object defined by path specification. Args: path_spec (Optional[PathSpec]): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid. """ if not self._file_object_set_in_init and (not path_spec): raise ValueError('Missing path specification.') # depends on [control=['if'], data=[]] if self._file_object_set_in_init: return # depends on [control=['if'], data=[]] self._file_object = self._OpenFileObject(path_spec) if not self._file_object: raise IOError('Unable to open missing file-like object.') # depends on [control=['if'], data=[]]
def list_nodes(call=None): ''' Return a list of the BareMetal servers that are on the provider. ''' if call == 'action': raise SaltCloudSystemExit( 'The list_nodes function must be called with -f or --function.' ) items = query(method='servers') ret = {} for node in items['servers']: public_ips = [] private_ips = [] image_id = '' if node.get('public_ip'): public_ips = [node['public_ip']['address']] if node.get('private_ip'): private_ips = [node['private_ip']] if node.get('image'): image_id = node['image']['id'] ret[node['name']] = { 'id': node['id'], 'image_id': image_id, 'public_ips': public_ips, 'private_ips': private_ips, 'size': node['volumes']['0']['size'], 'state': node['state'], } return ret
def function[list_nodes, parameter[call]]: constant[ Return a list of the BareMetal servers that are on the provider. ] if compare[name[call] equal[==] constant[action]] begin[:] <ast.Raise object at 0x7da2046225c0> variable[items] assign[=] call[name[query], parameter[]] variable[ret] assign[=] dictionary[[], []] for taget[name[node]] in starred[call[name[items]][constant[servers]]] begin[:] variable[public_ips] assign[=] list[[]] variable[private_ips] assign[=] list[[]] variable[image_id] assign[=] constant[] if call[name[node].get, parameter[constant[public_ip]]] begin[:] variable[public_ips] assign[=] list[[<ast.Subscript object at 0x7da204621750>]] if call[name[node].get, parameter[constant[private_ip]]] begin[:] variable[private_ips] assign[=] list[[<ast.Subscript object at 0x7da204620cd0>]] if call[name[node].get, parameter[constant[image]]] begin[:] variable[image_id] assign[=] call[call[name[node]][constant[image]]][constant[id]] call[name[ret]][call[name[node]][constant[name]]] assign[=] dictionary[[<ast.Constant object at 0x7da204621db0>, <ast.Constant object at 0x7da204623580>, <ast.Constant object at 0x7da2046210c0>, <ast.Constant object at 0x7da2046207f0>, <ast.Constant object at 0x7da2046228c0>, <ast.Constant object at 0x7da204620c70>], [<ast.Subscript object at 0x7da204620fd0>, <ast.Name object at 0x7da1b2347fd0>, <ast.Name object at 0x7da1b2344130>, <ast.Name object at 0x7da1b23441c0>, <ast.Subscript object at 0x7da1b2344bb0>, <ast.Subscript object at 0x7da1b2345b70>]] return[name[ret]]
keyword[def] identifier[list_nodes] ( identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[call] == literal[string] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] ) identifier[items] = identifier[query] ( identifier[method] = literal[string] ) identifier[ret] ={} keyword[for] identifier[node] keyword[in] identifier[items] [ literal[string] ]: identifier[public_ips] =[] identifier[private_ips] =[] identifier[image_id] = literal[string] keyword[if] identifier[node] . identifier[get] ( literal[string] ): identifier[public_ips] =[ identifier[node] [ literal[string] ][ literal[string] ]] keyword[if] identifier[node] . identifier[get] ( literal[string] ): identifier[private_ips] =[ identifier[node] [ literal[string] ]] keyword[if] identifier[node] . identifier[get] ( literal[string] ): identifier[image_id] = identifier[node] [ literal[string] ][ literal[string] ] identifier[ret] [ identifier[node] [ literal[string] ]]={ literal[string] : identifier[node] [ literal[string] ], literal[string] : identifier[image_id] , literal[string] : identifier[public_ips] , literal[string] : identifier[private_ips] , literal[string] : identifier[node] [ literal[string] ][ literal[string] ][ literal[string] ], literal[string] : identifier[node] [ literal[string] ], } keyword[return] identifier[ret]
def list_nodes(call=None): """ Return a list of the BareMetal servers that are on the provider. """ if call == 'action': raise SaltCloudSystemExit('The list_nodes function must be called with -f or --function.') # depends on [control=['if'], data=[]] items = query(method='servers') ret = {} for node in items['servers']: public_ips = [] private_ips = [] image_id = '' if node.get('public_ip'): public_ips = [node['public_ip']['address']] # depends on [control=['if'], data=[]] if node.get('private_ip'): private_ips = [node['private_ip']] # depends on [control=['if'], data=[]] if node.get('image'): image_id = node['image']['id'] # depends on [control=['if'], data=[]] ret[node['name']] = {'id': node['id'], 'image_id': image_id, 'public_ips': public_ips, 'private_ips': private_ips, 'size': node['volumes']['0']['size'], 'state': node['state']} # depends on [control=['for'], data=['node']] return ret
def pretokenized_t2t_dataset(dataset_name=gin.REQUIRED, text2self=False, data_dir=gin.REQUIRED, dataset_split="train", batch_size=gin.REQUIRED, sequence_length=gin.REQUIRED, vocabulary=None): """Loads the Tensor2tensor dataset specified by dataset_name. Args: dataset_name: TensorFlow Datasets dataset name. text2self: a boolean data_dir: string, data_dir for TensorFlow Datasets dataset_split: a string - "train" or "dev" batch_size: an integer sequence_length: an integer vocabulary: ignored Returns: A tf.data.Dataset of batches """ del vocabulary filepattern = os.path.join( data_dir, dataset_name + "-" + dataset_split + "-*") filenames = tf.gfile.Glob(filepattern) tf.logging.info("Found %s files matching %s" % (len(filenames), filepattern)) if not filenames: raise ValueError("No matching files found") dataset = pretokenized_tfrecord_dataset( filenames=filenames, text2self=text2self, eos_included=True, repeat=dataset_split == "train", batch_size=batch_size, sequence_length=sequence_length) if dataset_split == "train": dataset = dataset.shuffle(1000) return dataset
def function[pretokenized_t2t_dataset, parameter[dataset_name, text2self, data_dir, dataset_split, batch_size, sequence_length, vocabulary]]: constant[Loads the Tensor2tensor dataset specified by dataset_name. Args: dataset_name: TensorFlow Datasets dataset name. text2self: a boolean data_dir: string, data_dir for TensorFlow Datasets dataset_split: a string - "train" or "dev" batch_size: an integer sequence_length: an integer vocabulary: ignored Returns: A tf.data.Dataset of batches ] <ast.Delete object at 0x7da204566020> variable[filepattern] assign[=] call[name[os].path.join, parameter[name[data_dir], binary_operation[binary_operation[binary_operation[name[dataset_name] + constant[-]] + name[dataset_split]] + constant[-*]]]] variable[filenames] assign[=] call[name[tf].gfile.Glob, parameter[name[filepattern]]] call[name[tf].logging.info, parameter[binary_operation[constant[Found %s files matching %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da204565810>, <ast.Name object at 0x7da204566110>]]]]] if <ast.UnaryOp object at 0x7da204567370> begin[:] <ast.Raise object at 0x7da204567df0> variable[dataset] assign[=] call[name[pretokenized_tfrecord_dataset], parameter[]] if compare[name[dataset_split] equal[==] constant[train]] begin[:] variable[dataset] assign[=] call[name[dataset].shuffle, parameter[constant[1000]]] return[name[dataset]]
keyword[def] identifier[pretokenized_t2t_dataset] ( identifier[dataset_name] = identifier[gin] . identifier[REQUIRED] , identifier[text2self] = keyword[False] , identifier[data_dir] = identifier[gin] . identifier[REQUIRED] , identifier[dataset_split] = literal[string] , identifier[batch_size] = identifier[gin] . identifier[REQUIRED] , identifier[sequence_length] = identifier[gin] . identifier[REQUIRED] , identifier[vocabulary] = keyword[None] ): literal[string] keyword[del] identifier[vocabulary] identifier[filepattern] = identifier[os] . identifier[path] . identifier[join] ( identifier[data_dir] , identifier[dataset_name] + literal[string] + identifier[dataset_split] + literal[string] ) identifier[filenames] = identifier[tf] . identifier[gfile] . identifier[Glob] ( identifier[filepattern] ) identifier[tf] . identifier[logging] . identifier[info] ( literal[string] %( identifier[len] ( identifier[filenames] ), identifier[filepattern] )) keyword[if] keyword[not] identifier[filenames] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[dataset] = identifier[pretokenized_tfrecord_dataset] ( identifier[filenames] = identifier[filenames] , identifier[text2self] = identifier[text2self] , identifier[eos_included] = keyword[True] , identifier[repeat] = identifier[dataset_split] == literal[string] , identifier[batch_size] = identifier[batch_size] , identifier[sequence_length] = identifier[sequence_length] ) keyword[if] identifier[dataset_split] == literal[string] : identifier[dataset] = identifier[dataset] . identifier[shuffle] ( literal[int] ) keyword[return] identifier[dataset]
def pretokenized_t2t_dataset(dataset_name=gin.REQUIRED, text2self=False, data_dir=gin.REQUIRED, dataset_split='train', batch_size=gin.REQUIRED, sequence_length=gin.REQUIRED, vocabulary=None): """Loads the Tensor2tensor dataset specified by dataset_name. Args: dataset_name: TensorFlow Datasets dataset name. text2self: a boolean data_dir: string, data_dir for TensorFlow Datasets dataset_split: a string - "train" or "dev" batch_size: an integer sequence_length: an integer vocabulary: ignored Returns: A tf.data.Dataset of batches """ del vocabulary filepattern = os.path.join(data_dir, dataset_name + '-' + dataset_split + '-*') filenames = tf.gfile.Glob(filepattern) tf.logging.info('Found %s files matching %s' % (len(filenames), filepattern)) if not filenames: raise ValueError('No matching files found') # depends on [control=['if'], data=[]] dataset = pretokenized_tfrecord_dataset(filenames=filenames, text2self=text2self, eos_included=True, repeat=dataset_split == 'train', batch_size=batch_size, sequence_length=sequence_length) if dataset_split == 'train': dataset = dataset.shuffle(1000) # depends on [control=['if'], data=[]] return dataset
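The file-discovery step in pretokenized_t2t_dataset is easy to exercise in isolation. A minimal sketch using the standard library in place of tf.gfile.Glob; the directory and dataset name are placeholders, not values from the source.

import glob
import os

# Placeholders standing in for the gin-supplied arguments.
data_dir = "/tmp/t2t_data"
dataset_name = "translate_ende_wmt32k"
dataset_split = "train"

# Same pattern the function builds: "<name>-<split>-*" under data_dir.
filepattern = os.path.join(data_dir, dataset_name + "-" + dataset_split + "-*")
filenames = glob.glob(filepattern)  # local stand-in for tf.gfile.Glob
print("Found %s files matching %s" % (len(filenames), filepattern))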
def add_ui_from_string(self, buffer, length=-1): """add_ui_from_string(buffer, length=-1) {{ all }} """ return Gtk.UIManager.add_ui_from_string(self, buffer, length)
def function[add_ui_from_string, parameter[self, buffer, length]]: constant[add_ui_from_string(buffer, length=-1) {{ all }} ] return[call[name[Gtk].UIManager.add_ui_from_string, parameter[name[self], name[buffer], name[length]]]]
keyword[def] identifier[add_ui_from_string] ( identifier[self] , identifier[buffer] , identifier[length] =- literal[int] ): literal[string] keyword[return] identifier[Gtk] . identifier[UIManager] . identifier[add_ui_from_string] ( identifier[self] , identifier[buffer] , identifier[length] )
def add_ui_from_string(self, buffer, length=-1): """add_ui_from_string(buffer, length=-1) {{ all }} """ return Gtk.UIManager.add_ui_from_string(self, buffer, length)
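For context, a hedged usage sketch of the add_ui_from_string override with GTK 3; the menu layout is made up, and it assumes PyGObject is installed and that an override like the one above makes the length argument optional.

import gi
gi.require_version("Gtk", "3.0")
from gi.repository import Gtk

# Illustrative UI definition; real code would attach a Gtk.ActionGroup
# before building widgets from the merged UI.
UI_XML = """
<ui>
  <menubar name='MenuBar'>
    <menu action='FileMenu'>
      <menuitem action='Quit'/>
    </menu>
  </menubar>
</ui>
"""

manager = Gtk.UIManager()
merge_id = manager.add_ui_from_string(UI_XML)  # length defaults to -1
print(merge_id)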
def plot(args): """ %prog plot input.bed seqid Plot the matchings between the reconstructed pseudomolecules and the maps. Two types of visualizations are available in one canvas: 1. Parallel axes, and matching markers are shown in connecting lines; 2. Scatter plot. """ from jcvi.graphics.base import plt, savefig, normalize_axes, \ set2, panel_labels, shorten from jcvi.graphics.chromosome import Chromosome, GeneticMap, \ HorizontalChromosome p = OptionParser(plot.__doc__) p.add_option("--title", help="Title of the plot") add_allmaps_plot_options(p) opts, args, iopts = p.set_image_options(args, figsize="10x6") if len(args) != 2: sys.exit(not p.print_help()) inputbed, seqid = args pf = inputbed.rsplit(".", 1)[0] bedfile = pf + ".lifted.bed" agpfile = pf + ".agp" weightsfile = opts.weightsfile links = opts.links function = get_function(opts.distance) cc = Map(bedfile, function=function) allseqids = cc.seqids mapnames = cc.mapnames weights = Weights(weightsfile, mapnames) assert seqid in allseqids, "{0} not in {1}".format(seqid, allseqids) s = Scaffold(seqid, cc) mlgs = [k for k, v in s.mlg_counts.items() if v >= links] while not mlgs: links /= 2 logging.error("No markers to plot, --links reset to {0}".format(links)) mlgs = [k for k, v in s.mlg_counts.items() if v >= links] mlgsizes = {} for mlg in mlgs: mm = cc.extract_mlg(mlg) mlgsize = max(function(x) for x in mm) mlgsizes[mlg] = mlgsize fig = plt.figure(1, (iopts.w, iopts.h)) root = fig.add_axes([0, 0, 1, 1]) bbox = dict(boxstyle="round", fc='darkslategray', ec='darkslategray') if opts.title: root.text(.5, .95, opts.title, color="w", bbox=bbox, size=16) ax1 = fig.add_axes([0, 0, .5, 1]) ax2 = fig.add_axes([.5, 0, .5, 1]) # Find the layout first ystart, ystop = .9, .1 L = Layout(mlgsizes) coords = L.coords tip = .02 marker_pos = {} # Palette colors = dict((mapname, set2[i % len(set2)]) for i, mapname in enumerate(mapnames)) colors = dict((mlg, colors[mlg.split("-")[0]]) for mlg in mlgs) rhos = {} # Parallel coordinates for mlg, (x, y1, y2) in coords.items(): mm = cc.extract_mlg(mlg) markers = [(m.accn, function(m)) for m in mm] # exhaustive marker list xy = [(m.pos, function(m)) for m in mm if m.seqid == seqid] mx, my = zip(*xy) rho = spearmanr(mx, my) rhos[mlg] = rho flip = rho < 0 g = GeneticMap(ax1, x, y1, y2, markers, tip=tip, flip=flip) extra = -3 * tip if x < .5 else 3 * tip ha = "right" if x < .5 else "left" mapname = mlg.split("-")[0] tlg = shorten(mlg.replace("_", ".")) # Latex does not like underscore char label = "{0} (w={1})".format(tlg, weights[mapname]) ax1.text(x + extra, (y1 + y2) / 2, label, color=colors[mlg], ha=ha, va="center", rotation=90) marker_pos.update(g.marker_pos) agp = AGP(agpfile) agp = [x for x in agp if x.object == seqid] chrsize = max(x.object_end for x in agp) # Pseudomolecules in the center r = ystart - ystop ratio = r / chrsize f = lambda x: (ystart - ratio * x) patchstart = [f(x.object_beg) for x in agp if not x.is_gap] Chromosome(ax1, .5, ystart, ystop, width=2 * tip, patch=patchstart, lw=2) label = "{0} ({1})".format(seqid, human_size(chrsize, precision=0)) ax1.text(.5, ystart + tip, label, ha="center") scatter_data = defaultdict(list) # Connecting lines for b in s.markers: marker_name = b.accn if marker_name not in marker_pos: continue cx = .5 cy = f(b.pos) mx = coords[b.mlg][0] my = marker_pos[marker_name] extra = -tip if mx < cx else tip extra *= 1.25 # leave boundaries for aesthetic reasons cx += extra mx -= extra ax1.plot((cx, mx), (cy, my), "-", color=colors[b.mlg]) 
scatter_data[b.mlg].append((b.pos, function(b))) # Scatter plot, same data as parallel coordinates xstart, xstop = sorted((ystart, ystop)) f = lambda x: (xstart + ratio * x) pp = [x.object_beg for x in agp if not x.is_gap] patchstart = [f(x) for x in pp] HorizontalChromosome(ax2, xstart, xstop, ystop, height=2 * tip, patch=patchstart, lw=2) draw_gauge(ax2, xstart, chrsize) gap = .03 ratio = (r - gap * len(mlgs) - tip) / sum(mlgsizes.values()) tlgs = [] for mlg, mlgsize in sorted(mlgsizes.items()): height = ratio * mlgsize ystart -= height xx = .5 + xstart / 2 width = r / 2 color = colors[mlg] ax = fig.add_axes([xx, ystart, width, height]) ypos = ystart + height / 2 ystart -= gap sd = scatter_data[mlg] xx, yy = zip(*sd) ax.vlines(pp, 0, 2 * mlgsize, colors="beige") ax.plot(xx, yy, ".", color=color) rho = rhos[mlg] ax.text(.5, 1 - .4 * gap / height, r"$\rho$={0:.3f}".format(rho), ha="center", va="top", transform=ax.transAxes, color="gray") tlg = shorten(mlg.replace("_", ".")) tlgs.append((tlg, ypos, color)) ax.set_xlim(0, chrsize) ax.set_ylim(0, mlgsize) ax.set_xticks([]) while height / len(ax.get_yticks()) < .03 and len(ax.get_yticks()) >= 2: ax.set_yticks(ax.get_yticks()[::2]) # Sparsify the ticks yticklabels = [int(x) for x in ax.get_yticks()] ax.set_yticklabels(yticklabels, family='Helvetica') if rho < 0: ax.invert_yaxis() for i, (tlg, ypos, color) in enumerate(tlgs): ha = "center" if len(tlgs) > 4: ha = "right" if i % 2 else "left" root.text(.5, ypos, tlg, color=color, rotation=90, ha=ha, va="center") if opts.panels: labels = ((.04, .96, 'A'), (.48, .96, 'B')) panel_labels(root, labels) normalize_axes((ax1, ax2, root)) image_name = seqid + "." + iopts.format savefig(image_name, dpi=iopts.dpi, iopts=iopts) plt.close(fig) return image_name
def function[plot, parameter[args]]: constant[ %prog plot input.bed seqid Plot the matchings between the reconstructed pseudomolecules and the maps. Two types of visualizations are available in one canvas: 1. Parallel axes, and matching markers are shown in connecting lines; 2. Scatter plot. ] from relative_module[jcvi.graphics.base] import module[plt], module[savefig], module[normalize_axes], module[set2], module[panel_labels], module[shorten] from relative_module[jcvi.graphics.chromosome] import module[Chromosome], module[GeneticMap], module[HorizontalChromosome] variable[p] assign[=] call[name[OptionParser], parameter[name[plot].__doc__]] call[name[p].add_option, parameter[constant[--title]]] call[name[add_allmaps_plot_options], parameter[name[p]]] <ast.Tuple object at 0x7da1b23450f0> assign[=] call[name[p].set_image_options, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[2]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b2345270>]] <ast.Tuple object at 0x7da1b23469e0> assign[=] name[args] variable[pf] assign[=] call[call[name[inputbed].rsplit, parameter[constant[.], constant[1]]]][constant[0]] variable[bedfile] assign[=] binary_operation[name[pf] + constant[.lifted.bed]] variable[agpfile] assign[=] binary_operation[name[pf] + constant[.agp]] variable[weightsfile] assign[=] name[opts].weightsfile variable[links] assign[=] name[opts].links variable[function] assign[=] call[name[get_function], parameter[name[opts].distance]] variable[cc] assign[=] call[name[Map], parameter[name[bedfile]]] variable[allseqids] assign[=] name[cc].seqids variable[mapnames] assign[=] name[cc].mapnames variable[weights] assign[=] call[name[Weights], parameter[name[weightsfile], name[mapnames]]] assert[compare[name[seqid] in name[allseqids]]] variable[s] assign[=] call[name[Scaffold], parameter[name[seqid], name[cc]]] variable[mlgs] assign[=] <ast.ListComp object at 0x7da1b2345360> while <ast.UnaryOp object at 0x7da1b2344640> begin[:] <ast.AugAssign object at 0x7da1b23463b0> call[name[logging].error, parameter[call[constant[No markers to plot, --links reset to {0}].format, parameter[name[links]]]]] variable[mlgs] assign[=] <ast.ListComp object at 0x7da1b2345930> variable[mlgsizes] assign[=] dictionary[[], []] for taget[name[mlg]] in starred[name[mlgs]] begin[:] variable[mm] assign[=] call[name[cc].extract_mlg, parameter[name[mlg]]] variable[mlgsize] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b2344a30>]] call[name[mlgsizes]][name[mlg]] assign[=] name[mlgsize] variable[fig] assign[=] call[name[plt].figure, parameter[constant[1], tuple[[<ast.Attribute object at 0x7da1b2345c60>, <ast.Attribute object at 0x7da1b2344370>]]]] variable[root] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da1b23458a0>, <ast.Constant object at 0x7da1b2345030>, <ast.Constant object at 0x7da1b2345f30>, <ast.Constant object at 0x7da1b2347e20>]]]] variable[bbox] assign[=] call[name[dict], parameter[]] if name[opts].title begin[:] call[name[root].text, parameter[constant[0.5], constant[0.95], name[opts].title]] variable[ax1] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da2041da6e0>, <ast.Constant object at 0x7da2041d8880>, <ast.Constant object at 0x7da2041db310>, <ast.Constant object at 0x7da2041d8eb0>]]]] variable[ax2] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da2041d84f0>, <ast.Constant object at 0x7da2041db7f0>, <ast.Constant object at 
0x7da2041d8d30>, <ast.Constant object at 0x7da2041da4a0>]]]] <ast.Tuple object at 0x7da2041d9840> assign[=] tuple[[<ast.Constant object at 0x7da2041da890>, <ast.Constant object at 0x7da2041d95d0>]] variable[L] assign[=] call[name[Layout], parameter[name[mlgsizes]]] variable[coords] assign[=] name[L].coords variable[tip] assign[=] constant[0.02] variable[marker_pos] assign[=] dictionary[[], []] variable[colors] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da2041d8e80>]] variable[colors] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da2041d82e0>]] variable[rhos] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da2041da1d0>, <ast.Tuple object at 0x7da2041dbcd0>]]] in starred[call[name[coords].items, parameter[]]] begin[:] variable[mm] assign[=] call[name[cc].extract_mlg, parameter[name[mlg]]] variable[markers] assign[=] <ast.ListComp object at 0x7da2041d8fa0> variable[xy] assign[=] <ast.ListComp object at 0x7da2041d9f00> <ast.Tuple object at 0x7da2041dadd0> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da2041dad40>]] variable[rho] assign[=] call[name[spearmanr], parameter[name[mx], name[my]]] call[name[rhos]][name[mlg]] assign[=] name[rho] variable[flip] assign[=] compare[name[rho] less[<] constant[0]] variable[g] assign[=] call[name[GeneticMap], parameter[name[ax1], name[x], name[y1], name[y2], name[markers]]] variable[extra] assign[=] <ast.IfExp object at 0x7da2041da740> variable[ha] assign[=] <ast.IfExp object at 0x7da2041d9ea0> variable[mapname] assign[=] call[call[name[mlg].split, parameter[constant[-]]]][constant[0]] variable[tlg] assign[=] call[name[shorten], parameter[call[name[mlg].replace, parameter[constant[_], constant[.]]]]] variable[label] assign[=] call[constant[{0} (w={1})].format, parameter[name[tlg], call[name[weights]][name[mapname]]]] call[name[ax1].text, parameter[binary_operation[name[x] + name[extra]], binary_operation[binary_operation[name[y1] + name[y2]] / constant[2]], name[label]]] call[name[marker_pos].update, parameter[name[g].marker_pos]] variable[agp] assign[=] call[name[AGP], parameter[name[agpfile]]] variable[agp] assign[=] <ast.ListComp object at 0x7da2041dbe50> variable[chrsize] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da2041db4c0>]] variable[r] assign[=] binary_operation[name[ystart] - name[ystop]] variable[ratio] assign[=] binary_operation[name[r] / name[chrsize]] variable[f] assign[=] <ast.Lambda object at 0x7da2041d9630> variable[patchstart] assign[=] <ast.ListComp object at 0x7da2041d82b0> call[name[Chromosome], parameter[name[ax1], constant[0.5], name[ystart], name[ystop]]] variable[label] assign[=] call[constant[{0} ({1})].format, parameter[name[seqid], call[name[human_size], parameter[name[chrsize]]]]] call[name[ax1].text, parameter[constant[0.5], binary_operation[name[ystart] + name[tip]], name[label]]] variable[scatter_data] assign[=] call[name[defaultdict], parameter[name[list]]] for taget[name[b]] in starred[name[s].markers] begin[:] variable[marker_name] assign[=] name[b].accn if compare[name[marker_name] <ast.NotIn object at 0x7da2590d7190> name[marker_pos]] begin[:] continue variable[cx] assign[=] constant[0.5] variable[cy] assign[=] call[name[f], parameter[name[b].pos]] variable[mx] assign[=] call[call[name[coords]][name[b].mlg]][constant[0]] variable[my] assign[=] call[name[marker_pos]][name[marker_name]] variable[extra] assign[=] <ast.IfExp object at 0x7da18ede7910> <ast.AugAssign object at 0x7da18ede61a0> <ast.AugAssign object at 
0x7da18ede4340> <ast.AugAssign object at 0x7da18ede4280> call[name[ax1].plot, parameter[tuple[[<ast.Name object at 0x7da18ede46d0>, <ast.Name object at 0x7da18ede4fd0>]], tuple[[<ast.Name object at 0x7da18ede6b00>, <ast.Name object at 0x7da18ede4520>]], constant[-]]] call[call[name[scatter_data]][name[b].mlg].append, parameter[tuple[[<ast.Attribute object at 0x7da18ede58a0>, <ast.Call object at 0x7da18ede5120>]]]] <ast.Tuple object at 0x7da18ede6ad0> assign[=] call[name[sorted], parameter[tuple[[<ast.Name object at 0x7da18ede78b0>, <ast.Name object at 0x7da18ede66b0>]]]] variable[f] assign[=] <ast.Lambda object at 0x7da18ede7790> variable[pp] assign[=] <ast.ListComp object at 0x7da18ede5c00> variable[patchstart] assign[=] <ast.ListComp object at 0x7da18ede5c90> call[name[HorizontalChromosome], parameter[name[ax2], name[xstart], name[xstop], name[ystop]]] call[name[draw_gauge], parameter[name[ax2], name[xstart], name[chrsize]]] variable[gap] assign[=] constant[0.03] variable[ratio] assign[=] binary_operation[binary_operation[binary_operation[name[r] - binary_operation[name[gap] * call[name[len], parameter[name[mlgs]]]]] - name[tip]] / call[name[sum], parameter[call[name[mlgsizes].values, parameter[]]]]] variable[tlgs] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18ede4be0>, <ast.Name object at 0x7da18ede63e0>]]] in starred[call[name[sorted], parameter[call[name[mlgsizes].items, parameter[]]]]] begin[:] variable[height] assign[=] binary_operation[name[ratio] * name[mlgsize]] <ast.AugAssign object at 0x7da18ede7280> variable[xx] assign[=] binary_operation[constant[0.5] + binary_operation[name[xstart] / constant[2]]] variable[width] assign[=] binary_operation[name[r] / constant[2]] variable[color] assign[=] call[name[colors]][name[mlg]] variable[ax] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Name object at 0x7da18ede45e0>, <ast.Name object at 0x7da18ede75b0>, <ast.Name object at 0x7da18ede5510>, <ast.Name object at 0x7da18ede5090>]]]] variable[ypos] assign[=] binary_operation[name[ystart] + binary_operation[name[height] / constant[2]]] <ast.AugAssign object at 0x7da18ede7040> variable[sd] assign[=] call[name[scatter_data]][name[mlg]] <ast.Tuple object at 0x7da18ede6c20> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da18ede7190>]] call[name[ax].vlines, parameter[name[pp], constant[0], binary_operation[constant[2] * name[mlgsize]]]] call[name[ax].plot, parameter[name[xx], name[yy], constant[.]]] variable[rho] assign[=] call[name[rhos]][name[mlg]] call[name[ax].text, parameter[constant[0.5], binary_operation[constant[1] - binary_operation[binary_operation[constant[0.4] * name[gap]] / name[height]]], call[constant[$\rho$={0:.3f}].format, parameter[name[rho]]]]] variable[tlg] assign[=] call[name[shorten], parameter[call[name[mlg].replace, parameter[constant[_], constant[.]]]]] call[name[tlgs].append, parameter[tuple[[<ast.Name object at 0x7da18ede76d0>, <ast.Name object at 0x7da18ede7940>, <ast.Name object at 0x7da20c9929b0>]]]] call[name[ax].set_xlim, parameter[constant[0], name[chrsize]]] call[name[ax].set_ylim, parameter[constant[0], name[mlgsize]]] call[name[ax].set_xticks, parameter[list[[]]]] while <ast.BoolOp object at 0x7da20c991ab0> begin[:] call[name[ax].set_yticks, parameter[call[call[name[ax].get_yticks, parameter[]]][<ast.Slice object at 0x7da20c993400>]]] variable[yticklabels] assign[=] <ast.ListComp object at 0x7da20c990160> call[name[ax].set_yticklabels, parameter[name[yticklabels]]] if compare[name[rho] less[<] constant[0]] begin[:] 
call[name[ax].invert_yaxis, parameter[]] for taget[tuple[[<ast.Name object at 0x7da20c991a50>, <ast.Tuple object at 0x7da20c992ef0>]]] in starred[call[name[enumerate], parameter[name[tlgs]]]] begin[:] variable[ha] assign[=] constant[center] if compare[call[name[len], parameter[name[tlgs]]] greater[>] constant[4]] begin[:] variable[ha] assign[=] <ast.IfExp object at 0x7da20c993250> call[name[root].text, parameter[constant[0.5], name[ypos], name[tlg]]] if name[opts].panels begin[:] variable[labels] assign[=] tuple[[<ast.Tuple object at 0x7da18f7208e0>, <ast.Tuple object at 0x7da18f723670>]] call[name[panel_labels], parameter[name[root], name[labels]]] call[name[normalize_axes], parameter[tuple[[<ast.Name object at 0x7da18f7228f0>, <ast.Name object at 0x7da18f720790>, <ast.Name object at 0x7da18f723400>]]]] variable[image_name] assign[=] binary_operation[binary_operation[name[seqid] + constant[.]] + name[iopts].format] call[name[savefig], parameter[name[image_name]]] call[name[plt].close, parameter[name[fig]]] return[name[image_name]]
keyword[def] identifier[plot] ( identifier[args] ): literal[string] keyword[from] identifier[jcvi] . identifier[graphics] . identifier[base] keyword[import] identifier[plt] , identifier[savefig] , identifier[normalize_axes] , identifier[set2] , identifier[panel_labels] , identifier[shorten] keyword[from] identifier[jcvi] . identifier[graphics] . identifier[chromosome] keyword[import] identifier[Chromosome] , identifier[GeneticMap] , identifier[HorizontalChromosome] identifier[p] = identifier[OptionParser] ( identifier[plot] . identifier[__doc__] ) identifier[p] . identifier[add_option] ( literal[string] , identifier[help] = literal[string] ) identifier[add_allmaps_plot_options] ( identifier[p] ) identifier[opts] , identifier[args] , identifier[iopts] = identifier[p] . identifier[set_image_options] ( identifier[args] , identifier[figsize] = literal[string] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[inputbed] , identifier[seqid] = identifier[args] identifier[pf] = identifier[inputbed] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ] identifier[bedfile] = identifier[pf] + literal[string] identifier[agpfile] = identifier[pf] + literal[string] identifier[weightsfile] = identifier[opts] . identifier[weightsfile] identifier[links] = identifier[opts] . identifier[links] identifier[function] = identifier[get_function] ( identifier[opts] . identifier[distance] ) identifier[cc] = identifier[Map] ( identifier[bedfile] , identifier[function] = identifier[function] ) identifier[allseqids] = identifier[cc] . identifier[seqids] identifier[mapnames] = identifier[cc] . identifier[mapnames] identifier[weights] = identifier[Weights] ( identifier[weightsfile] , identifier[mapnames] ) keyword[assert] identifier[seqid] keyword[in] identifier[allseqids] , literal[string] . identifier[format] ( identifier[seqid] , identifier[allseqids] ) identifier[s] = identifier[Scaffold] ( identifier[seqid] , identifier[cc] ) identifier[mlgs] =[ identifier[k] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[s] . identifier[mlg_counts] . identifier[items] () keyword[if] identifier[v] >= identifier[links] ] keyword[while] keyword[not] identifier[mlgs] : identifier[links] /= literal[int] identifier[logging] . identifier[error] ( literal[string] . identifier[format] ( identifier[links] )) identifier[mlgs] =[ identifier[k] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[s] . identifier[mlg_counts] . identifier[items] () keyword[if] identifier[v] >= identifier[links] ] identifier[mlgsizes] ={} keyword[for] identifier[mlg] keyword[in] identifier[mlgs] : identifier[mm] = identifier[cc] . identifier[extract_mlg] ( identifier[mlg] ) identifier[mlgsize] = identifier[max] ( identifier[function] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[mm] ) identifier[mlgsizes] [ identifier[mlg] ]= identifier[mlgsize] identifier[fig] = identifier[plt] . identifier[figure] ( literal[int] ,( identifier[iopts] . identifier[w] , identifier[iopts] . identifier[h] )) identifier[root] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[bbox] = identifier[dict] ( identifier[boxstyle] = literal[string] , identifier[fc] = literal[string] , identifier[ec] = literal[string] ) keyword[if] identifier[opts] . identifier[title] : identifier[root] . 
identifier[text] ( literal[int] , literal[int] , identifier[opts] . identifier[title] , identifier[color] = literal[string] , identifier[bbox] = identifier[bbox] , identifier[size] = literal[int] ) identifier[ax1] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[ax2] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[ystart] , identifier[ystop] = literal[int] , literal[int] identifier[L] = identifier[Layout] ( identifier[mlgsizes] ) identifier[coords] = identifier[L] . identifier[coords] identifier[tip] = literal[int] identifier[marker_pos] ={} identifier[colors] = identifier[dict] (( identifier[mapname] , identifier[set2] [ identifier[i] % identifier[len] ( identifier[set2] )]) keyword[for] identifier[i] , identifier[mapname] keyword[in] identifier[enumerate] ( identifier[mapnames] )) identifier[colors] = identifier[dict] (( identifier[mlg] , identifier[colors] [ identifier[mlg] . identifier[split] ( literal[string] )[ literal[int] ]]) keyword[for] identifier[mlg] keyword[in] identifier[mlgs] ) identifier[rhos] ={} keyword[for] identifier[mlg] ,( identifier[x] , identifier[y1] , identifier[y2] ) keyword[in] identifier[coords] . identifier[items] (): identifier[mm] = identifier[cc] . identifier[extract_mlg] ( identifier[mlg] ) identifier[markers] =[( identifier[m] . identifier[accn] , identifier[function] ( identifier[m] )) keyword[for] identifier[m] keyword[in] identifier[mm] ] identifier[xy] =[( identifier[m] . identifier[pos] , identifier[function] ( identifier[m] )) keyword[for] identifier[m] keyword[in] identifier[mm] keyword[if] identifier[m] . identifier[seqid] == identifier[seqid] ] identifier[mx] , identifier[my] = identifier[zip] (* identifier[xy] ) identifier[rho] = identifier[spearmanr] ( identifier[mx] , identifier[my] ) identifier[rhos] [ identifier[mlg] ]= identifier[rho] identifier[flip] = identifier[rho] < literal[int] identifier[g] = identifier[GeneticMap] ( identifier[ax1] , identifier[x] , identifier[y1] , identifier[y2] , identifier[markers] , identifier[tip] = identifier[tip] , identifier[flip] = identifier[flip] ) identifier[extra] =- literal[int] * identifier[tip] keyword[if] identifier[x] < literal[int] keyword[else] literal[int] * identifier[tip] identifier[ha] = literal[string] keyword[if] identifier[x] < literal[int] keyword[else] literal[string] identifier[mapname] = identifier[mlg] . identifier[split] ( literal[string] )[ literal[int] ] identifier[tlg] = identifier[shorten] ( identifier[mlg] . identifier[replace] ( literal[string] , literal[string] )) identifier[label] = literal[string] . identifier[format] ( identifier[tlg] , identifier[weights] [ identifier[mapname] ]) identifier[ax1] . identifier[text] ( identifier[x] + identifier[extra] ,( identifier[y1] + identifier[y2] )/ literal[int] , identifier[label] , identifier[color] = identifier[colors] [ identifier[mlg] ], identifier[ha] = identifier[ha] , identifier[va] = literal[string] , identifier[rotation] = literal[int] ) identifier[marker_pos] . identifier[update] ( identifier[g] . identifier[marker_pos] ) identifier[agp] = identifier[AGP] ( identifier[agpfile] ) identifier[agp] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[agp] keyword[if] identifier[x] . identifier[object] == identifier[seqid] ] identifier[chrsize] = identifier[max] ( identifier[x] . 
identifier[object_end] keyword[for] identifier[x] keyword[in] identifier[agp] ) identifier[r] = identifier[ystart] - identifier[ystop] identifier[ratio] = identifier[r] / identifier[chrsize] identifier[f] = keyword[lambda] identifier[x] :( identifier[ystart] - identifier[ratio] * identifier[x] ) identifier[patchstart] =[ identifier[f] ( identifier[x] . identifier[object_beg] ) keyword[for] identifier[x] keyword[in] identifier[agp] keyword[if] keyword[not] identifier[x] . identifier[is_gap] ] identifier[Chromosome] ( identifier[ax1] , literal[int] , identifier[ystart] , identifier[ystop] , identifier[width] = literal[int] * identifier[tip] , identifier[patch] = identifier[patchstart] , identifier[lw] = literal[int] ) identifier[label] = literal[string] . identifier[format] ( identifier[seqid] , identifier[human_size] ( identifier[chrsize] , identifier[precision] = literal[int] )) identifier[ax1] . identifier[text] ( literal[int] , identifier[ystart] + identifier[tip] , identifier[label] , identifier[ha] = literal[string] ) identifier[scatter_data] = identifier[defaultdict] ( identifier[list] ) keyword[for] identifier[b] keyword[in] identifier[s] . identifier[markers] : identifier[marker_name] = identifier[b] . identifier[accn] keyword[if] identifier[marker_name] keyword[not] keyword[in] identifier[marker_pos] : keyword[continue] identifier[cx] = literal[int] identifier[cy] = identifier[f] ( identifier[b] . identifier[pos] ) identifier[mx] = identifier[coords] [ identifier[b] . identifier[mlg] ][ literal[int] ] identifier[my] = identifier[marker_pos] [ identifier[marker_name] ] identifier[extra] =- identifier[tip] keyword[if] identifier[mx] < identifier[cx] keyword[else] identifier[tip] identifier[extra] *= literal[int] identifier[cx] += identifier[extra] identifier[mx] -= identifier[extra] identifier[ax1] . identifier[plot] (( identifier[cx] , identifier[mx] ),( identifier[cy] , identifier[my] ), literal[string] , identifier[color] = identifier[colors] [ identifier[b] . identifier[mlg] ]) identifier[scatter_data] [ identifier[b] . identifier[mlg] ]. identifier[append] (( identifier[b] . identifier[pos] , identifier[function] ( identifier[b] ))) identifier[xstart] , identifier[xstop] = identifier[sorted] (( identifier[ystart] , identifier[ystop] )) identifier[f] = keyword[lambda] identifier[x] :( identifier[xstart] + identifier[ratio] * identifier[x] ) identifier[pp] =[ identifier[x] . identifier[object_beg] keyword[for] identifier[x] keyword[in] identifier[agp] keyword[if] keyword[not] identifier[x] . identifier[is_gap] ] identifier[patchstart] =[ identifier[f] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[pp] ] identifier[HorizontalChromosome] ( identifier[ax2] , identifier[xstart] , identifier[xstop] , identifier[ystop] , identifier[height] = literal[int] * identifier[tip] , identifier[patch] = identifier[patchstart] , identifier[lw] = literal[int] ) identifier[draw_gauge] ( identifier[ax2] , identifier[xstart] , identifier[chrsize] ) identifier[gap] = literal[int] identifier[ratio] =( identifier[r] - identifier[gap] * identifier[len] ( identifier[mlgs] )- identifier[tip] )/ identifier[sum] ( identifier[mlgsizes] . identifier[values] ()) identifier[tlgs] =[] keyword[for] identifier[mlg] , identifier[mlgsize] keyword[in] identifier[sorted] ( identifier[mlgsizes] . 
identifier[items] ()): identifier[height] = identifier[ratio] * identifier[mlgsize] identifier[ystart] -= identifier[height] identifier[xx] = literal[int] + identifier[xstart] / literal[int] identifier[width] = identifier[r] / literal[int] identifier[color] = identifier[colors] [ identifier[mlg] ] identifier[ax] = identifier[fig] . identifier[add_axes] ([ identifier[xx] , identifier[ystart] , identifier[width] , identifier[height] ]) identifier[ypos] = identifier[ystart] + identifier[height] / literal[int] identifier[ystart] -= identifier[gap] identifier[sd] = identifier[scatter_data] [ identifier[mlg] ] identifier[xx] , identifier[yy] = identifier[zip] (* identifier[sd] ) identifier[ax] . identifier[vlines] ( identifier[pp] , literal[int] , literal[int] * identifier[mlgsize] , identifier[colors] = literal[string] ) identifier[ax] . identifier[plot] ( identifier[xx] , identifier[yy] , literal[string] , identifier[color] = identifier[color] ) identifier[rho] = identifier[rhos] [ identifier[mlg] ] identifier[ax] . identifier[text] ( literal[int] , literal[int] - literal[int] * identifier[gap] / identifier[height] , literal[string] . identifier[format] ( identifier[rho] ), identifier[ha] = literal[string] , identifier[va] = literal[string] , identifier[transform] = identifier[ax] . identifier[transAxes] , identifier[color] = literal[string] ) identifier[tlg] = identifier[shorten] ( identifier[mlg] . identifier[replace] ( literal[string] , literal[string] )) identifier[tlgs] . identifier[append] (( identifier[tlg] , identifier[ypos] , identifier[color] )) identifier[ax] . identifier[set_xlim] ( literal[int] , identifier[chrsize] ) identifier[ax] . identifier[set_ylim] ( literal[int] , identifier[mlgsize] ) identifier[ax] . identifier[set_xticks] ([]) keyword[while] identifier[height] / identifier[len] ( identifier[ax] . identifier[get_yticks] ())< literal[int] keyword[and] identifier[len] ( identifier[ax] . identifier[get_yticks] ())>= literal[int] : identifier[ax] . identifier[set_yticks] ( identifier[ax] . identifier[get_yticks] ()[:: literal[int] ]) identifier[yticklabels] =[ identifier[int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[ax] . identifier[get_yticks] ()] identifier[ax] . identifier[set_yticklabels] ( identifier[yticklabels] , identifier[family] = literal[string] ) keyword[if] identifier[rho] < literal[int] : identifier[ax] . identifier[invert_yaxis] () keyword[for] identifier[i] ,( identifier[tlg] , identifier[ypos] , identifier[color] ) keyword[in] identifier[enumerate] ( identifier[tlgs] ): identifier[ha] = literal[string] keyword[if] identifier[len] ( identifier[tlgs] )> literal[int] : identifier[ha] = literal[string] keyword[if] identifier[i] % literal[int] keyword[else] literal[string] identifier[root] . identifier[text] ( literal[int] , identifier[ypos] , identifier[tlg] , identifier[color] = identifier[color] , identifier[rotation] = literal[int] , identifier[ha] = identifier[ha] , identifier[va] = literal[string] ) keyword[if] identifier[opts] . identifier[panels] : identifier[labels] =(( literal[int] , literal[int] , literal[string] ),( literal[int] , literal[int] , literal[string] )) identifier[panel_labels] ( identifier[root] , identifier[labels] ) identifier[normalize_axes] (( identifier[ax1] , identifier[ax2] , identifier[root] )) identifier[image_name] = identifier[seqid] + literal[string] + identifier[iopts] . identifier[format] identifier[savefig] ( identifier[image_name] , identifier[dpi] = identifier[iopts] . 
identifier[dpi] , identifier[iopts] = identifier[iopts] ) identifier[plt] . identifier[close] ( identifier[fig] ) keyword[return] identifier[image_name]
def plot(args): """ %prog plot input.bed seqid Plot the matchings between the reconstructed pseudomolecules and the maps. Two types of visualizations are available in one canvas: 1. Parallel axes, and matching markers are shown in connecting lines; 2. Scatter plot. """ from jcvi.graphics.base import plt, savefig, normalize_axes, set2, panel_labels, shorten from jcvi.graphics.chromosome import Chromosome, GeneticMap, HorizontalChromosome p = OptionParser(plot.__doc__) p.add_option('--title', help='Title of the plot') add_allmaps_plot_options(p) (opts, args, iopts) = p.set_image_options(args, figsize='10x6') if len(args) != 2: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (inputbed, seqid) = args pf = inputbed.rsplit('.', 1)[0] bedfile = pf + '.lifted.bed' agpfile = pf + '.agp' weightsfile = opts.weightsfile links = opts.links function = get_function(opts.distance) cc = Map(bedfile, function=function) allseqids = cc.seqids mapnames = cc.mapnames weights = Weights(weightsfile, mapnames) assert seqid in allseqids, '{0} not in {1}'.format(seqid, allseqids) s = Scaffold(seqid, cc) mlgs = [k for (k, v) in s.mlg_counts.items() if v >= links] while not mlgs: links /= 2 logging.error('No markers to plot, --links reset to {0}'.format(links)) mlgs = [k for (k, v) in s.mlg_counts.items() if v >= links] # depends on [control=['while'], data=[]] mlgsizes = {} for mlg in mlgs: mm = cc.extract_mlg(mlg) mlgsize = max((function(x) for x in mm)) mlgsizes[mlg] = mlgsize # depends on [control=['for'], data=['mlg']] fig = plt.figure(1, (iopts.w, iopts.h)) root = fig.add_axes([0, 0, 1, 1]) bbox = dict(boxstyle='round', fc='darkslategray', ec='darkslategray') if opts.title: root.text(0.5, 0.95, opts.title, color='w', bbox=bbox, size=16) # depends on [control=['if'], data=[]] ax1 = fig.add_axes([0, 0, 0.5, 1]) ax2 = fig.add_axes([0.5, 0, 0.5, 1]) # Find the layout first (ystart, ystop) = (0.9, 0.1) L = Layout(mlgsizes) coords = L.coords tip = 0.02 marker_pos = {} # Palette colors = dict(((mapname, set2[i % len(set2)]) for (i, mapname) in enumerate(mapnames))) colors = dict(((mlg, colors[mlg.split('-')[0]]) for mlg in mlgs)) rhos = {} # Parallel coordinates for (mlg, (x, y1, y2)) in coords.items(): mm = cc.extract_mlg(mlg) markers = [(m.accn, function(m)) for m in mm] # exhaustive marker list xy = [(m.pos, function(m)) for m in mm if m.seqid == seqid] (mx, my) = zip(*xy) rho = spearmanr(mx, my) rhos[mlg] = rho flip = rho < 0 g = GeneticMap(ax1, x, y1, y2, markers, tip=tip, flip=flip) extra = -3 * tip if x < 0.5 else 3 * tip ha = 'right' if x < 0.5 else 'left' mapname = mlg.split('-')[0] tlg = shorten(mlg.replace('_', '.')) # Latex does not like underscore char label = '{0} (w={1})'.format(tlg, weights[mapname]) ax1.text(x + extra, (y1 + y2) / 2, label, color=colors[mlg], ha=ha, va='center', rotation=90) marker_pos.update(g.marker_pos) # depends on [control=['for'], data=[]] agp = AGP(agpfile) agp = [x for x in agp if x.object == seqid] chrsize = max((x.object_end for x in agp)) # Pseudomolecules in the center r = ystart - ystop ratio = r / chrsize f = lambda x: ystart - ratio * x patchstart = [f(x.object_beg) for x in agp if not x.is_gap] Chromosome(ax1, 0.5, ystart, ystop, width=2 * tip, patch=patchstart, lw=2) label = '{0} ({1})'.format(seqid, human_size(chrsize, precision=0)) ax1.text(0.5, ystart + tip, label, ha='center') scatter_data = defaultdict(list) # Connecting lines for b in s.markers: marker_name = b.accn if marker_name not in marker_pos: continue # depends on [control=['if'], 
data=[]] cx = 0.5 cy = f(b.pos) mx = coords[b.mlg][0] my = marker_pos[marker_name] extra = -tip if mx < cx else tip extra *= 1.25 # leave boundaries for aesthetic reasons cx += extra mx -= extra ax1.plot((cx, mx), (cy, my), '-', color=colors[b.mlg]) scatter_data[b.mlg].append((b.pos, function(b))) # depends on [control=['for'], data=['b']] # Scatter plot, same data as parallel coordinates (xstart, xstop) = sorted((ystart, ystop)) f = lambda x: xstart + ratio * x pp = [x.object_beg for x in agp if not x.is_gap] patchstart = [f(x) for x in pp] HorizontalChromosome(ax2, xstart, xstop, ystop, height=2 * tip, patch=patchstart, lw=2) draw_gauge(ax2, xstart, chrsize) gap = 0.03 ratio = (r - gap * len(mlgs) - tip) / sum(mlgsizes.values()) tlgs = [] for (mlg, mlgsize) in sorted(mlgsizes.items()): height = ratio * mlgsize ystart -= height xx = 0.5 + xstart / 2 width = r / 2 color = colors[mlg] ax = fig.add_axes([xx, ystart, width, height]) ypos = ystart + height / 2 ystart -= gap sd = scatter_data[mlg] (xx, yy) = zip(*sd) ax.vlines(pp, 0, 2 * mlgsize, colors='beige') ax.plot(xx, yy, '.', color=color) rho = rhos[mlg] ax.text(0.5, 1 - 0.4 * gap / height, '$\\rho$={0:.3f}'.format(rho), ha='center', va='top', transform=ax.transAxes, color='gray') tlg = shorten(mlg.replace('_', '.')) tlgs.append((tlg, ypos, color)) ax.set_xlim(0, chrsize) ax.set_ylim(0, mlgsize) ax.set_xticks([]) while height / len(ax.get_yticks()) < 0.03 and len(ax.get_yticks()) >= 2: ax.set_yticks(ax.get_yticks()[::2]) # Sparsify the ticks # depends on [control=['while'], data=[]] yticklabels = [int(x) for x in ax.get_yticks()] ax.set_yticklabels(yticklabels, family='Helvetica') if rho < 0: ax.invert_yaxis() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] for (i, (tlg, ypos, color)) in enumerate(tlgs): ha = 'center' if len(tlgs) > 4: ha = 'right' if i % 2 else 'left' # depends on [control=['if'], data=[]] root.text(0.5, ypos, tlg, color=color, rotation=90, ha=ha, va='center') # depends on [control=['for'], data=[]] if opts.panels: labels = ((0.04, 0.96, 'A'), (0.48, 0.96, 'B')) panel_labels(root, labels) # depends on [control=['if'], data=[]] normalize_axes((ax1, ax2, root)) image_name = seqid + '.' + iopts.format savefig(image_name, dpi=iopts.dpi, iopts=iopts) plt.close(fig) return image_name
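One detail of plot worth isolating: each linkage group is flipped when the physical and genetic marker orders disagree, detected through the sign of the Spearman correlation. A sketch using SciPy's spearmanr for illustration (the source may wrap its own helper); the coordinates are invented.

from scipy.stats import spearmanr

# Invented marker coordinates: physical scaffold position vs. genetic
# position (cM) on the linkage group.
physical = [100, 250, 400, 800]
genetic = [9.5, 7.1, 4.0, 1.2]

rho, _pvalue = spearmanr(physical, genetic)
flip = rho < 0  # negative rho: orders run opposite, so flip the axis
print(round(rho, 3), flip)  # -1.0 True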
def _generate_bearer_token(consumer_key, consumer_secret): """ Return the bearer token for a given pair of consumer key and secret values. """ data = [('grant_type', 'client_credentials')] resp = requests.post(OAUTH_ENDPOINT, data=data, auth=(consumer_key, consumer_secret)) logger.warning("Grabbing bearer token from OAUTH") if resp.status_code >= 400: logger.error(resp.text) resp.raise_for_status() return resp.json()['access_token']
def function[_generate_bearer_token, parameter[consumer_key, consumer_secret]]: constant[ Return the bearer token for a given pair of consumer key and secret values. ] variable[data] assign[=] list[[<ast.Tuple object at 0x7da1b12771c0>]] variable[resp] assign[=] call[name[requests].post, parameter[name[OAUTH_ENDPOINT]]] call[name[logger].warning, parameter[constant[Grabbing bearer token from OAUTH]]] if compare[name[resp].status_code greater_or_equal[>=] constant[400]] begin[:] call[name[logger].error, parameter[name[resp].text]] call[name[resp].raise_for_status, parameter[]] return[call[call[name[resp].json, parameter[]]][constant[access_token]]]
keyword[def] identifier[_generate_bearer_token] ( identifier[consumer_key] , identifier[consumer_secret] ): literal[string] identifier[data] =[( literal[string] , literal[string] )] identifier[resp] = identifier[requests] . identifier[post] ( identifier[OAUTH_ENDPOINT] , identifier[data] = identifier[data] , identifier[auth] =( identifier[consumer_key] , identifier[consumer_secret] )) identifier[logger] . identifier[warning] ( literal[string] ) keyword[if] identifier[resp] . identifier[status_code] >= literal[int] : identifier[logger] . identifier[error] ( identifier[resp] . identifier[text] ) identifier[resp] . identifier[raise_for_status] () keyword[return] identifier[resp] . identifier[json] ()[ literal[string] ]
def _generate_bearer_token(consumer_key, consumer_secret): """ Return the bearer token for a given pair of consumer key and secret values. """ data = [('grant_type', 'client_credentials')] resp = requests.post(OAUTH_ENDPOINT, data=data, auth=(consumer_key, consumer_secret)) logger.warning('Grabbing bearer token from OAUTH') if resp.status_code >= 400: logger.error(resp.text) resp.raise_for_status() # depends on [control=['if'], data=[]] return resp.json()['access_token']
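A hedged usage sketch of the client-credentials exchange above; the endpoint URL and the key/secret are placeholders (OAUTH_ENDPOINT is a module-level constant not shown in this record), so read this as the shape of the call rather than a verified configuration.

import requests

# Placeholder endpoint; the source reads OAUTH_ENDPOINT from module scope.
OAUTH_ENDPOINT = "https://example.com/oauth2/token"

def get_bearer(consumer_key, consumer_secret):
    # Client-credentials grant: the key/secret pair travels as HTTP Basic
    # auth and the grant type goes in the form body.
    resp = requests.post(OAUTH_ENDPOINT,
                         data=[("grant_type", "client_credentials")],
                         auth=(consumer_key, consumer_secret))
    resp.raise_for_status()
    return resp.json()["access_token"]

# token = get_bearer("MY_KEY", "MY_SECRET")  # requires real credentials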
def timezone(self): """ Return timezone. Offset from UTC. """ date = self.message.get('date') timezone = 0 try: _, timezone = convert_mail_date(date) finally: return timezone
def function[timezone, parameter[self]]: constant[ Return timezone. Offset from UTC. ] variable[date] assign[=] call[name[self].message.get, parameter[constant[date]]] variable[timezone] assign[=] constant[0] <ast.Try object at 0x7da1b0844340>
keyword[def] identifier[timezone] ( identifier[self] ): literal[string] identifier[date] = identifier[self] . identifier[message] . identifier[get] ( literal[string] ) identifier[timezone] = literal[int] keyword[try] : identifier[_] , identifier[timezone] = identifier[convert_mail_date] ( identifier[date] ) keyword[finally] : keyword[return] identifier[timezone]
def timezone(self): """ Return timezone. Offset from UTC. """ date = self.message.get('date') timezone = 0 try: (_, timezone) = convert_mail_date(date) # depends on [control=['try'], data=[]] finally: return timezone
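The convert_mail_date helper is not shown in this record, but the standard library extracts the same UTC offset from an RFC 2822 Date header. A sketch under that assumption; the header value is illustrative.

from email.utils import parsedate_tz

# parsedate_tz yields a 10-tuple whose last element is the UTC offset
# in seconds, or None when the header carries no zone information.
date = "Tue, 01 Jan 2019 10:00:00 +0530"
parsed = parsedate_tz(date)
timezone = parsed[9] if parsed and parsed[9] is not None else 0
print(timezone / 3600.0)  # 5.5 hours east of UTC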
def rename(self, from_path, to_path): """ Rename file. :type from_path: str :param from_path: the path of the source file :type to_path: str :param to_path: the path of the destination file :raises: :exc:`~exceptions.IOError` """ _complain_ifclosed(self.closed) return self.fs.rename(from_path, to_path)
def function[rename, parameter[self, from_path, to_path]]: constant[ Rename file. :type from_path: str :param from_path: the path of the source file :type to_path: str :param to_path: the path of the destination file :raises: :exc:`~exceptions.IOError` ] call[name[_complain_ifclosed], parameter[name[self].closed]] return[call[name[self].fs.rename, parameter[name[from_path], name[to_path]]]]
keyword[def] identifier[rename] ( identifier[self] , identifier[from_path] , identifier[to_path] ): literal[string] identifier[_complain_ifclosed] ( identifier[self] . identifier[closed] ) keyword[return] identifier[self] . identifier[fs] . identifier[rename] ( identifier[from_path] , identifier[to_path] )
def rename(self, from_path, to_path): """ Rename file. :type from_path: str :param from_path: the path of the source file :type to_path: str :param to_path: the path of the destination file :raises: :exc:`~exceptions.IOError` """ _complain_ifclosed(self.closed) return self.fs.rename(from_path, to_path)
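The rename wrapper mostly exists for the closed-handle guard. A minimal sketch of that pattern in isolation; the class and method names here are illustrative, not the library's own (the source uses a free function _complain_ifclosed).

class FsHandle:
    def __init__(self):
        self.closed = False

    def _complain_ifclosed(self):
        if self.closed:
            raise IOError("I/O operation on closed filesystem handle")

    def rename(self, from_path, to_path):
        self._complain_ifclosed()
        print("rename %s -> %s" % (from_path, to_path))

h = FsHandle()
h.rename("/tmp/a", "/tmp/b")
h.closed = True
# h.rename("/tmp/a", "/tmp/b")  # would now raise IOError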
def add_prefix(self, prefix, flags, prf):
    """Add network prefix.

    Args:
        prefix (str): network prefix.
        flags (str): network prefix flags, please refer to the Thread documentation for details
        prf (str): network prf, please refer to the Thread documentation for details
    """
    self._req('prefix add %s %s %s' % (prefix, flags, prf))
    time.sleep(1)
    self._req('netdataregister')
def function[add_prefix, parameter[self, prefix, flags, prf]]: constant[Add network prefix. Args: prefix (str): network prefix. flags (str): network prefix flags, please refer to the Thread documentation for details prf (str): network prf, please refer to the Thread documentation for details ] call[name[self]._req, parameter[binary_operation[constant[prefix add %s %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1c9bd30>, <ast.Name object at 0x7da1b1c995d0>, <ast.Name object at 0x7da1b1c98d60>]]]]] call[name[time].sleep, parameter[constant[1]]] call[name[self]._req, parameter[constant[netdataregister]]]
keyword[def] identifier[add_prefix] ( identifier[self] , identifier[prefix] , identifier[flags] , identifier[prf] ): literal[string] identifier[self] . identifier[_req] ( literal[string] %( identifier[prefix] , identifier[flags] , identifier[prf] )) identifier[time] . identifier[sleep] ( literal[int] ) identifier[self] . identifier[_req] ( literal[string] )
def add_prefix(self, prefix, flags, prf):
    """Add network prefix.

    Args:
        prefix (str): network prefix.
        flags (str): network prefix flags, please refer to the Thread documentation for details
        prf (str): network prf, please refer to the Thread documentation for details
    """
    self._req('prefix add %s %s %s' % (prefix, flags, prf))
    time.sleep(1)
    self._req('netdataregister')
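add_prefix just formats two OpenThread-style CLI commands with a pause between them. A trivial standalone sketch of the strings it sends; the prefix, flags, and prf values are illustrative.

def build_prefix_commands(prefix, flags, prf):
    # First command registers the prefix; the caller waits ~1s before
    # issuing the second to push the network data.
    return ["prefix add %s %s %s" % (prefix, flags, prf), "netdataregister"]

print(build_prefix_commands("fd00:db8::/64", "paros", "med"))
# ['prefix add fd00:db8::/64 paros med', 'netdataregister']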
def unregister_message_callback(self, type_, from_): """ Unregister a callback previously registered with :meth:`register_message_callback`. :param type_: Message type to listen for. :type type_: :class:`~.MessageType` or :data:`None` :param from_: Sender JID to listen for. :type from_: :class:`~aioxmpp.JID` or :data:`None` :raises KeyError: if no function is currently registered for the given ``(type_, from_)`` pair. :raises ValueError: if `type_` is not a valid :class:`~.MessageType` (and cannot be cast to a :class:`~.MessageType`) The match is made on the exact pair; it is not possible to unregister arbitrary listeners by passing :data:`None` to both arguments (i.e. the wildcarding only applies for receiving stanzas, not for unregistering callbacks; unregistering the super-wildcard with both arguments set to :data:`None` is of course possible). .. versionchanged:: 0.7 The `type_` argument is now supposed to be a :class:`~.MessageType` member. .. deprecated:: 0.7 Passing a :class:`str` as `type_` argument is deprecated and will raise a :class:`TypeError` as of the 1.0 release. See the Changelog for :ref:`api-changelog-0.7` for further details on how to upgrade your code efficiently. .. deprecated:: 0.9 This method has been deprecated in favour of and is now implemented in terms of the :class:`aioxmpp.dispatcher.SimpleMessageDispatcher` service. It is equivalent to call :meth:`~.SimpleStanzaDispatcher.unregister_callback`, except that the latter is not deprecated. """ if type_ is not None: type_ = self._coerce_enum(type_, structs.MessageType) warnings.warn( "unregister_message_callback is deprecated; use " "aioxmpp.dispatcher.SimpleMessageDispatcher instead", DeprecationWarning, stacklevel=2 ) self._xxx_message_dispatcher.unregister_callback( type_, from_, )
def function[unregister_message_callback, parameter[self, type_, from_]]: constant[ Unregister a callback previously registered with :meth:`register_message_callback`. :param type_: Message type to listen for. :type type_: :class:`~.MessageType` or :data:`None` :param from_: Sender JID to listen for. :type from_: :class:`~aioxmpp.JID` or :data:`None` :raises KeyError: if no function is currently registered for the given ``(type_, from_)`` pair. :raises ValueError: if `type_` is not a valid :class:`~.MessageType` (and cannot be cast to a :class:`~.MessageType`) The match is made on the exact pair; it is not possible to unregister arbitrary listeners by passing :data:`None` to both arguments (i.e. the wildcarding only applies for receiving stanzas, not for unregistering callbacks; unregistering the super-wildcard with both arguments set to :data:`None` is of course possible). .. versionchanged:: 0.7 The `type_` argument is now supposed to be a :class:`~.MessageType` member. .. deprecated:: 0.7 Passing a :class:`str` as `type_` argument is deprecated and will raise a :class:`TypeError` as of the 1.0 release. See the Changelog for :ref:`api-changelog-0.7` for further details on how to upgrade your code efficiently. .. deprecated:: 0.9 This method has been deprecated in favour of and is now implemented in terms of the :class:`aioxmpp.dispatcher.SimpleMessageDispatcher` service. It is equivalent to call :meth:`~.SimpleStanzaDispatcher.unregister_callback`, except that the latter is not deprecated. ] if compare[name[type_] is_not constant[None]] begin[:] variable[type_] assign[=] call[name[self]._coerce_enum, parameter[name[type_], name[structs].MessageType]] call[name[warnings].warn, parameter[constant[unregister_message_callback is deprecated; use aioxmpp.dispatcher.SimpleMessageDispatcher instead], name[DeprecationWarning]]] call[name[self]._xxx_message_dispatcher.unregister_callback, parameter[name[type_], name[from_]]]
keyword[def] identifier[unregister_message_callback] ( identifier[self] , identifier[type_] , identifier[from_] ): literal[string] keyword[if] identifier[type_] keyword[is] keyword[not] keyword[None] : identifier[type_] = identifier[self] . identifier[_coerce_enum] ( identifier[type_] , identifier[structs] . identifier[MessageType] ) identifier[warnings] . identifier[warn] ( literal[string] literal[string] , identifier[DeprecationWarning] , identifier[stacklevel] = literal[int] ) identifier[self] . identifier[_xxx_message_dispatcher] . identifier[unregister_callback] ( identifier[type_] , identifier[from_] , )
def unregister_message_callback(self, type_, from_): """ Unregister a callback previously registered with :meth:`register_message_callback`. :param type_: Message type to listen for. :type type_: :class:`~.MessageType` or :data:`None` :param from_: Sender JID to listen for. :type from_: :class:`~aioxmpp.JID` or :data:`None` :raises KeyError: if no function is currently registered for the given ``(type_, from_)`` pair. :raises ValueError: if `type_` is not a valid :class:`~.MessageType` (and cannot be cast to a :class:`~.MessageType`) The match is made on the exact pair; it is not possible to unregister arbitrary listeners by passing :data:`None` to both arguments (i.e. the wildcarding only applies for receiving stanzas, not for unregistering callbacks; unregistering the super-wildcard with both arguments set to :data:`None` is of course possible). .. versionchanged:: 0.7 The `type_` argument is now supposed to be a :class:`~.MessageType` member. .. deprecated:: 0.7 Passing a :class:`str` as `type_` argument is deprecated and will raise a :class:`TypeError` as of the 1.0 release. See the Changelog for :ref:`api-changelog-0.7` for further details on how to upgrade your code efficiently. .. deprecated:: 0.9 This method has been deprecated in favour of and is now implemented in terms of the :class:`aioxmpp.dispatcher.SimpleMessageDispatcher` service. It is equivalent to call :meth:`~.SimpleStanzaDispatcher.unregister_callback`, except that the latter is not deprecated. """ if type_ is not None: type_ = self._coerce_enum(type_, structs.MessageType) # depends on [control=['if'], data=['type_']] warnings.warn('unregister_message_callback is deprecated; use aioxmpp.dispatcher.SimpleMessageDispatcher instead', DeprecationWarning, stacklevel=2) self._xxx_message_dispatcher.unregister_callback(type_, from_)
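Since the docstring deprecates this method in favour of aioxmpp.dispatcher.SimpleMessageDispatcher, here is a hedged sketch of the replacement pattern it points to; the client wiring and the callback are placeholders.

import aioxmpp
import aioxmpp.dispatcher

def on_message(message):
    print("got:", message.body)

def wire_up(client):
    # Summon the dispatcher service and register/unregister directly;
    # the deprecated wrappers delegate to these calls internally.
    dispatcher = client.summon(aioxmpp.dispatcher.SimpleMessageDispatcher)
    dispatcher.register_callback(aioxmpp.MessageType.CHAT, None, on_message)
    # ... later ...
    dispatcher.unregister_callback(aioxmpp.MessageType.CHAT, None)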
def check_dimensional_vertical_coordinate(self, ds):
    '''
    Check units for variables defining vertical position are valid under
    CF.

    CF §4.3.1 The units attribute for dimensional coordinates will be a string
    formatted as per the udunits.dat file.

    The acceptable units for vertical (depth or height) coordinate variables
    are:
    - units of pressure as listed in the file udunits.dat. For vertical axes
      the most commonly used of these include bar, millibar, decibar,
      atmosphere (atm), pascal (Pa), and hPa.
    - units of length as listed in the file udunits.dat. For vertical axes
      the most commonly used of these include meter (metre, m), and
      kilometer (km).
    - other units listed in the file udunits.dat that may under certain
      circumstances reference vertical position such as units of density
      or temperature.

    Plural forms are also acceptable.

    :param netCDF4.Dataset ds: An open netCDF dataset
    :rtype: list
    :return: List of results
    '''
    ret_val = []
    z_variables = cfutil.get_z_variables(ds)
    #dimless_standard_names = [name for name, regx in dimless_vertical_coordinates]
    for name in z_variables:
        variable = ds.variables[name]
        standard_name = getattr(variable, 'standard_name', None)
        units = getattr(variable, 'units', None)
        positive = getattr(variable, 'positive', None)
        # Skip the variable if it's dimensionless
        if (hasattr(variable, 'formula_terms') or
                standard_name in dimless_vertical_coordinates):
            continue

        valid_vertical_coord = TestCtx(BaseCheck.HIGH, self.section_titles["4.3"])
        valid_vertical_coord.assert_true(isinstance(units, basestring) and units,
                                         "§4.3.1 {}'s units must be defined for vertical coordinates, "
                                         "there is no default".format(name))

        if not util.units_convertible('bar', units):
            valid_vertical_coord.assert_true(positive in ('up', 'down'),
                                             "{}: vertical coordinates not defining pressure must include "
                                             "a positive attribute that is either 'up' or 'down'".format(name))

        # _check_valid_standard_units, part of the Chapter 3 checks,
        # already verifies that this coordinate has valid units
        ret_val.append(valid_vertical_coord.to_result())

    return ret_val
def function[check_dimensional_vertical_coordinate, parameter[self, ds]]: constant[ Check units for variables defining vertical position are valid under CF. CF §4.3.1 The units attribute for dimensional coordinates will be a string formatted as per the udunits.dat file. The acceptable units for vertical (depth or height) coordinate variables are: - units of pressure as listed in the file udunits.dat. For vertical axes the most commonly used of these include bar, millibar, decibar, atmosphere (atm), pascal (Pa), and hPa. - units of length as listed in the file udunits.dat. For vertical axes the most commonly used of these include meter (metre, m), and kilometer (km). - other units listed in the file udunits.dat that may under certain circumstances reference vertical position such as units of density or temperature. Plural forms are also acceptable. :param netCDF4.Dataset ds: An open netCDF dataset :rtype: list :return: List of results ] variable[ret_val] assign[=] list[[]] variable[z_variables] assign[=] call[name[cfutil].get_z_variables, parameter[name[ds]]] for taget[name[name]] in starred[name[z_variables]] begin[:] variable[variable] assign[=] call[name[ds].variables][name[name]] variable[standard_name] assign[=] call[name[getattr], parameter[name[variable], constant[standard_name], constant[None]]] variable[units] assign[=] call[name[getattr], parameter[name[variable], constant[units], constant[None]]] variable[positive] assign[=] call[name[getattr], parameter[name[variable], constant[positive], constant[None]]] if <ast.BoolOp object at 0x7da2041d90f0> begin[:] continue variable[valid_vertical_coord] assign[=] call[name[TestCtx], parameter[name[BaseCheck].HIGH, call[name[self].section_titles][constant[4.3]]]] call[name[valid_vertical_coord].assert_true, parameter[<ast.BoolOp object at 0x7da1b0b309d0>, call[constant[§4.3.1 {}'s units must be defined for vertical coordinates, there is no default].format, parameter[name[name]]]]] if <ast.UnaryOp object at 0x7da18f7224a0> begin[:] call[name[valid_vertical_coord].assert_true, parameter[compare[name[positive] in tuple[[<ast.Constant object at 0x7da18f7211e0>, <ast.Constant object at 0x7da18f720ca0>]]], call[constant[{}: vertical coordinates not defining pressure must include a positive attribute that is either 'up' or 'down'].format, parameter[name[name]]]]] call[name[ret_val].append, parameter[call[name[valid_vertical_coord].to_result, parameter[]]]] return[name[ret_val]]
keyword[def] identifier[check_dimensional_vertical_coordinate] ( identifier[self] , identifier[ds] ): literal[string] identifier[ret_val] =[] identifier[z_variables] = identifier[cfutil] . identifier[get_z_variables] ( identifier[ds] ) keyword[for] identifier[name] keyword[in] identifier[z_variables] : identifier[variable] = identifier[ds] . identifier[variables] [ identifier[name] ] identifier[standard_name] = identifier[getattr] ( identifier[variable] , literal[string] , keyword[None] ) identifier[units] = identifier[getattr] ( identifier[variable] , literal[string] , keyword[None] ) identifier[positive] = identifier[getattr] ( identifier[variable] , literal[string] , keyword[None] ) keyword[if] ( identifier[hasattr] ( identifier[variable] , literal[string] ) keyword[or] identifier[standard_name] keyword[in] identifier[dimless_vertical_coordinates] ): keyword[continue] identifier[valid_vertical_coord] = identifier[TestCtx] ( identifier[BaseCheck] . identifier[HIGH] , identifier[self] . identifier[section_titles] [ literal[string] ]) identifier[valid_vertical_coord] . identifier[assert_true] ( identifier[isinstance] ( identifier[units] , identifier[basestring] ) keyword[and] identifier[units] , literal[string] literal[string] . identifier[format] ( identifier[name] )) keyword[if] keyword[not] identifier[util] . identifier[units_convertible] ( literal[string] , identifier[units] ): identifier[valid_vertical_coord] . identifier[assert_true] ( identifier[positive] keyword[in] ( literal[string] , literal[string] ), literal[string] literal[string] . identifier[format] ( identifier[name] )) identifier[ret_val] . identifier[append] ( identifier[valid_vertical_coord] . identifier[to_result] ()) keyword[return] identifier[ret_val]
def check_dimensional_vertical_coordinate(self, ds):
    '''
    Check that units for variables defining vertical position are valid under
    CF.

    CF §4.3.1 The units attribute for dimensional coordinates will be a string
    formatted as per the udunits.dat file.

    The acceptable units for vertical (depth or height) coordinate variables
    are:
    - units of pressure as listed in the file udunits.dat. For vertical axes
      the most commonly used of these include bar, millibar,
      decibar, atmosphere (atm), pascal (Pa), and hPa.
    - units of length as listed in the file udunits.dat. For vertical axes
      the most commonly used of these include meter (metre, m), and
      kilometer (km).
    - other units listed in the file udunits.dat that may under certain
      circumstances reference vertical position such as units of density or
      temperature.

    Plural forms are also acceptable.

    :param netCDF4.Dataset ds: An open netCDF dataset
    :rtype: list
    :return: List of results
    '''
    ret_val = []
    z_variables = cfutil.get_z_variables(ds)
    #dimless_standard_names = [name for name, regx in dimless_vertical_coordinates]
    for name in z_variables:
        variable = ds.variables[name]
        standard_name = getattr(variable, 'standard_name', None)
        units = getattr(variable, 'units', None)
        positive = getattr(variable, 'positive', None)
        # Skip the variable if it's dimensionless
        if hasattr(variable, 'formula_terms') or standard_name in dimless_vertical_coordinates:
            continue # depends on [control=['if'], data=[]]
        valid_vertical_coord = TestCtx(BaseCheck.HIGH, self.section_titles['4.3'])
        valid_vertical_coord.assert_true(isinstance(units, basestring) and units, "§4.3.1 {}'s units must be defined for vertical coordinates, there is no default".format(name))
        if not util.units_convertible('bar', units):
            valid_vertical_coord.assert_true(positive in ('up', 'down'), "{}: vertical coordinates not defining pressure must include a positive attribute that is either 'up' or 'down'".format(name)) # depends on [control=['if'], data=[]]
        # _check_valid_standard_units, part of the Chapter 3 checks,
        # already verifies that this coordinate has valid units
        ret_val.append(valid_vertical_coord.to_result()) # depends on [control=['for'], data=['name']]
    return ret_val
def effect_emd(d1, d2): """Compute the EMD between two effect repertoires. Because the nodes are independent, the EMD between effect repertoires is equal to the sum of the EMDs between the marginal distributions of each node, and the EMD between marginal distribution for a node is the absolute difference in the probabilities that the node is OFF. Args: d1 (np.ndarray): The first repertoire. d2 (np.ndarray): The second repertoire. Returns: float: The EMD between ``d1`` and ``d2``. """ return sum(abs(marginal_zero(d1, i) - marginal_zero(d2, i)) for i in range(d1.ndim))
def function[effect_emd, parameter[d1, d2]]: constant[Compute the EMD between two effect repertoires. Because the nodes are independent, the EMD between effect repertoires is equal to the sum of the EMDs between the marginal distributions of each node, and the EMD between marginal distribution for a node is the absolute difference in the probabilities that the node is OFF. Args: d1 (np.ndarray): The first repertoire. d2 (np.ndarray): The second repertoire. Returns: float: The EMD between ``d1`` and ``d2``. ] return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18dc04340>]]]
keyword[def] identifier[effect_emd] ( identifier[d1] , identifier[d2] ): literal[string] keyword[return] identifier[sum] ( identifier[abs] ( identifier[marginal_zero] ( identifier[d1] , identifier[i] )- identifier[marginal_zero] ( identifier[d2] , identifier[i] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[d1] . identifier[ndim] ))
def effect_emd(d1, d2): """Compute the EMD between two effect repertoires. Because the nodes are independent, the EMD between effect repertoires is equal to the sum of the EMDs between the marginal distributions of each node, and the EMD between marginal distribution for a node is the absolute difference in the probabilities that the node is OFF. Args: d1 (np.ndarray): The first repertoire. d2 (np.ndarray): The second repertoire. Returns: float: The EMD between ``d1`` and ``d2``. """ return sum((abs(marginal_zero(d1, i) - marginal_zero(d2, i)) for i in range(d1.ndim)))
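For concreteness, a small worked example of effect_emd above. Since marginal_zero() is not part of this excerpt, a plausible stand-in is defined here (an assumption): the marginal probability that node i is OFF, obtained by summing the repertoire over every other axis and taking the first entry.

import numpy as np

def marginal_zero(repertoire, i):
    # Assumed helper, not from the source: P(node i is OFF).
    other_axes = tuple(ax for ax in range(repertoire.ndim) if ax != i)
    return repertoire.sum(axis=other_axes)[0]

def effect_emd(d1, d2):
    return sum(abs(marginal_zero(d1, i) - marginal_zero(d2, i))
               for i in range(d1.ndim))

d1 = np.array([[0.25, 0.25], [0.25, 0.25]])  # two independent, unbiased nodes
d2 = np.array([[0.5, 0.0], [0.5, 0.0]])      # node 1 is certainly OFF

print(effect_emd(d1, d2))  # |0.5 - 0.5| + |0.5 - 1.0| = 0.5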
def _store_cached_zone_variable(self, zone_id, name, value): """ Stores the current known value of a zone variable into the cache. Calls any zone callbacks. """ zone_state = self._zone_state.setdefault(zone_id, {}) name = name.lower() zone_state[name] = value logger.debug("Zone Cache store %s.%s = %s", zone_id.device_str(), name, value) for callback in self._zone_callbacks: callback(zone_id, name, value)
def function[_store_cached_zone_variable, parameter[self, zone_id, name, value]]: constant[ Stores the current known value of a zone variable into the cache. Calls any zone callbacks. ] variable[zone_state] assign[=] call[name[self]._zone_state.setdefault, parameter[name[zone_id], dictionary[[], []]]] variable[name] assign[=] call[name[name].lower, parameter[]] call[name[zone_state]][name[name]] assign[=] name[value] call[name[logger].debug, parameter[constant[Zone Cache store %s.%s = %s], call[name[zone_id].device_str, parameter[]], name[name], name[value]]] for taget[name[callback]] in starred[name[self]._zone_callbacks] begin[:] call[name[callback], parameter[name[zone_id], name[name], name[value]]]
keyword[def] identifier[_store_cached_zone_variable] ( identifier[self] , identifier[zone_id] , identifier[name] , identifier[value] ): literal[string] identifier[zone_state] = identifier[self] . identifier[_zone_state] . identifier[setdefault] ( identifier[zone_id] ,{}) identifier[name] = identifier[name] . identifier[lower] () identifier[zone_state] [ identifier[name] ]= identifier[value] identifier[logger] . identifier[debug] ( literal[string] , identifier[zone_id] . identifier[device_str] (), identifier[name] , identifier[value] ) keyword[for] identifier[callback] keyword[in] identifier[self] . identifier[_zone_callbacks] : identifier[callback] ( identifier[zone_id] , identifier[name] , identifier[value] )
def _store_cached_zone_variable(self, zone_id, name, value): """ Stores the current known value of a zone variable into the cache. Calls any zone callbacks. """ zone_state = self._zone_state.setdefault(zone_id, {}) name = name.lower() zone_state[name] = value logger.debug('Zone Cache store %s.%s = %s', zone_id.device_str(), name, value) for callback in self._zone_callbacks: callback(zone_id, name, value) # depends on [control=['for'], data=['callback']]
def addSpecfile(self, specfiles, path):
    """Prepares the container for loading ``mrc`` files by adding specfile
    entries to ``self.info``. Use :func:`MsrunContainer.load()` afterwards
    to actually import the files.

    :param specfiles: the name of an ms-run file or a list of names
    :type specfiles: str or [str, str, ...]
    :param path: file directory used for loading and saving ``mrc`` files
    """
    for specfile in aux.toList(specfiles):
        if specfile not in self.info:
            self._addSpecfile(specfile, path)
        else:
            warntext = 'Error while calling "MsrunContainer.addSpecfile()"'\
                       ': "%s" is already present in "MsrunContainer.info"'\
                       % (specfile, )
            warnings.warn(warntext)
def function[addSpecfile, parameter[self, specfiles, path]]:
    constant[Prepares the container for loading ``mrc`` files by adding specfile
    entries to ``self.info``. Use :func:`MsrunContainer.load()` afterwards
    to actually import the files.

    :param specfiles: the name of an ms-run file or a list of names
    :type specfiles: str or [str, str, ...]
    :param path: file directory used for loading and saving ``mrc`` files
    ]
    for taget[name[specfile]] in starred[call[name[aux].toList, parameter[name[specfiles]]]] begin[:]
        if compare[name[specfile] <ast.NotIn object at 0x7da2590d7190> name[self].info] begin[:]
            call[name[self]._addSpecfile, parameter[name[specfile], name[path]]]
keyword[def] identifier[addSpecfile] ( identifier[self] , identifier[specfiles] , identifier[path] ): literal[string] keyword[for] identifier[specfile] keyword[in] identifier[aux] . identifier[toList] ( identifier[specfiles] ): keyword[if] identifier[specfile] keyword[not] keyword[in] identifier[self] . identifier[info] : identifier[self] . identifier[_addSpecfile] ( identifier[specfile] , identifier[path] ) keyword[else] : identifier[warntext] = literal[string] literal[string] %( identifier[specfile] ,) identifier[warnings] . identifier[warn] ( identifier[warntext] )
def addSpecfile(self, specfiles, path):
    """Prepares the container for loading ``mrc`` files by adding specfile
    entries to ``self.info``. Use :func:`MsrunContainer.load()` afterwards
    to actually import the files.

    :param specfiles: the name of an ms-run file or a list of names
    :type specfiles: str or [str, str, ...]
    :param path: file directory used for loading and saving ``mrc`` files
    """
    for specfile in aux.toList(specfiles):
        if specfile not in self.info:
            self._addSpecfile(specfile, path) # depends on [control=['if'], data=['specfile']]
        else:
            warntext = 'Error while calling "MsrunContainer.addSpecfile()": "%s" is already present in "MsrunContainer.info"' % (specfile,)
            warnings.warn(warntext) # depends on [control=['for'], data=['specfile']]
def respond(text=None, ssml=None, attributes=None, reprompt_text=None, reprompt_ssml=None, end_session=True): """ Build a dict containing a valid response to an Alexa request. If speech output is desired, either of `text` or `ssml` should be specified. :param text: Plain text speech output to be said by Alexa device. :param ssml: Speech output in SSML form. :param attributes: Dictionary of attributes to store in the session. :param end_session: Should the session be terminated after this response? :param reprompt_text, reprompt_ssml: Works the same as `text`/`ssml`, but instead sets the reprompting speech output. """ obj = { 'version': '1.0', 'response': { 'outputSpeech': {'type': 'PlainText', 'text': ''}, 'shouldEndSession': end_session }, 'sessionAttributes': attributes or {} } if text: obj['response']['outputSpeech'] = {'type': 'PlainText', 'text': text} elif ssml: obj['response']['outputSpeech'] = {'type': 'SSML', 'ssml': ssml} reprompt_output = None if reprompt_text: reprompt_output = {'type': 'PlainText', 'text': reprompt_text} elif reprompt_ssml: reprompt_output = {'type': 'SSML', 'ssml': reprompt_ssml} if reprompt_output: obj['response']['reprompt'] = {'outputSpeech': reprompt_output} return obj
def function[respond, parameter[text, ssml, attributes, reprompt_text, reprompt_ssml, end_session]]: constant[ Build a dict containing a valid response to an Alexa request. If speech output is desired, either of `text` or `ssml` should be specified. :param text: Plain text speech output to be said by Alexa device. :param ssml: Speech output in SSML form. :param attributes: Dictionary of attributes to store in the session. :param end_session: Should the session be terminated after this response? :param reprompt_text, reprompt_ssml: Works the same as `text`/`ssml`, but instead sets the reprompting speech output. ] variable[obj] assign[=] dictionary[[<ast.Constant object at 0x7da18f58fe50>, <ast.Constant object at 0x7da18f58d0c0>, <ast.Constant object at 0x7da18f58c700>], [<ast.Constant object at 0x7da18f58f340>, <ast.Dict object at 0x7da18f58e350>, <ast.BoolOp object at 0x7da18f58c2b0>]] if name[text] begin[:] call[call[name[obj]][constant[response]]][constant[outputSpeech]] assign[=] dictionary[[<ast.Constant object at 0x7da18f58de70>, <ast.Constant object at 0x7da18f58d330>], [<ast.Constant object at 0x7da18f58e500>, <ast.Name object at 0x7da18f58e260>]] variable[reprompt_output] assign[=] constant[None] if name[reprompt_text] begin[:] variable[reprompt_output] assign[=] dictionary[[<ast.Constant object at 0x7da18f58f2e0>, <ast.Constant object at 0x7da18f58c310>], [<ast.Constant object at 0x7da18f58dab0>, <ast.Name object at 0x7da18f58c190>]] if name[reprompt_output] begin[:] call[call[name[obj]][constant[response]]][constant[reprompt]] assign[=] dictionary[[<ast.Constant object at 0x7da20c6aab90>], [<ast.Name object at 0x7da20c6a8910>]] return[name[obj]]
keyword[def] identifier[respond] ( identifier[text] = keyword[None] , identifier[ssml] = keyword[None] , identifier[attributes] = keyword[None] , identifier[reprompt_text] = keyword[None] , identifier[reprompt_ssml] = keyword[None] , identifier[end_session] = keyword[True] ): literal[string] identifier[obj] ={ literal[string] : literal[string] , literal[string] :{ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }, literal[string] : identifier[end_session] }, literal[string] : identifier[attributes] keyword[or] {} } keyword[if] identifier[text] : identifier[obj] [ literal[string] ][ literal[string] ]={ literal[string] : literal[string] , literal[string] : identifier[text] } keyword[elif] identifier[ssml] : identifier[obj] [ literal[string] ][ literal[string] ]={ literal[string] : literal[string] , literal[string] : identifier[ssml] } identifier[reprompt_output] = keyword[None] keyword[if] identifier[reprompt_text] : identifier[reprompt_output] ={ literal[string] : literal[string] , literal[string] : identifier[reprompt_text] } keyword[elif] identifier[reprompt_ssml] : identifier[reprompt_output] ={ literal[string] : literal[string] , literal[string] : identifier[reprompt_ssml] } keyword[if] identifier[reprompt_output] : identifier[obj] [ literal[string] ][ literal[string] ]={ literal[string] : identifier[reprompt_output] } keyword[return] identifier[obj]
def respond(text=None, ssml=None, attributes=None, reprompt_text=None, reprompt_ssml=None, end_session=True): """ Build a dict containing a valid response to an Alexa request. If speech output is desired, either of `text` or `ssml` should be specified. :param text: Plain text speech output to be said by Alexa device. :param ssml: Speech output in SSML form. :param attributes: Dictionary of attributes to store in the session. :param end_session: Should the session be terminated after this response? :param reprompt_text, reprompt_ssml: Works the same as `text`/`ssml`, but instead sets the reprompting speech output. """ obj = {'version': '1.0', 'response': {'outputSpeech': {'type': 'PlainText', 'text': ''}, 'shouldEndSession': end_session}, 'sessionAttributes': attributes or {}} if text: obj['response']['outputSpeech'] = {'type': 'PlainText', 'text': text} # depends on [control=['if'], data=[]] elif ssml: obj['response']['outputSpeech'] = {'type': 'SSML', 'ssml': ssml} # depends on [control=['if'], data=[]] reprompt_output = None if reprompt_text: reprompt_output = {'type': 'PlainText', 'text': reprompt_text} # depends on [control=['if'], data=[]] elif reprompt_ssml: reprompt_output = {'type': 'SSML', 'ssml': reprompt_ssml} # depends on [control=['if'], data=[]] if reprompt_output: obj['response']['reprompt'] = {'outputSpeech': reprompt_output} # depends on [control=['if'], data=[]] return obj
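A quick usage sketch for respond above, showing the shape of the dict it returns (the values here are illustrative):

resp = respond(text='Hello!', attributes={'count': 1},
               reprompt_text='Still there?', end_session=False)
# resp == {
#     'version': '1.0',
#     'response': {
#         'outputSpeech': {'type': 'PlainText', 'text': 'Hello!'},
#         'shouldEndSession': False,
#         'reprompt': {'outputSpeech': {'type': 'PlainText',
#                                       'text': 'Still there?'}},
#     },
#     'sessionAttributes': {'count': 1},
# }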
def ack(self):
    """Acknowledge this message as being processed. This will remove
    the message from the queue.

    :raises MessageStateError: If the message has already been
        acknowledged/requeued/rejected.

    """
    if self.acknowledged:
        raise self.MessageStateError(
            "Message already acknowledged with state: %s" % self._state)
    self.backend.ack(self._frame)
    self._state = "ACK"
def function[ack, parameter[self]]:
    constant[Acknowledge this message as being processed. This will remove
    the message from the queue.

    :raises MessageStateError: If the message has already been
        acknowledged/requeued/rejected.

    ]
    if name[self].acknowledged begin[:]
        <ast.Raise object at 0x7da1b0faf940>
    call[name[self].backend.ack, parameter[name[self]._frame]]
    name[self]._state assign[=] constant[ACK]
keyword[def] identifier[ack] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[acknowledged] : keyword[raise] identifier[self] . identifier[MessageStateError] ( literal[string] % identifier[self] . identifier[_state] ) identifier[self] . identifier[backend] . identifier[ack] ( identifier[self] . identifier[_frame] ) identifier[self] . identifier[_state] = literal[string]
def ack(self):
    """Acknowledge this message as being processed. This will remove
    the message from the queue.

    :raises MessageStateError: If the message has already been
        acknowledged/requeued/rejected.

    """
    if self.acknowledged:
        raise self.MessageStateError('Message already acknowledged with state: %s' % self._state) # depends on [control=['if'], data=[]]
    self.backend.ack(self._frame)
    self._state = 'ACK'
def setup_logging(filename, log_dir=None, force_setup=False):
    ''' Try to load logging configuration from a file. Set level to INFO if loading fails.
    '''
    if not force_setup and ChirpCLI.SETUP_COMPLETED:
        logging.debug("Master logging has been set up. This call will be ignored.")
        return
    if log_dir and not os.path.exists(log_dir):
        os.makedirs(log_dir)
    if os.path.isfile(filename):
        with open(filename) as config_file:
            try:
                config = json.load(config_file)
                logging.config.dictConfig(config)
                logging.info("logging was set up using {}".format(filename))
                ChirpCLI.SETUP_COMPLETED = True
            except Exception as e:
                logging.exception("Could not load logging config")
                # default logging config
                logging.basicConfig(level=logging.INFO)
    else:
        logging.basicConfig(level=logging.INFO)
def function[setup_logging, parameter[filename, log_dir, force_setup]]:
    constant[ Try to load logging configuration from a file. Set level to INFO if loading fails.
    ]
    if <ast.BoolOp object at 0x7da1b11768f0> begin[:]
        call[name[logging].debug, parameter[constant[Master logging has been set up. This call will be ignored.]]]
        return[None]
    if <ast.BoolOp object at 0x7da1b1175e10> begin[:]
        call[name[os].makedirs, parameter[name[log_dir]]]
    if call[name[os].path.isfile, parameter[name[filename]]] begin[:]
        with call[name[open], parameter[name[filename]]] begin[:]
            <ast.Try object at 0x7da1b11743d0>
keyword[def] identifier[setup_logging] ( identifier[filename] , identifier[log_dir] = keyword[None] , identifier[force_setup] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[force_setup] keyword[and] identifier[ChirpCLI] . identifier[SETUP_COMPLETED] : identifier[logging] . identifier[debug] ( literal[string] ) keyword[return] keyword[if] identifier[log_dir] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[log_dir] ): identifier[os] . identifier[makedirs] ( identifier[log_dir] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filename] ): keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[config_file] : keyword[try] : identifier[config] = identifier[json] . identifier[load] ( identifier[config_file] ) identifier[logging] . identifier[config] . identifier[dictConfig] ( identifier[config] ) identifier[logging] . identifier[info] ( literal[string] . identifier[format] ( identifier[filename] )) identifier[ChirpCLI] . identifier[SETUP_COMPLETED] = keyword[True] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[logging] . identifier[exception] ( literal[string] ) identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[INFO] ) keyword[else] : identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[INFO] )
def setup_logging(filename, log_dir=None, force_setup=False):
    """ Try to load logging configuration from a file. Set level to INFO if loading fails.
    """
    if not force_setup and ChirpCLI.SETUP_COMPLETED:
        logging.debug('Master logging has been set up. This call will be ignored.')
        return # depends on [control=['if'], data=[]]
    if log_dir and (not os.path.exists(log_dir)):
        os.makedirs(log_dir) # depends on [control=['if'], data=[]]
    if os.path.isfile(filename):
        with open(filename) as config_file:
            try:
                config = json.load(config_file)
                logging.config.dictConfig(config)
                logging.info('logging was set up using {}'.format(filename))
                ChirpCLI.SETUP_COMPLETED = True # depends on [control=['try'], data=[]]
            except Exception as e:
                logging.exception('Could not load logging config')
                # default logging config
                logging.basicConfig(level=logging.INFO) # depends on [control=['except'], data=[]] # depends on [control=['with'], data=['config_file']] # depends on [control=['if'], data=[]]
    else:
        logging.basicConfig(level=logging.INFO)
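For reference, a minimal sketch of a config that setup_logging above can consume once saved as JSON; any valid logging.config.dictConfig schema works, and the formatter/handler names here are illustrative:

import json

example_config = {
    "version": 1,
    "formatters": {
        "brief": {"format": "%(levelname)s %(name)s: %(message)s"},
    },
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "formatter": "brief",
            "level": "DEBUG",
        },
    },
    "root": {"handlers": ["console"], "level": "INFO"},
}

with open("logging_config.json", "w") as f:
    json.dump(example_config, f, indent=2)
# setup_logging("logging_config.json") would then apply this configuration.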
def _effective_filename(self): # type: () -> str """ Returns the filename which is effectively used by the application. If overridden by an environment variable, it will return that filename. """ # same logic for the configuration filename. First, check if we were # initialized with a filename... config_filename = '' if self.filename: config_filename = self.filename # ... next, take the value from the environment env_filename = getenv(self.env_filename_name) if env_filename: self._log.info('Configuration filename was overridden with %r ' 'by the environment variable %s.', env_filename, self.env_filename_name) config_filename = env_filename return config_filename
def function[_effective_filename, parameter[self]]: constant[ Returns the filename which is effectively used by the application. If overridden by an environment variable, it will return that filename. ] variable[config_filename] assign[=] constant[] if name[self].filename begin[:] variable[config_filename] assign[=] name[self].filename variable[env_filename] assign[=] call[name[getenv], parameter[name[self].env_filename_name]] if name[env_filename] begin[:] call[name[self]._log.info, parameter[constant[Configuration filename was overridden with %r by the environment variable %s.], name[env_filename], name[self].env_filename_name]] variable[config_filename] assign[=] name[env_filename] return[name[config_filename]]
keyword[def] identifier[_effective_filename] ( identifier[self] ): literal[string] identifier[config_filename] = literal[string] keyword[if] identifier[self] . identifier[filename] : identifier[config_filename] = identifier[self] . identifier[filename] identifier[env_filename] = identifier[getenv] ( identifier[self] . identifier[env_filename_name] ) keyword[if] identifier[env_filename] : identifier[self] . identifier[_log] . identifier[info] ( literal[string] literal[string] , identifier[env_filename] , identifier[self] . identifier[env_filename_name] ) identifier[config_filename] = identifier[env_filename] keyword[return] identifier[config_filename]
def _effective_filename(self): # type: () -> str '\n Returns the filename which is effectively used by the application. If\n overridden by an environment variable, it will return that filename.\n ' # same logic for the configuration filename. First, check if we were # initialized with a filename... config_filename = '' if self.filename: config_filename = self.filename # depends on [control=['if'], data=[]] # ... next, take the value from the environment env_filename = getenv(self.env_filename_name) if env_filename: self._log.info('Configuration filename was overridden with %r by the environment variable %s.', env_filename, self.env_filename_name) config_filename = env_filename # depends on [control=['if'], data=[]] return config_filename
def get_path(self, x: int, y: int) -> List[Tuple[int, int]]: """Return a list of (x, y) steps to reach the goal point, if possible. """ lib.TCOD_dijkstra_path_set(self._path_c, x, y) path = [] pointer_x = ffi.new("int[2]") pointer_y = pointer_x + 1 while lib.TCOD_dijkstra_path_walk(self._path_c, pointer_x, pointer_y): path.append((pointer_x[0], pointer_y[0])) return path
def function[get_path, parameter[self, x, y]]: constant[Return a list of (x, y) steps to reach the goal point, if possible. ] call[name[lib].TCOD_dijkstra_path_set, parameter[name[self]._path_c, name[x], name[y]]] variable[path] assign[=] list[[]] variable[pointer_x] assign[=] call[name[ffi].new, parameter[constant[int[2]]]] variable[pointer_y] assign[=] binary_operation[name[pointer_x] + constant[1]] while call[name[lib].TCOD_dijkstra_path_walk, parameter[name[self]._path_c, name[pointer_x], name[pointer_y]]] begin[:] call[name[path].append, parameter[tuple[[<ast.Subscript object at 0x7da18eb56260>, <ast.Subscript object at 0x7da18eb57dc0>]]]] return[name[path]]
keyword[def] identifier[get_path] ( identifier[self] , identifier[x] : identifier[int] , identifier[y] : identifier[int] )-> identifier[List] [ identifier[Tuple] [ identifier[int] , identifier[int] ]]: literal[string] identifier[lib] . identifier[TCOD_dijkstra_path_set] ( identifier[self] . identifier[_path_c] , identifier[x] , identifier[y] ) identifier[path] =[] identifier[pointer_x] = identifier[ffi] . identifier[new] ( literal[string] ) identifier[pointer_y] = identifier[pointer_x] + literal[int] keyword[while] identifier[lib] . identifier[TCOD_dijkstra_path_walk] ( identifier[self] . identifier[_path_c] , identifier[pointer_x] , identifier[pointer_y] ): identifier[path] . identifier[append] (( identifier[pointer_x] [ literal[int] ], identifier[pointer_y] [ literal[int] ])) keyword[return] identifier[path]
def get_path(self, x: int, y: int) -> List[Tuple[int, int]]: """Return a list of (x, y) steps to reach the goal point, if possible. """ lib.TCOD_dijkstra_path_set(self._path_c, x, y) path = [] pointer_x = ffi.new('int[2]') pointer_y = pointer_x + 1 while lib.TCOD_dijkstra_path_walk(self._path_c, pointer_x, pointer_y): path.append((pointer_x[0], pointer_y[0])) # depends on [control=['while'], data=[]] return path
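A hypothetical usage sketch for get_path above, assuming python-tcod's Dijkstra pathfinder API; the constructor and set_goal call are assumptions, since only get_path is shown in this excerpt:

import numpy as np
import tcod

cost = np.ones((10, 10), dtype=np.int8)  # uniformly walkable cost map
dijkstra = tcod.path.Dijkstra(cost)      # assumed constructor
dijkstra.set_goal(0, 0)                  # assumed companion method
print(dijkstra.get_path(4, 3))           # [(x, y), ...] steps toward (4, 3)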
def raise_dependencies_check(self, ref_check, hosts, services, timeperiods, macromodulations,
                             checkmodulations, checks):
    # pylint: disable=too-many-locals, too-many-nested-blocks
    """Get checks that we depend on if ALL of the following conditions are met::

    * timeperiod is valid
    * dep.last_state_update < now - cls.cached_check_horizon (check of dependency is "old")

    :param ref_check: Check we want to get dependency from
    :type ref_check: alignak.check.Check
    :param hosts: hosts objects, used for almost every operation
    :type hosts: alignak.objects.host.Hosts
    :param services: services objects, used for almost every operation
    :type services: alignak.objects.service.Services
    :param timeperiods: Timeperiods objects, used for all kinds of timeperiod (notif, check)
    :type timeperiods: alignak.objects.timeperiod.Timeperiods
    :param macromodulations: Macro modulations objects, used in commands (notif, check)
    :type macromodulations: alignak.objects.macromodulation.Macromodulations
    :param checkmodulations: Checkmodulations objects, used to change check command if necessary
    :type checkmodulations: alignak.objects.checkmodulation.Checkmodulations
    :param checks: checks dict, used to get checks_in_progress for the object
    :type checks: dict
    :return: check created and check in_checking
    :rtype: dict
    """
    now = time.time()
    cls = self.__class__
    new_checks = []
    checking_checks = []
    for (dep_id, _, timeperiod_id, _) in self.act_depend_of:
        if dep_id in hosts:
            dep_item = hosts[dep_id]
        else:
            dep_item = services[dep_id]
        timeperiod = timeperiods[timeperiod_id]
        # If the dep_item timeperiod is not valid, do not raise the dep,
        # None = every time
        if timeperiod is None or timeperiod.is_time_valid(now):
            # if the update is 'fresh', do not raise dep,
            # cached_check_horizon = cached_service_check_horizon for service
            if dep_item.last_state_update < now - cls.cached_check_horizon:
                # Do not launch the check if it depends on a passive check or if a check
                # is already planned
                if dep_item.active_checks_enabled:
                    if not dep_item.in_checking:
                        newchk = dep_item.launch_check(now, hosts, services, timeperiods,
                                                       macromodulations, checkmodulations,
                                                       checks, ref_check, dependent=True)
                        if newchk is not None:
                            new_checks.append(newchk)
                    else:
                        if dep_item.checks_in_progress:
                            check_uuid = dep_item.checks_in_progress[0]
                            checks[check_uuid].depend_on_me.append(ref_check)
                            checking_checks.append(check_uuid)
    return {'new': new_checks, 'checking': checking_checks}
def function[raise_dependencies_check, parameter[self, ref_check, hosts, services, timeperiods, macromodulations, checkmodulations, checks]]:
    constant[Get checks that we depend on if ALL of the following conditions are met::

    * timeperiod is valid
    * dep.last_state_update < now - cls.cached_check_horizon (check of dependency is "old")

    :param ref_check: Check we want to get dependency from
    :type ref_check: alignak.check.Check
    :param hosts: hosts objects, used for almost every operation
    :type hosts: alignak.objects.host.Hosts
    :param services: services objects, used for almost every operation
    :type services: alignak.objects.service.Services
    :param timeperiods: Timeperiods objects, used for all kinds of timeperiod (notif, check)
    :type timeperiods: alignak.objects.timeperiod.Timeperiods
    :param macromodulations: Macro modulations objects, used in commands (notif, check)
    :type macromodulations: alignak.objects.macromodulation.Macromodulations
    :param checkmodulations: Checkmodulations objects, used to change check command if necessary
    :type checkmodulations: alignak.objects.checkmodulation.Checkmodulations
    :param checks: checks dict, used to get checks_in_progress for the object
    :type checks: dict
    :return: check created and check in_checking
    :rtype: dict
    ]
    variable[now] assign[=] call[name[time].time, parameter[]]
    variable[cls] assign[=] name[self].__class__
    variable[new_checks] assign[=] list[[]]
    variable[checking_checks] assign[=] list[[]]
    for taget[tuple[[<ast.Name object at 0x7da18f7206a0>, <ast.Name object at 0x7da18f720550>, <ast.Name object at 0x7da18f720f70>, <ast.Name object at 0x7da18f723250>]]] in starred[name[self].act_depend_of] begin[:]
        if compare[name[dep_id] in name[hosts]] begin[:]
            variable[dep_item] assign[=] call[name[hosts]][name[dep_id]]
        variable[timeperiod] assign[=] call[name[timeperiods]][name[timeperiod_id]]
        if <ast.BoolOp object at 0x7da18f7218a0> begin[:]
            if compare[name[dep_item].last_state_update less[<] binary_operation[name[now] - name[cls].cached_check_horizon]] begin[:]
                if name[dep_item].active_checks_enabled begin[:]
                    if <ast.UnaryOp object at 0x7da18f721f30> begin[:]
                        variable[newchk] assign[=] call[name[dep_item].launch_check, parameter[name[now], name[hosts], name[services], name[timeperiods], name[macromodulations], name[checkmodulations], name[checks], name[ref_check]]]
                        if compare[name[newchk] is_not constant[None]] begin[:]
                            call[name[new_checks].append, parameter[name[newchk]]]
    return[dictionary[[<ast.Constant object at 0x7da18f721b70>, <ast.Constant object at 0x7da18f7220e0>], [<ast.Name object at 0x7da18f7214e0>, <ast.Name object at 0x7da18f722470>]]]
keyword[def] identifier[raise_dependencies_check] ( identifier[self] , identifier[ref_check] , identifier[hosts] , identifier[services] , identifier[timeperiods] , identifier[macromodulations] , identifier[checkmodulations] , identifier[checks] ): literal[string] identifier[now] = identifier[time] . identifier[time] () identifier[cls] = identifier[self] . identifier[__class__] identifier[new_checks] =[] identifier[checking_checks] =[] keyword[for] ( identifier[dep_id] , identifier[_] , identifier[timeperiod_id] , identifier[_] ) keyword[in] identifier[self] . identifier[act_depend_of] : keyword[if] identifier[dep_id] keyword[in] identifier[hosts] : identifier[dep_item] = identifier[hosts] [ identifier[dep_id] ] keyword[else] : identifier[dep_item] = identifier[services] [ identifier[dep_id] ] identifier[timeperiod] = identifier[timeperiods] [ identifier[timeperiod_id] ] keyword[if] identifier[timeperiod] keyword[is] keyword[None] keyword[or] identifier[timeperiod] . identifier[is_time_valid] ( identifier[now] ): keyword[if] identifier[dep_item] . identifier[last_state_update] < identifier[now] - identifier[cls] . identifier[cached_check_horizon] : keyword[if] identifier[dep_item] . identifier[active_checks_enabled] : keyword[if] keyword[not] identifier[dep_item] . identifier[in_checking] : identifier[newchk] = identifier[dep_item] . identifier[launch_check] ( identifier[now] , identifier[hosts] , identifier[services] , identifier[timeperiods] , identifier[macromodulations] , identifier[checkmodulations] , identifier[checks] , identifier[ref_check] , identifier[dependent] = keyword[True] ) keyword[if] identifier[newchk] keyword[is] keyword[not] keyword[None] : identifier[new_checks] . identifier[append] ( identifier[newchk] ) keyword[else] : keyword[if] identifier[dep_item] . identifier[checks_in_progress] : identifier[check_uuid] = identifier[dep_item] . identifier[checks_in_progress] [ literal[int] ] identifier[checks] [ identifier[check_uuid] ]. identifier[depend_on_me] . identifier[append] ( identifier[ref_check] ) identifier[checking_checks] . identifier[append] ( identifier[check_uuid] ) keyword[return] { literal[string] : identifier[new_checks] , literal[string] : identifier[checking_checks] }
def raise_dependencies_check(self, ref_check, hosts, services, timeperiods, macromodulations, checkmodulations, checks): # pylint: disable=too-many-locals, too-many-nested-blocks
    'Get checks that we depend on if ALL of the following conditions are met::\n\n * timeperiod is valid\n * dep.last_state_update < now - cls.cached_check_horizon (check of dependency is "old")\n\n :param ref_check: Check we want to get dependency from\n :type ref_check: alignak.check.Check\n :param hosts: hosts objects, used for almost every operation\n :type hosts: alignak.objects.host.Hosts\n :param services: services objects, used for almost every operation\n :type services: alignak.objects.service.Services\n :param timeperiods: Timeperiods objects, used for all kinds of timeperiod (notif, check)\n :type timeperiods: alignak.objects.timeperiod.Timeperiods\n :param macromodulations: Macro modulations objects, used in commands (notif, check)\n :type macromodulations: alignak.objects.macromodulation.Macromodulations\n :param checkmodulations: Checkmodulations objects, used to change check command if necessary\n :type checkmodulations: alignak.objects.checkmodulation.Checkmodulations\n :param checks: checks dict, used to get checks_in_progress for the object\n :type checks: dict\n :return: check created and check in_checking\n :rtype: dict\n '
    now = time.time()
    cls = self.__class__
    new_checks = []
    checking_checks = []
    for (dep_id, _, timeperiod_id, _) in self.act_depend_of:
        if dep_id in hosts:
            dep_item = hosts[dep_id] # depends on [control=['if'], data=['dep_id', 'hosts']]
        else:
            dep_item = services[dep_id]
        timeperiod = timeperiods[timeperiod_id]
        # If the dep_item timeperiod is not valid, do not raise the dep,
        # None = every time
        if timeperiod is None or timeperiod.is_time_valid(now):
            # if the update is 'fresh', do not raise dep,
            # cached_check_horizon = cached_service_check_horizon for service
            if dep_item.last_state_update < now - cls.cached_check_horizon:
                # Do not launch the check if it depends on a passive check or if a check
                # is already planned
                if dep_item.active_checks_enabled:
                    if not dep_item.in_checking:
                        newchk = dep_item.launch_check(now, hosts, services, timeperiods, macromodulations, checkmodulations, checks, ref_check, dependent=True)
                        if newchk is not None:
                            new_checks.append(newchk) # depends on [control=['if'], data=['newchk']] # depends on [control=['if'], data=[]]
                    elif dep_item.checks_in_progress:
                        check_uuid = dep_item.checks_in_progress[0]
                        checks[check_uuid].depend_on_me.append(ref_check)
                        checking_checks.append(check_uuid) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    return {'new': new_checks, 'checking': checking_checks}
def _get_final_set(self, sets, pk, sort_options):
    """
    Called by _collection to get the final set to work on. Return the name
    of the set to use, and a list of keys to delete once the collection is
    really called (in case of a computed set based on multiple ones)
    """
    conn = self.cls.get_connection()
    all_sets = set()
    tmp_keys = set()

    if pk is not None and not sets and not (sort_options and sort_options.get('get')):
        # no final set if only a pk without values to retrieve
        return (None, False)

    elif sets or pk:
        if sets:
            new_sets, new_tmp_keys = self._prepare_sets(sets)
            all_sets.update(new_sets)
            tmp_keys.update(new_tmp_keys)
        if pk is not None:
            # create a set with the pk to do intersection (and to pass it to
            # the store command to retrieve values if needed)
            tmp_key = self._unique_key()
            conn.sadd(tmp_key, pk)
            all_sets.add(tmp_key)
            tmp_keys.add(tmp_key)

    else:
        # no sets or pk, use the whole collection instead
        all_sets.add(self.cls.get_field('pk').collection_key)

    if not all_sets:
        delete_set_later = False
        final_set = None
    elif len(all_sets) == 1:
        # if we have only one set, we delete the set after calling
        # collection only if it's a temporary one, and we do not delete
        # it right now
        final_set = all_sets.pop()
        if final_set in tmp_keys:
            delete_set_later = True
            tmp_keys.remove(final_set)
        else:
            delete_set_later = False
    else:
        # more than one set, do an intersection on all of them in a new key
        # that must be deleted once the collection is called.
        delete_set_later = True
        final_set = self._combine_sets(all_sets, self._unique_key())

    if tmp_keys:
        conn.delete(*tmp_keys)

    # return the final set to work on, and the list of keys to delete later
    # (if any)
    return (final_set, [final_set] if delete_set_later else None)
def function[_get_final_set, parameter[self, sets, pk, sort_options]]: constant[ Called by _collection to get the final set to work on. Return the name of the set to use, and a list of keys to delete once the collection is really called (in case of a computed set based on multiple ones) ] variable[conn] assign[=] call[name[self].cls.get_connection, parameter[]] variable[all_sets] assign[=] call[name[set], parameter[]] variable[tmp_keys] assign[=] call[name[set], parameter[]] if <ast.BoolOp object at 0x7da1b25dfa90> begin[:] return[tuple[[<ast.Constant object at 0x7da1b25dee00>, <ast.Constant object at 0x7da1b25df850>]]] if <ast.UnaryOp object at 0x7da1b25b2560> begin[:] variable[delete_set_later] assign[=] constant[False] variable[final_set] assign[=] constant[None] if name[tmp_keys] begin[:] call[name[conn].delete, parameter[<ast.Starred object at 0x7da1b25b2a40>]] return[tuple[[<ast.Name object at 0x7da1b25b00a0>, <ast.IfExp object at 0x7da1b25b00d0>]]]
keyword[def] identifier[_get_final_set] ( identifier[self] , identifier[sets] , identifier[pk] , identifier[sort_options] ): literal[string] identifier[conn] = identifier[self] . identifier[cls] . identifier[get_connection] () identifier[all_sets] = identifier[set] () identifier[tmp_keys] = identifier[set] () keyword[if] identifier[pk] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[sets] keyword[and] keyword[not] ( identifier[sort_options] keyword[and] identifier[sort_options] . identifier[get] ( literal[string] )): keyword[return] ( keyword[None] , keyword[False] ) keyword[elif] identifier[sets] keyword[or] identifier[pk] : keyword[if] identifier[sets] : identifier[new_sets] , identifier[new_tmp_keys] = identifier[self] . identifier[_prepare_sets] ( identifier[sets] ) identifier[all_sets] . identifier[update] ( identifier[new_sets] ) identifier[tmp_keys] . identifier[update] ( identifier[new_tmp_keys] ) keyword[if] identifier[pk] keyword[is] keyword[not] keyword[None] : identifier[tmp_key] = identifier[self] . identifier[_unique_key] () identifier[conn] . identifier[sadd] ( identifier[tmp_key] , identifier[pk] ) identifier[all_sets] . identifier[add] ( identifier[tmp_key] ) identifier[tmp_keys] . identifier[add] ( identifier[tmp_key] ) keyword[else] : identifier[all_sets] . identifier[add] ( identifier[self] . identifier[cls] . identifier[get_field] ( literal[string] ). identifier[collection_key] ) keyword[if] keyword[not] identifier[all_sets] : identifier[delete_set_later] = keyword[False] identifier[final_set] = keyword[None] keyword[elif] identifier[len] ( identifier[all_sets] )== literal[int] : identifier[final_set] = identifier[all_sets] . identifier[pop] () keyword[if] identifier[final_set] keyword[in] identifier[tmp_keys] : identifier[delete_set_later] = keyword[True] identifier[tmp_keys] . identifier[remove] ( identifier[final_set] ) keyword[else] : identifier[delete_set_later] = keyword[False] keyword[else] : identifier[delete_set_later] = keyword[True] identifier[final_set] = identifier[self] . identifier[_combine_sets] ( identifier[all_sets] , identifier[self] . identifier[_unique_key] ()) keyword[if] identifier[tmp_keys] : identifier[conn] . identifier[delete] (* identifier[tmp_keys] ) keyword[return] ( identifier[final_set] ,[ identifier[final_set] ] keyword[if] identifier[delete_set_later] keyword[else] keyword[None] )
def _get_final_set(self, sets, pk, sort_options):
    """
    Called by _collection to get the final set to work on. Return the name
    of the set to use, and a list of keys to delete once the collection is
    really called (in case of a computed set based on multiple ones)
    """
    conn = self.cls.get_connection()
    all_sets = set()
    tmp_keys = set()
    if pk is not None and (not sets) and (not (sort_options and sort_options.get('get'))):
        # no final set if only a pk without values to retrieve
        return (None, False) # depends on [control=['if'], data=[]]
    elif sets or pk:
        if sets:
            (new_sets, new_tmp_keys) = self._prepare_sets(sets)
            all_sets.update(new_sets)
            tmp_keys.update(new_tmp_keys) # depends on [control=['if'], data=[]]
        if pk is not None:
            # create a set with the pk to do intersection (and to pass it to
            # the store command to retrieve values if needed)
            tmp_key = self._unique_key()
            conn.sadd(tmp_key, pk)
            all_sets.add(tmp_key)
            tmp_keys.add(tmp_key) # depends on [control=['if'], data=['pk']] # depends on [control=['if'], data=[]]
    else:
        # no sets or pk, use the whole collection instead
        all_sets.add(self.cls.get_field('pk').collection_key)
    if not all_sets:
        delete_set_later = False
        final_set = None # depends on [control=['if'], data=[]]
    elif len(all_sets) == 1:
        # if we have only one set, we delete the set after calling
        # collection only if it's a temporary one, and we do not delete
        # it right now
        final_set = all_sets.pop()
        if final_set in tmp_keys:
            delete_set_later = True
            tmp_keys.remove(final_set) # depends on [control=['if'], data=['final_set', 'tmp_keys']]
        else:
            delete_set_later = False # depends on [control=['if'], data=[]]
    else:
        # more than one set, do an intersection on all of them in a new key
        # that must be deleted once the collection is called.
        delete_set_later = True
        final_set = self._combine_sets(all_sets, self._unique_key())
    if tmp_keys:
        conn.delete(*tmp_keys) # depends on [control=['if'], data=[]]
    # return the final set to work on, and the list of keys to delete later (if any)
    return (final_set, [final_set] if delete_set_later else None)
def execute_sql(self, statement): """ Executes a single SQL statement. :param statement: SQL string :return: String response :rtype: str """ path = '/archive/{}/sql'.format(self._instance) req = archive_pb2.ExecuteSqlRequest() req.statement = statement response = self._client.post_proto(path=path, data=req.SerializeToString()) message = archive_pb2.ExecuteSqlResponse() message.ParseFromString(response.content) if message.HasField('result'): return message.result return None
def function[execute_sql, parameter[self, statement]]: constant[ Executes a single SQL statement. :param statement: SQL string :return: String response :rtype: str ] variable[path] assign[=] call[constant[/archive/{}/sql].format, parameter[name[self]._instance]] variable[req] assign[=] call[name[archive_pb2].ExecuteSqlRequest, parameter[]] name[req].statement assign[=] name[statement] variable[response] assign[=] call[name[self]._client.post_proto, parameter[]] variable[message] assign[=] call[name[archive_pb2].ExecuteSqlResponse, parameter[]] call[name[message].ParseFromString, parameter[name[response].content]] if call[name[message].HasField, parameter[constant[result]]] begin[:] return[name[message].result] return[constant[None]]
keyword[def] identifier[execute_sql] ( identifier[self] , identifier[statement] ): literal[string] identifier[path] = literal[string] . identifier[format] ( identifier[self] . identifier[_instance] ) identifier[req] = identifier[archive_pb2] . identifier[ExecuteSqlRequest] () identifier[req] . identifier[statement] = identifier[statement] identifier[response] = identifier[self] . identifier[_client] . identifier[post_proto] ( identifier[path] = identifier[path] , identifier[data] = identifier[req] . identifier[SerializeToString] ()) identifier[message] = identifier[archive_pb2] . identifier[ExecuteSqlResponse] () identifier[message] . identifier[ParseFromString] ( identifier[response] . identifier[content] ) keyword[if] identifier[message] . identifier[HasField] ( literal[string] ): keyword[return] identifier[message] . identifier[result] keyword[return] keyword[None]
def execute_sql(self, statement): """ Executes a single SQL statement. :param statement: SQL string :return: String response :rtype: str """ path = '/archive/{}/sql'.format(self._instance) req = archive_pb2.ExecuteSqlRequest() req.statement = statement response = self._client.post_proto(path=path, data=req.SerializeToString()) message = archive_pb2.ExecuteSqlResponse() message.ParseFromString(response.content) if message.HasField('result'): return message.result # depends on [control=['if'], data=[]] return None
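A brief usage sketch for execute_sql above, assuming archive is an instance of the surrounding client class (the instance and table names are illustrative):

result = archive.execute_sql('select * from tm_table limit 10')
if result is not None:
    print(result)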
def emit(self, span_datas): """ :type span_datas: list of :class: `~opencensus.trace.span_data.SpanData` :param list of opencensus.trace.span_data.SpanData span_datas: SpanData tuples to emit """ # convert to the legacy trace json for easier refactoring # TODO: refactor this to use the span data directly legacy_trace_json = span_data.format_legacy_trace_json(span_datas) self.logger.info(legacy_trace_json)
def function[emit, parameter[self, span_datas]]: constant[ :type span_datas: list of :class: `~opencensus.trace.span_data.SpanData` :param list of opencensus.trace.span_data.SpanData span_datas: SpanData tuples to emit ] variable[legacy_trace_json] assign[=] call[name[span_data].format_legacy_trace_json, parameter[name[span_datas]]] call[name[self].logger.info, parameter[name[legacy_trace_json]]]
keyword[def] identifier[emit] ( identifier[self] , identifier[span_datas] ): literal[string] identifier[legacy_trace_json] = identifier[span_data] . identifier[format_legacy_trace_json] ( identifier[span_datas] ) identifier[self] . identifier[logger] . identifier[info] ( identifier[legacy_trace_json] )
def emit(self, span_datas): """ :type span_datas: list of :class: `~opencensus.trace.span_data.SpanData` :param list of opencensus.trace.span_data.SpanData span_datas: SpanData tuples to emit """ # convert to the legacy trace json for easier refactoring # TODO: refactor this to use the span data directly legacy_trace_json = span_data.format_legacy_trace_json(span_datas) self.logger.info(legacy_trace_json)
def local_property(f): '''Decorator to be used in conjunction with :class:`LocalMixin` methods. ''' name = f.__name__ def _(self): local = self.local if name not in local: setattr(local, name, f(self)) return getattr(local, name) return property(_, doc=f.__doc__)
def function[local_property, parameter[f]]: constant[Decorator to be used in conjunction with :class:`LocalMixin` methods. ] variable[name] assign[=] name[f].__name__ def function[_, parameter[self]]: variable[local] assign[=] name[self].local if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[local]] begin[:] call[name[setattr], parameter[name[local], name[name], call[name[f], parameter[name[self]]]]] return[call[name[getattr], parameter[name[local], name[name]]]] return[call[name[property], parameter[name[_]]]]
keyword[def] identifier[local_property] ( identifier[f] ): literal[string] identifier[name] = identifier[f] . identifier[__name__] keyword[def] identifier[_] ( identifier[self] ): identifier[local] = identifier[self] . identifier[local] keyword[if] identifier[name] keyword[not] keyword[in] identifier[local] : identifier[setattr] ( identifier[local] , identifier[name] , identifier[f] ( identifier[self] )) keyword[return] identifier[getattr] ( identifier[local] , identifier[name] ) keyword[return] identifier[property] ( identifier[_] , identifier[doc] = identifier[f] . identifier[__doc__] )
def local_property(f): """Decorator to be used in conjunction with :class:`LocalMixin` methods. """ name = f.__name__ def _(self): local = self.local if name not in local: setattr(local, name, f(self)) # depends on [control=['if'], data=['name', 'local']] return getattr(local, name) return property(_, doc=f.__doc__)
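A self-contained usage sketch for local_property above. The _Local helper is an assumption standing in for whatever LocalMixin exposes as self.local; it must support both membership tests (name in local) and attribute access, which the decorator relies on:

class _Local:
    # Minimal per-instance stand-in for thread-local storage.
    def __contains__(self, name):
        return name in self.__dict__


class Resource:
    def __init__(self):
        self.local = _Local()

    @local_property
    def handle(self):
        '''Created lazily on first access, then cached on self.local.'''
        return object()


r = Resource()
assert r.handle is r.handle  # the second access returns the cached value
assert 'handle' in r.local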
def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, basic_auth=None, proxy_basic_auth=None, disable_cache=None): """ Shortcuts for generating request headers. :param keep_alive: If ``True``, adds 'connection: keep-alive' header. :param accept_encoding: Can be a boolean, list, or string. ``True`` translates to 'gzip,deflate'. List will get joined by comma. String will be used as provided. :param user_agent: String representing the user-agent you want, such as "python-urllib3/0.6" :param basic_auth: Colon-separated username:password string for 'authorization: basic ...' auth header. :param proxy_basic_auth: Colon-separated username:password string for 'proxy-authorization: basic ...' auth header. :param disable_cache: If ``True``, adds 'cache-control: no-cache' header. Example:: >>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'} """ headers = {} if accept_encoding: if isinstance(accept_encoding, str): pass elif isinstance(accept_encoding, list): accept_encoding = ','.join(accept_encoding) else: accept_encoding = ACCEPT_ENCODING headers['accept-encoding'] = accept_encoding if user_agent: headers['user-agent'] = user_agent if keep_alive: headers['connection'] = 'keep-alive' if basic_auth: headers['authorization'] = 'Basic ' + \ b64encode(b(basic_auth)).decode('utf-8') if proxy_basic_auth: headers['proxy-authorization'] = 'Basic ' + \ b64encode(b(proxy_basic_auth)).decode('utf-8') if disable_cache: headers['cache-control'] = 'no-cache' return headers
def function[make_headers, parameter[keep_alive, accept_encoding, user_agent, basic_auth, proxy_basic_auth, disable_cache]]: constant[ Shortcuts for generating request headers. :param keep_alive: If ``True``, adds 'connection: keep-alive' header. :param accept_encoding: Can be a boolean, list, or string. ``True`` translates to 'gzip,deflate'. List will get joined by comma. String will be used as provided. :param user_agent: String representing the user-agent you want, such as "python-urllib3/0.6" :param basic_auth: Colon-separated username:password string for 'authorization: basic ...' auth header. :param proxy_basic_auth: Colon-separated username:password string for 'proxy-authorization: basic ...' auth header. :param disable_cache: If ``True``, adds 'cache-control: no-cache' header. Example:: >>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'} ] variable[headers] assign[=] dictionary[[], []] if name[accept_encoding] begin[:] if call[name[isinstance], parameter[name[accept_encoding], name[str]]] begin[:] pass call[name[headers]][constant[accept-encoding]] assign[=] name[accept_encoding] if name[user_agent] begin[:] call[name[headers]][constant[user-agent]] assign[=] name[user_agent] if name[keep_alive] begin[:] call[name[headers]][constant[connection]] assign[=] constant[keep-alive] if name[basic_auth] begin[:] call[name[headers]][constant[authorization]] assign[=] binary_operation[constant[Basic ] + call[call[name[b64encode], parameter[call[name[b], parameter[name[basic_auth]]]]].decode, parameter[constant[utf-8]]]] if name[proxy_basic_auth] begin[:] call[name[headers]][constant[proxy-authorization]] assign[=] binary_operation[constant[Basic ] + call[call[name[b64encode], parameter[call[name[b], parameter[name[proxy_basic_auth]]]]].decode, parameter[constant[utf-8]]]] if name[disable_cache] begin[:] call[name[headers]][constant[cache-control]] assign[=] constant[no-cache] return[name[headers]]
keyword[def] identifier[make_headers] ( identifier[keep_alive] = keyword[None] , identifier[accept_encoding] = keyword[None] , identifier[user_agent] = keyword[None] , identifier[basic_auth] = keyword[None] , identifier[proxy_basic_auth] = keyword[None] , identifier[disable_cache] = keyword[None] ): literal[string] identifier[headers] ={} keyword[if] identifier[accept_encoding] : keyword[if] identifier[isinstance] ( identifier[accept_encoding] , identifier[str] ): keyword[pass] keyword[elif] identifier[isinstance] ( identifier[accept_encoding] , identifier[list] ): identifier[accept_encoding] = literal[string] . identifier[join] ( identifier[accept_encoding] ) keyword[else] : identifier[accept_encoding] = identifier[ACCEPT_ENCODING] identifier[headers] [ literal[string] ]= identifier[accept_encoding] keyword[if] identifier[user_agent] : identifier[headers] [ literal[string] ]= identifier[user_agent] keyword[if] identifier[keep_alive] : identifier[headers] [ literal[string] ]= literal[string] keyword[if] identifier[basic_auth] : identifier[headers] [ literal[string] ]= literal[string] + identifier[b64encode] ( identifier[b] ( identifier[basic_auth] )). identifier[decode] ( literal[string] ) keyword[if] identifier[proxy_basic_auth] : identifier[headers] [ literal[string] ]= literal[string] + identifier[b64encode] ( identifier[b] ( identifier[proxy_basic_auth] )). identifier[decode] ( literal[string] ) keyword[if] identifier[disable_cache] : identifier[headers] [ literal[string] ]= literal[string] keyword[return] identifier[headers]
def make_headers(keep_alive=None, accept_encoding=None, user_agent=None, basic_auth=None, proxy_basic_auth=None, disable_cache=None): """ Shortcuts for generating request headers. :param keep_alive: If ``True``, adds 'connection: keep-alive' header. :param accept_encoding: Can be a boolean, list, or string. ``True`` translates to 'gzip,deflate'. List will get joined by comma. String will be used as provided. :param user_agent: String representing the user-agent you want, such as "python-urllib3/0.6" :param basic_auth: Colon-separated username:password string for 'authorization: basic ...' auth header. :param proxy_basic_auth: Colon-separated username:password string for 'proxy-authorization: basic ...' auth header. :param disable_cache: If ``True``, adds 'cache-control: no-cache' header. Example:: >>> make_headers(keep_alive=True, user_agent="Batman/1.0") {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} >>> make_headers(accept_encoding=True) {'accept-encoding': 'gzip,deflate'} """ headers = {} if accept_encoding: if isinstance(accept_encoding, str): pass # depends on [control=['if'], data=[]] elif isinstance(accept_encoding, list): accept_encoding = ','.join(accept_encoding) # depends on [control=['if'], data=[]] else: accept_encoding = ACCEPT_ENCODING headers['accept-encoding'] = accept_encoding # depends on [control=['if'], data=[]] if user_agent: headers['user-agent'] = user_agent # depends on [control=['if'], data=[]] if keep_alive: headers['connection'] = 'keep-alive' # depends on [control=['if'], data=[]] if basic_auth: headers['authorization'] = 'Basic ' + b64encode(b(basic_auth)).decode('utf-8') # depends on [control=['if'], data=[]] if proxy_basic_auth: headers['proxy-authorization'] = 'Basic ' + b64encode(b(proxy_basic_auth)).decode('utf-8') # depends on [control=['if'], data=[]] if disable_cache: headers['cache-control'] = 'no-cache' # depends on [control=['if'], data=[]] return headers
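Complementing the docstring examples above, the auth parameters are base64-encoded into header values:

headers = make_headers(basic_auth='user:pass', disable_cache=True)
print(headers)
# {'authorization': 'Basic dXNlcjpwYXNz', 'cache-control': 'no-cache'}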
def execute(self, context):
    """Upload a file to Azure Blob Storage."""
    hook = WasbHook(wasb_conn_id=self.wasb_conn_id)
    # Lazy %-style arguments: the original mixed %s placeholders with
    # str.format(), so the values were never interpolated into the message.
    self.log.info(
        'Uploading %s to wasb://%s as %s',
        self.file_path, self.container_name, self.blob_name
    )
    hook.load_file(self.file_path,
                   self.container_name,
                   self.blob_name,
                   **self.load_options)
def function[execute, parameter[self, context]]: constant[Upload a file to Azure Blob Storage.] variable[hook] assign[=] call[name[WasbHook], parameter[]] call[name[self].log.info, parameter[call[constant[Uploading %s to wasb://%s as %s].format, parameter[name[self].file_path, name[self].container_name, name[self].blob_name]]]] call[name[hook].load_file, parameter[name[self].file_path, name[self].container_name, name[self].blob_name]]
keyword[def] identifier[execute] ( identifier[self] , identifier[context] ): literal[string] identifier[hook] = identifier[WasbHook] ( identifier[wasb_conn_id] = identifier[self] . identifier[wasb_conn_id] ) identifier[self] . identifier[log] . identifier[info] ( literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[file_path] , identifier[self] . identifier[container_name] , identifier[self] . identifier[blob_name] ) ) identifier[hook] . identifier[load_file] ( identifier[self] . identifier[file_path] , identifier[self] . identifier[container_name] , identifier[self] . identifier[blob_name] ,** identifier[self] . identifier[load_options] )
def execute(self, context):
    """Upload a file to Azure Blob Storage.""" 
    hook = WasbHook(wasb_conn_id=self.wasb_conn_id)
    self.log.info('Uploading %s to wasb://%s as %s', self.file_path, self.container_name, self.blob_name)
    hook.load_file(self.file_path, self.container_name, self.blob_name, **self.load_options)
def from_http(cls, headers: Mapping[str, str], body: bytes, *, secret: Optional[str] = None) -> "Event": """Construct an event from HTTP headers and JSON body data. The mapping providing the headers is expected to support lowercase keys. Since this method assumes the body of the HTTP request is JSON, a check is performed for a content-type of "application/json" (GitHub does support other content-types). If the content-type does not match, BadRequest is raised. If the appropriate headers are provided for event validation, then it will be performed unconditionally. Any failure in validation (including not providing a secret) will lead to ValidationFailure being raised. """ if "x-hub-signature" in headers: if secret is None: raise ValidationFailure("secret not provided") validate_event(body, signature=headers["x-hub-signature"], secret=secret) elif secret is not None: raise ValidationFailure("signature is missing") try: data = _decode_body(headers["content-type"], body, strict=True) except (KeyError, ValueError) as exc: raise BadRequest(http.HTTPStatus(415), "expected a content-type of " "'application/json' or " "'application/x-www-form-urlencoded'") from exc return cls(data, event=headers["x-github-event"], delivery_id=headers["x-github-delivery"])
def function[from_http, parameter[cls, headers, body]]: constant[Construct an event from HTTP headers and JSON body data. The mapping providing the headers is expected to support lowercase keys. Since this method assumes the body of the HTTP request is JSON, a check is performed for a content-type of "application/json" (GitHub does support other content-types). If the content-type does not match, BadRequest is raised. If the appropriate headers are provided for event validation, then it will be performed unconditionally. Any failure in validation (including not providing a secret) will lead to ValidationFailure being raised. ] if compare[constant[x-hub-signature] in name[headers]] begin[:] if compare[name[secret] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1346200> call[name[validate_event], parameter[name[body]]] <ast.Try object at 0x7da1b1344f70> return[call[name[cls], parameter[name[data]]]]
keyword[def] identifier[from_http] ( identifier[cls] , identifier[headers] : identifier[Mapping] [ identifier[str] , identifier[str] ], identifier[body] : identifier[bytes] , *, identifier[secret] : identifier[Optional] [ identifier[str] ]= keyword[None] )-> literal[string] : literal[string] keyword[if] literal[string] keyword[in] identifier[headers] : keyword[if] identifier[secret] keyword[is] keyword[None] : keyword[raise] identifier[ValidationFailure] ( literal[string] ) identifier[validate_event] ( identifier[body] , identifier[signature] = identifier[headers] [ literal[string] ], identifier[secret] = identifier[secret] ) keyword[elif] identifier[secret] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[ValidationFailure] ( literal[string] ) keyword[try] : identifier[data] = identifier[_decode_body] ( identifier[headers] [ literal[string] ], identifier[body] , identifier[strict] = keyword[True] ) keyword[except] ( identifier[KeyError] , identifier[ValueError] ) keyword[as] identifier[exc] : keyword[raise] identifier[BadRequest] ( identifier[http] . identifier[HTTPStatus] ( literal[int] ), literal[string] literal[string] literal[string] ) keyword[from] identifier[exc] keyword[return] identifier[cls] ( identifier[data] , identifier[event] = identifier[headers] [ literal[string] ], identifier[delivery_id] = identifier[headers] [ literal[string] ])
def from_http(cls, headers: Mapping[str, str], body: bytes, *, secret: Optional[str]=None) -> 'Event': """Construct an event from HTTP headers and JSON body data. The mapping providing the headers is expected to support lowercase keys. Since this method assumes the body of the HTTP request is JSON, a check is performed for a content-type of "application/json" (GitHub does support other content-types). If the content-type does not match, BadRequest is raised. If the appropriate headers are provided for event validation, then it will be performed unconditionally. Any failure in validation (including not providing a secret) will lead to ValidationFailure being raised. """ if 'x-hub-signature' in headers: if secret is None: raise ValidationFailure('secret not provided') # depends on [control=['if'], data=[]] validate_event(body, signature=headers['x-hub-signature'], secret=secret) # depends on [control=['if'], data=['headers']] elif secret is not None: raise ValidationFailure('signature is missing') # depends on [control=['if'], data=[]] try: data = _decode_body(headers['content-type'], body, strict=True) # depends on [control=['try'], data=[]] except (KeyError, ValueError) as exc: raise BadRequest(http.HTTPStatus(415), "expected a content-type of 'application/json' or 'application/x-www-form-urlencoded'") from exc # depends on [control=['except'], data=['exc']] return cls(data, event=headers['x-github-event'], delivery_id=headers['x-github-delivery'])
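A standalone sketch of the HMAC check that validate_event presumably performs, assuming GitHub's classic scheme where x-hub-signature carries 'sha1=' plus the hex HMAC-SHA1 of the body under the webhook secret:

import hashlib
import hmac

def check_signature(body: bytes, signature: str, secret: str) -> bool:
    # Recompute the digest and compare in constant time.
    digest = hmac.new(secret.encode('utf-8'), body, hashlib.sha1).hexdigest()
    return hmac.compare_digest('sha1=' + digest, signature)

body = b'{"action": "opened"}'
good = 'sha1=' + hmac.new(b's3cret', body, hashlib.sha1).hexdigest()
print(check_signature(body, good, 's3cret'))  # True
print(check_signature(body, good, 'wrong'))   # False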
def time(self, target=None): """ Get server time. Optional arguments: * target=None - Target server. """ with self.lock: if target: self.send('TIME %s' % target) else: self.send('TIME') time = '' msg = self._recv(rm_colon=True, expected_replies=('391',)) if msg[0] == '391': time = msg[2].split(':', 1)[1] return time
def function[time, parameter[self, target]]: constant[ Get server time. Optional arguments: * target=None - Target server. ] with name[self].lock begin[:] if name[target] begin[:] call[name[self].send, parameter[binary_operation[constant[TIME %s] <ast.Mod object at 0x7da2590d6920> name[target]]]] variable[time] assign[=] constant[] variable[msg] assign[=] call[name[self]._recv, parameter[]] if compare[call[name[msg]][constant[0]] equal[==] constant[391]] begin[:] variable[time] assign[=] call[call[call[name[msg]][constant[2]].split, parameter[constant[:], constant[1]]]][constant[1]] return[name[time]]
keyword[def] identifier[time] ( identifier[self] , identifier[target] = keyword[None] ): literal[string] keyword[with] identifier[self] . identifier[lock] : keyword[if] identifier[target] : identifier[self] . identifier[send] ( literal[string] % identifier[target] ) keyword[else] : identifier[self] . identifier[send] ( literal[string] ) identifier[time] = literal[string] identifier[msg] = identifier[self] . identifier[_recv] ( identifier[rm_colon] = keyword[True] , identifier[expected_replies] =( literal[string] ,)) keyword[if] identifier[msg] [ literal[int] ]== literal[string] : identifier[time] = identifier[msg] [ literal[int] ]. identifier[split] ( literal[string] , literal[int] )[ literal[int] ] keyword[return] identifier[time]
def time(self, target=None): """ Get server time. Optional arguments: * target=None - Target server. """ with self.lock: if target: self.send('TIME %s' % target) # depends on [control=['if'], data=[]] else: self.send('TIME') time = '' msg = self._recv(rm_colon=True, expected_replies=('391',)) if msg[0] == '391': time = msg[2].split(':', 1)[1] # depends on [control=['if'], data=[]] return time # depends on [control=['with'], data=[]]
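A sketch of the RPL_TIME (391) parsing above; the exact shape of the parsed message is an assumption, since _recv is not shown:

# Hypothetical parsed 391 reply: [numeric, target, 'server :<time string>'].
msg = ['391', 'nick', 'irc.example.net :Mon Apr  1 12:00:00 2019']
time = ''
if msg[0] == '391':
    time = msg[2].split(':', 1)[1]  # keep everything after the first colon
print(time)  # Mon Apr  1 12:00:00 2019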
def cache_persist(self): """ Saves the current trained data to the cache. This is initiated by the program using this module """ filename = self.get_cache_location() pickle.dump(self.categories, open(filename, 'wb'))
def function[cache_persist, parameter[self]]: constant[ Saves the current trained data to the cache. This is initiated by the program using this module ] variable[filename] assign[=] call[name[self].get_cache_location, parameter[]] call[name[pickle].dump, parameter[name[self].categories, call[name[open], parameter[name[filename], constant[wb]]]]]
keyword[def] identifier[cache_persist] ( identifier[self] ): literal[string] identifier[filename] = identifier[self] . identifier[get_cache_location] () identifier[pickle] . identifier[dump] ( identifier[self] . identifier[categories] , identifier[open] ( identifier[filename] , literal[string] ))
def cache_persist(self): """ Saves the current trained data to the cache. This is initiated by the program using this module """ filename = self.get_cache_location() pickle.dump(self.categories, open(filename, 'wb'))
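A round-trip sketch of the pickle persistence; the categories dict and cache path are hypothetical, and a with-block is used so the file handle is closed deterministically:

import os
import pickle
import tempfile

categories = {'spam': 10, 'ham': 25}                         # hypothetical trained data
filename = os.path.join(tempfile.gettempdir(), 'cache.pkl')  # hypothetical cache location

with open(filename, 'wb') as fh:
    pickle.dump(categories, fh)
with open(filename, 'rb') as fh:
    restored = pickle.load(fh)

assert restored == categories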
def area(self): """ Mesh surface area Returns ------- area : float Total area of the mesh. """ mprop = vtk.vtkMassProperties() mprop.SetInputData(self) return mprop.GetSurfaceArea()
def function[area, parameter[self]]: constant[ Mesh surface area Returns ------- area : float Total area of the mesh. ] variable[mprop] assign[=] call[name[vtk].vtkMassProperties, parameter[]] call[name[mprop].SetInputData, parameter[name[self]]] return[call[name[mprop].GetSurfaceArea, parameter[]]]
keyword[def] identifier[area] ( identifier[self] ): literal[string] identifier[mprop] = identifier[vtk] . identifier[vtkMassProperties] () identifier[mprop] . identifier[SetInputData] ( identifier[self] ) keyword[return] identifier[mprop] . identifier[GetSurfaceArea] ()
def area(self): """ Mesh surface area Returns ------- area : float Total area of the mesh. """ mprop = vtk.vtkMassProperties() mprop.SetInputData(self) return mprop.GetSurfaceArea()
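A sketch assuming the vtk package is installed: computing the surface area of a finely tessellated unit sphere the same way, which should approach 4*pi:

import vtk

sphere = vtk.vtkSphereSource()
sphere.SetRadius(1.0)
sphere.SetThetaResolution(64)
sphere.SetPhiResolution(64)

mprop = vtk.vtkMassProperties()
mprop.SetInputConnection(sphere.GetOutputPort())
mprop.Update()
print(mprop.GetSurfaceArea())  # ~12.56, close to 4*pi for a unit sphere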
def CheckApproversForLabel(self, token, client_urn, requester, approvers, label): """Checks if requester and approvers have approval privileges for labels. Checks against list of approvers for each label defined in approvers.yaml to determine if the list of approvers is sufficient. Args: token: user token client_urn: ClientURN object of the client requester: username string of person requesting approval. approvers: list of username strings that have approved this client. label: label strings to check approval privs for. Returns: True if access is allowed, raises otherwise. """ auth = self.reader.GetAuthorizationForSubject(label) if not auth: # This label isn't listed in approvers.yaml return True if auth.requester_must_be_authorized: if not self.CheckPermissions(requester, label): raise access_control.UnauthorizedAccess( "User %s not in %s or groups:%s for %s" % (requester, auth.users, auth.groups, label), subject=client_urn, requested_access=token.requested_access) approved_count = 0 for approver in approvers: if self.CheckPermissions(approver, label) and approver != requester: approved_count += 1 if approved_count < auth.num_approvers_required: raise access_control.UnauthorizedAccess( "Found %s approvers for %s, needed %s" % (approved_count, label, auth.num_approvers_required), subject=client_urn, requested_access=token.requested_access) return True
def function[CheckApproversForLabel, parameter[self, token, client_urn, requester, approvers, label]]: constant[Checks if requester and approvers have approval privileges for labels. Checks against list of approvers for each label defined in approvers.yaml to determine if the list of approvers is sufficient. Args: token: user token client_urn: ClientURN object of the client requester: username string of person requesting approval. approvers: list of username strings that have approved this client. label: label strings to check approval privs for. Returns: True if access is allowed, raises otherwise. ] variable[auth] assign[=] call[name[self].reader.GetAuthorizationForSubject, parameter[name[label]]] if <ast.UnaryOp object at 0x7da1b1c0cca0> begin[:] return[constant[True]] if name[auth].requester_must_be_authorized begin[:] if <ast.UnaryOp object at 0x7da1b1c0fe20> begin[:] <ast.Raise object at 0x7da1b1c0c880> variable[approved_count] assign[=] constant[0] for taget[name[approver]] in starred[name[approvers]] begin[:] if <ast.BoolOp object at 0x7da1b1ce9f00> begin[:] <ast.AugAssign object at 0x7da1b1c0c3d0> if compare[name[approved_count] less[<] name[auth].num_approvers_required] begin[:] <ast.Raise object at 0x7da1b1c0c220> return[constant[True]]
keyword[def] identifier[CheckApproversForLabel] ( identifier[self] , identifier[token] , identifier[client_urn] , identifier[requester] , identifier[approvers] , identifier[label] ): literal[string] identifier[auth] = identifier[self] . identifier[reader] . identifier[GetAuthorizationForSubject] ( identifier[label] ) keyword[if] keyword[not] identifier[auth] : keyword[return] keyword[True] keyword[if] identifier[auth] . identifier[requester_must_be_authorized] : keyword[if] keyword[not] identifier[self] . identifier[CheckPermissions] ( identifier[requester] , identifier[label] ): keyword[raise] identifier[access_control] . identifier[UnauthorizedAccess] ( literal[string] %( identifier[requester] , identifier[auth] . identifier[users] , identifier[auth] . identifier[groups] , identifier[label] ), identifier[subject] = identifier[client_urn] , identifier[requested_access] = identifier[token] . identifier[requested_access] ) identifier[approved_count] = literal[int] keyword[for] identifier[approver] keyword[in] identifier[approvers] : keyword[if] identifier[self] . identifier[CheckPermissions] ( identifier[approver] , identifier[label] ) keyword[and] identifier[approver] != identifier[requester] : identifier[approved_count] += literal[int] keyword[if] identifier[approved_count] < identifier[auth] . identifier[num_approvers_required] : keyword[raise] identifier[access_control] . identifier[UnauthorizedAccess] ( literal[string] % ( identifier[approved_count] , identifier[label] , identifier[auth] . identifier[num_approvers_required] ), identifier[subject] = identifier[client_urn] , identifier[requested_access] = identifier[token] . identifier[requested_access] ) keyword[return] keyword[True]
def CheckApproversForLabel(self, token, client_urn, requester, approvers, label): """Checks if requester and approvers have approval privileges for labels. Checks against list of approvers for each label defined in approvers.yaml to determine if the list of approvers is sufficient. Args: token: user token client_urn: ClientURN object of the client requester: username string of person requesting approval. approvers: list of username strings that have approved this client. label: label strings to check approval privs for. Returns: True if access is allowed, raises otherwise. """ auth = self.reader.GetAuthorizationForSubject(label) if not auth: # This label isn't listed in approvers.yaml return True # depends on [control=['if'], data=[]] if auth.requester_must_be_authorized: if not self.CheckPermissions(requester, label): raise access_control.UnauthorizedAccess('User %s not in %s or groups:%s for %s' % (requester, auth.users, auth.groups, label), subject=client_urn, requested_access=token.requested_access) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] approved_count = 0 for approver in approvers: if self.CheckPermissions(approver, label) and approver != requester: approved_count += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['approver']] if approved_count < auth.num_approvers_required: raise access_control.UnauthorizedAccess('Found %s approvers for %s, needed %s' % (approved_count, label, auth.num_approvers_required), subject=client_urn, requested_access=token.requested_access) # depends on [control=['if'], data=['approved_count']] return True
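A pure-Python sketch of the counting rule at the heart of the method: an approval only counts when the approver is authorized for the label and is not the requester. The names and the 2-approver threshold are hypothetical:

def count_valid_approvals(requester, approvers, authorized):
    # Mirrors the loop above: authorized approvers other than the requester.
    return sum(1 for a in approvers if a in authorized and a != requester)

authorized = {'alice', 'bob', 'carol'}
num_required = 2
approved = count_valid_approvals('alice', ['alice', 'bob', 'carol'], authorized)
print(approved >= num_required)  # True: bob and carol count, alice does not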
def get_scaled(self, magnitude):
    """
    Return a copy of this vector, parallel to the original and scaled
    to the given magnitude.
    """
    result = self.copy()
    result.scale(magnitude)
    return result
def function[get_scaled, parameter[self, magnitude]]: constant[ Return a unit vector parallel to this one. ] variable[result] assign[=] call[name[self].copy, parameter[]] call[name[result].scale, parameter[name[magnitude]]] return[name[result]]
keyword[def] identifier[get_scaled] ( identifier[self] , identifier[magnitude] ): literal[string] identifier[result] = identifier[self] . identifier[copy] () identifier[result] . identifier[scale] ( identifier[magnitude] ) keyword[return] identifier[result]
def get_scaled(self, magnitude):
    """
    Return a copy of this vector, parallel to the original and scaled
    to the given magnitude.
    """
    result = self.copy()
    result.scale(magnitude)
    return result
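A self-contained sketch with a hypothetical 2D vector class, showing that get_scaled returns a scaled copy and leaves the original untouched:

import math

class Vec2:
    def __init__(self, x, y):
        self.x, self.y = x, y
    def copy(self):
        return Vec2(self.x, self.y)
    def scale(self, magnitude):
        # Normalize, then stretch to the requested magnitude.
        norm = math.hypot(self.x, self.y)
        self.x, self.y = self.x / norm * magnitude, self.y / norm * magnitude
    def get_scaled(self, magnitude):
        result = self.copy()
        result.scale(magnitude)
        return result

v = Vec2(3.0, 4.0)
u = v.get_scaled(1.0)
print((u.x, u.y), (v.x, v.y))  # (0.6, 0.8) (3.0, 4.0) -- original unchanged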
def esummary(database: str, ids=False, webenv=False, query_key=False,
             count=False, retstart=False, retmax=False, api_key=False,
             email=False, **kwargs) -> Optional[List[EsummaryResult]]:
    """Get document summaries using the Entrez ESummary API.

    Parameters
    ----------
    database : str
        Entrez database to search.
    ids : list or str
        List of IDs to submit to the server.
    webenv : str
        An Entrez WebEnv to use saved history.
    query_key : str
        An Entrez query_key to use saved history.
    count : int
        Number of records in the webenv.
    retstart : int
        Return values starting at this index.
    retmax : int
        Return at most this number of values.
    api_key : str
        A user's API key which allows more requests per second.
    email : str
        A user's email which is required if not using an API key.

    Yields
    ------
    str
        The raw text of each ESummary response.
    """
    url = BASE_URL + f'esummary.fcgi?db={database}'
    url = check_webenv(webenv, url)
    url = check_query_key(query_key, url)
    url = check_api_key(api_key, url)
    url = check_email(email, url)

    if ids:
        if isinstance(ids, str):
            id = ids
        else:
            id = ','.join(ids)
        url += f'&id={id}'
        count = len(id.split(','))

    for resp in entrez_sets_of_results(url, retstart, retmax, count):
        yield resp.text
def function[esummary, parameter[database, ids, webenv, query_key, count, retstart, retmax, api_key, email]]: constant[Get document summaries using the Entrez ESearch API. Parameters ---------- database : str Entez database to search. ids : list or str List of IDs to submit to the server. webenv : str An Entrez WebEnv to use saved history. query_key : str An Entrez query_key to use saved history. count : int Number of records in the webenv retstart : int Return values starting at this index. retmax : int Return at most this number of values. api_key : str A users API key which allows more requests per second email : str A users email which is required if not using API. Returns ------- list A list of EsummaryResults with values [id, srx, create_date, update_date] ] variable[url] assign[=] binary_operation[name[BASE_URL] + <ast.JoinedStr object at 0x7da20e961f30>] variable[url] assign[=] call[name[check_webenv], parameter[name[webenv], name[url]]] variable[url] assign[=] call[name[check_query_key], parameter[name[query_key], name[url]]] variable[url] assign[=] call[name[check_api_key], parameter[name[api_key], name[url]]] variable[url] assign[=] call[name[check_email], parameter[name[email], name[url]]] if name[ids] begin[:] if call[name[isinstance], parameter[name[ids], name[str]]] begin[:] variable[id] assign[=] name[ids] <ast.AugAssign object at 0x7da18f00fd00> variable[count] assign[=] call[name[len], parameter[call[name[id].split, parameter[constant[,]]]]] for taget[name[resp]] in starred[call[name[entrez_sets_of_results], parameter[name[url], name[retstart], name[retmax], name[count]]]] begin[:] <ast.Yield object at 0x7da18f00c520>
keyword[def] identifier[esummary] ( identifier[database] : identifier[str] , identifier[ids] = keyword[False] , identifier[webenv] = keyword[False] , identifier[query_key] = keyword[False] , identifier[count] = keyword[False] , identifier[retstart] = keyword[False] , identifier[retmax] = keyword[False] , identifier[api_key] = keyword[False] , identifier[email] = keyword[False] ,** identifier[kwargs] )-> identifier[Optional] [ identifier[List] [ identifier[EsummaryResult] ]]: literal[string] identifier[url] = identifier[BASE_URL] + literal[string] identifier[url] = identifier[check_webenv] ( identifier[webenv] , identifier[url] ) identifier[url] = identifier[check_query_key] ( identifier[query_key] , identifier[url] ) identifier[url] = identifier[check_api_key] ( identifier[api_key] , identifier[url] ) identifier[url] = identifier[check_email] ( identifier[email] , identifier[url] ) keyword[if] identifier[ids] : keyword[if] identifier[isinstance] ( identifier[ids] , identifier[str] ): identifier[id] = identifier[ids] keyword[else] : identifier[id] = literal[string] . identifier[join] ( identifier[ids] ) identifier[url] += literal[string] identifier[count] = identifier[len] ( identifier[id] . identifier[split] ( literal[string] )) keyword[for] identifier[resp] keyword[in] identifier[entrez_sets_of_results] ( identifier[url] , identifier[retstart] , identifier[retmax] , identifier[count] ): keyword[yield] identifier[resp] . identifier[text]
def esummary(database: str, ids=False, webenv=False, query_key=False, count=False, retstart=False, retmax=False, api_key=False, email=False, **kwargs) -> Optional[List[EsummaryResult]]:
    """Get document summaries using the Entrez ESummary API.

    Parameters
    ----------
    database : str
        Entrez database to search.
    ids : list or str
        List of IDs to submit to the server.
    webenv : str
        An Entrez WebEnv to use saved history.
    query_key : str
        An Entrez query_key to use saved history.
    count : int
        Number of records in the webenv.
    retstart : int
        Return values starting at this index.
    retmax : int
        Return at most this number of values.
    api_key : str
        A user's API key which allows more requests per second.
    email : str
        A user's email which is required if not using an API key.

    Yields
    ------
    str
        The raw text of each ESummary response.
    """
    url = BASE_URL + f'esummary.fcgi?db={database}'
    url = check_webenv(webenv, url)
    url = check_query_key(query_key, url)
    url = check_api_key(api_key, url)
    url = check_email(email, url)
    if ids:
        if isinstance(ids, str):
            id = ids # depends on [control=['if'], data=[]]
        else:
            id = ','.join(ids)
        url += f'&id={id}'
        count = len(id.split(',')) # depends on [control=['if'], data=[]]
    for resp in entrez_sets_of_results(url, retstart, retmax, count):
        yield resp.text # depends on [control=['for'], data=['resp']]
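A sketch of the URL assembly step only, with hypothetical IDs; the eutils base URL shown here is an assumption about the module's BASE_URL constant:

BASE_URL = 'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/'  # assumed value
database = 'sra'
ids = ['SRX123456', 'SRX123457']  # hypothetical record IDs

url = BASE_URL + f'esummary.fcgi?db={database}'
joined = ','.join(ids)
url += f'&id={joined}'
count = len(joined.split(','))
print(url)    # ...esummary.fcgi?db=sra&id=SRX123456,SRX123457
print(count)  # 2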
def in_file(self, fn: str) -> Iterator[InsertionPoint]: """ Returns an iterator over all of the insertion points in a given file. """ logger.debug("finding insertion points in file: %s", fn) yield from self.__file_insertions.get(fn, [])
def function[in_file, parameter[self, fn]]: constant[ Returns an iterator over all of the insertion points in a given file. ] call[name[logger].debug, parameter[constant[finding insertion points in file: %s], name[fn]]] <ast.YieldFrom object at 0x7da18fe92f20>
keyword[def] identifier[in_file] ( identifier[self] , identifier[fn] : identifier[str] )-> identifier[Iterator] [ identifier[InsertionPoint] ]: literal[string] identifier[logger] . identifier[debug] ( literal[string] , identifier[fn] ) keyword[yield] keyword[from] identifier[self] . identifier[__file_insertions] . identifier[get] ( identifier[fn] ,[])
def in_file(self, fn: str) -> Iterator[InsertionPoint]: """ Returns an iterator over all of the insertion points in a given file. """ logger.debug('finding insertion points in file: %s', fn) yield from self.__file_insertions.get(fn, [])
def name(self):
    """User name (the same name as on the user's community profile page).

    :rtype: str
    """
    uid = self.user_id

    if self._iface_user.get_id() == uid:
        return self._iface.get_my_name()

    return self._iface.get_name(uid)
def function[name, parameter[self]]: constant[User name (the same name as on the users community profile page). :rtype: str ] variable[uid] assign[=] name[self].user_id if compare[call[name[self]._iface_user.get_id, parameter[]] equal[==] name[uid]] begin[:] return[call[name[self]._iface.get_my_name, parameter[]]] return[call[name[self]._iface.get_name, parameter[name[uid]]]]
keyword[def] identifier[name] ( identifier[self] ): literal[string] identifier[uid] = identifier[self] . identifier[user_id] keyword[if] identifier[self] . identifier[_iface_user] . identifier[get_id] ()== identifier[uid] : keyword[return] identifier[self] . identifier[_iface] . identifier[get_my_name] () keyword[return] identifier[self] . identifier[_iface] . identifier[get_name] ( identifier[uid] )
def name(self):
    """User name (the same name as on the user's community profile page).

    :rtype: str
    """
    uid = self.user_id
    if self._iface_user.get_id() == uid:
        return self._iface.get_my_name() # depends on [control=['if'], data=[]]
    return self._iface.get_name(uid)
def clear_feature_symlinks(self, feature_name): """ Clear the symlinks for a feature in the symlinked path """ logger.debug("Clearing feature symlinks for %s" % feature_name) feature_path = self.install_directory(feature_name) for d in ('bin', 'lib'): if os.path.exists(os.path.join(self.root_dir, d)): for link in os.listdir(os.path.join(self.root_dir, d)): path = os.path.join(self.root_dir, d, link) if feature_path in os.path.realpath(path): getattr(self, 'remove_from_%s' % d)(link)
def function[clear_feature_symlinks, parameter[self, feature_name]]: constant[ Clear the symlinks for a feature in the symlinked path ] call[name[logger].debug, parameter[binary_operation[constant[Clearing feature symlinks for %s] <ast.Mod object at 0x7da2590d6920> name[feature_name]]]] variable[feature_path] assign[=] call[name[self].install_directory, parameter[name[feature_name]]] for taget[name[d]] in starred[tuple[[<ast.Constant object at 0x7da18ede7fd0>, <ast.Constant object at 0x7da18ede4550>]]] begin[:] if call[name[os].path.exists, parameter[call[name[os].path.join, parameter[name[self].root_dir, name[d]]]]] begin[:] for taget[name[link]] in starred[call[name[os].listdir, parameter[call[name[os].path.join, parameter[name[self].root_dir, name[d]]]]]] begin[:] variable[path] assign[=] call[name[os].path.join, parameter[name[self].root_dir, name[d], name[link]]] if compare[name[feature_path] in call[name[os].path.realpath, parameter[name[path]]]] begin[:] call[call[name[getattr], parameter[name[self], binary_operation[constant[remove_from_%s] <ast.Mod object at 0x7da2590d6920> name[d]]]], parameter[name[link]]]
keyword[def] identifier[clear_feature_symlinks] ( identifier[self] , identifier[feature_name] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] % identifier[feature_name] ) identifier[feature_path] = identifier[self] . identifier[install_directory] ( identifier[feature_name] ) keyword[for] identifier[d] keyword[in] ( literal[string] , literal[string] ): keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[root_dir] , identifier[d] )): keyword[for] identifier[link] keyword[in] identifier[os] . identifier[listdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[root_dir] , identifier[d] )): identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[root_dir] , identifier[d] , identifier[link] ) keyword[if] identifier[feature_path] keyword[in] identifier[os] . identifier[path] . identifier[realpath] ( identifier[path] ): identifier[getattr] ( identifier[self] , literal[string] % identifier[d] )( identifier[link] )
def clear_feature_symlinks(self, feature_name): """ Clear the symlinks for a feature in the symlinked path """ logger.debug('Clearing feature symlinks for %s' % feature_name) feature_path = self.install_directory(feature_name) for d in ('bin', 'lib'): if os.path.exists(os.path.join(self.root_dir, d)): for link in os.listdir(os.path.join(self.root_dir, d)): path = os.path.join(self.root_dir, d, link) if feature_path in os.path.realpath(path): getattr(self, 'remove_from_%s' % d)(link) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['link']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['d']]
def get_info(self): '''Return ResourceInfo instances.''' if self._min_disk: for path in self._resource_paths: usage = psutil.disk_usage(path) yield ResourceInfo(path, usage.free, self._min_disk) if self._min_memory: usage = psutil.virtual_memory() yield ResourceInfo(None, usage.available, self._min_memory)
def function[get_info, parameter[self]]: constant[Return ResourceInfo instances.] if name[self]._min_disk begin[:] for taget[name[path]] in starred[name[self]._resource_paths] begin[:] variable[usage] assign[=] call[name[psutil].disk_usage, parameter[name[path]]] <ast.Yield object at 0x7da2054a7100> if name[self]._min_memory begin[:] variable[usage] assign[=] call[name[psutil].virtual_memory, parameter[]] <ast.Yield object at 0x7da2054a7070>
keyword[def] identifier[get_info] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_min_disk] : keyword[for] identifier[path] keyword[in] identifier[self] . identifier[_resource_paths] : identifier[usage] = identifier[psutil] . identifier[disk_usage] ( identifier[path] ) keyword[yield] identifier[ResourceInfo] ( identifier[path] , identifier[usage] . identifier[free] , identifier[self] . identifier[_min_disk] ) keyword[if] identifier[self] . identifier[_min_memory] : identifier[usage] = identifier[psutil] . identifier[virtual_memory] () keyword[yield] identifier[ResourceInfo] ( keyword[None] , identifier[usage] . identifier[available] , identifier[self] . identifier[_min_memory] )
def get_info(self): """Return ResourceInfo instances.""" if self._min_disk: for path in self._resource_paths: usage = psutil.disk_usage(path) yield ResourceInfo(path, usage.free, self._min_disk) # depends on [control=['for'], data=['path']] # depends on [control=['if'], data=[]] if self._min_memory: usage = psutil.virtual_memory() yield ResourceInfo(None, usage.available, self._min_memory) # depends on [control=['if'], data=[]]
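A sketch assuming psutil is installed; the minimum-resource thresholds are hypothetical:

import psutil

min_disk = 1 * 1024 ** 3      # require 1 GiB free disk (hypothetical)
min_memory = 256 * 1024 ** 2  # require 256 MiB available RAM (hypothetical)

disk = psutil.disk_usage('/')       # root filesystem
mem = psutil.virtual_memory()
print('disk ok:', disk.free >= min_disk)
print('memory ok:', mem.available >= min_memory)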
def print_napps(napps): """Print status, name and description.""" if not napps: print('No NApps found.') return stat_w = 6 # We already know the size of Status col name_w = max(len(n[1]) for n in napps) desc_w = max(len(n[2]) for n in napps) term_w = os.popen('stty size', 'r').read().split()[1] remaining = max(0, int(term_w) - stat_w - name_w - 6) desc_w = min(desc_w, remaining) widths = (stat_w, name_w, desc_w) header = '\n{:^%d} | {:^%d} | {:^%d}' % widths row = '{:^%d} | {:<%d} | {:<%d}' % widths print(header.format('Status', 'NApp ID', 'Description')) print('=+='.join('=' * w for w in widths)) for user, name, desc in napps: desc = (desc[:desc_w - 3] + '...') if len(desc) > desc_w else desc print(row.format(user, name, desc)) print('\nStatus: (i)nstalled, (e)nabled\n')
def function[print_napps, parameter[napps]]: constant[Print status, name and description.] if <ast.UnaryOp object at 0x7da1b242c6a0> begin[:] call[name[print], parameter[constant[No NApps found.]]] return[None] variable[stat_w] assign[=] constant[6] variable[name_w] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b242ff10>]] variable[desc_w] assign[=] call[name[max], parameter[<ast.GeneratorExp object at 0x7da1b242f220>]] variable[term_w] assign[=] call[call[call[call[name[os].popen, parameter[constant[stty size], constant[r]]].read, parameter[]].split, parameter[]]][constant[1]] variable[remaining] assign[=] call[name[max], parameter[constant[0], binary_operation[binary_operation[binary_operation[call[name[int], parameter[name[term_w]]] - name[stat_w]] - name[name_w]] - constant[6]]]] variable[desc_w] assign[=] call[name[min], parameter[name[desc_w], name[remaining]]] variable[widths] assign[=] tuple[[<ast.Name object at 0x7da18dc9bac0>, <ast.Name object at 0x7da18dc991e0>, <ast.Name object at 0x7da18dc9b850>]] variable[header] assign[=] binary_operation[constant[ {:^%d} | {:^%d} | {:^%d}] <ast.Mod object at 0x7da2590d6920> name[widths]] variable[row] assign[=] binary_operation[constant[{:^%d} | {:<%d} | {:<%d}] <ast.Mod object at 0x7da2590d6920> name[widths]] call[name[print], parameter[call[name[header].format, parameter[constant[Status], constant[NApp ID], constant[Description]]]]] call[name[print], parameter[call[constant[=+=].join, parameter[<ast.GeneratorExp object at 0x7da18dc9a020>]]]] for taget[tuple[[<ast.Name object at 0x7da20c6c6f50>, <ast.Name object at 0x7da20c6c7f10>, <ast.Name object at 0x7da20c6c43d0>]]] in starred[name[napps]] begin[:] variable[desc] assign[=] <ast.IfExp object at 0x7da20c6c6890> call[name[print], parameter[call[name[row].format, parameter[name[user], name[name], name[desc]]]]] call[name[print], parameter[constant[ Status: (i)nstalled, (e)nabled ]]]
keyword[def] identifier[print_napps] ( identifier[napps] ): literal[string] keyword[if] keyword[not] identifier[napps] : identifier[print] ( literal[string] ) keyword[return] identifier[stat_w] = literal[int] identifier[name_w] = identifier[max] ( identifier[len] ( identifier[n] [ literal[int] ]) keyword[for] identifier[n] keyword[in] identifier[napps] ) identifier[desc_w] = identifier[max] ( identifier[len] ( identifier[n] [ literal[int] ]) keyword[for] identifier[n] keyword[in] identifier[napps] ) identifier[term_w] = identifier[os] . identifier[popen] ( literal[string] , literal[string] ). identifier[read] (). identifier[split] ()[ literal[int] ] identifier[remaining] = identifier[max] ( literal[int] , identifier[int] ( identifier[term_w] )- identifier[stat_w] - identifier[name_w] - literal[int] ) identifier[desc_w] = identifier[min] ( identifier[desc_w] , identifier[remaining] ) identifier[widths] =( identifier[stat_w] , identifier[name_w] , identifier[desc_w] ) identifier[header] = literal[string] % identifier[widths] identifier[row] = literal[string] % identifier[widths] identifier[print] ( identifier[header] . identifier[format] ( literal[string] , literal[string] , literal[string] )) identifier[print] ( literal[string] . identifier[join] ( literal[string] * identifier[w] keyword[for] identifier[w] keyword[in] identifier[widths] )) keyword[for] identifier[user] , identifier[name] , identifier[desc] keyword[in] identifier[napps] : identifier[desc] =( identifier[desc] [: identifier[desc_w] - literal[int] ]+ literal[string] ) keyword[if] identifier[len] ( identifier[desc] )> identifier[desc_w] keyword[else] identifier[desc] identifier[print] ( identifier[row] . identifier[format] ( identifier[user] , identifier[name] , identifier[desc] )) identifier[print] ( literal[string] )
def print_napps(napps): """Print status, name and description.""" if not napps: print('No NApps found.') return # depends on [control=['if'], data=[]] stat_w = 6 # We already know the size of Status col name_w = max((len(n[1]) for n in napps)) desc_w = max((len(n[2]) for n in napps)) term_w = os.popen('stty size', 'r').read().split()[1] remaining = max(0, int(term_w) - stat_w - name_w - 6) desc_w = min(desc_w, remaining) widths = (stat_w, name_w, desc_w) header = '\n{:^%d} | {:^%d} | {:^%d}' % widths row = '{:^%d} | {:<%d} | {:<%d}' % widths print(header.format('Status', 'NApp ID', 'Description')) print('=+='.join(('=' * w for w in widths))) for (user, name, desc) in napps: desc = desc[:desc_w - 3] + '...' if len(desc) > desc_w else desc print(row.format(user, name, desc)) # depends on [control=['for'], data=[]] print('\nStatus: (i)nstalled, (e)nabled\n')
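A pure-Python sketch of the column sizing and row formatting; the sample NApps are hypothetical, and the terminal width is fixed at 80 instead of shelling out to stty:

napps = [('ie', 'kytos/of_core', 'Handles basic OpenFlow messages'),
         ('i-', 'kytos/topology', 'Tracks the network topology')]

stat_w, term_w = 6, 80
name_w = max(len(n[1]) for n in napps)
desc_w = min(max(len(n[2]) for n in napps), max(0, term_w - stat_w - name_w - 6))
row = '{:^%d} | {:<%d} | {:<%d}' % (stat_w, name_w, desc_w)
for user, name, desc in napps:
    # Truncate long descriptions, as the original does.
    desc = desc[:desc_w - 3] + '...' if len(desc) > desc_w else desc
    print(row.format(user, name, desc))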
def validate_tpa_user_id(self, value):
    """
    Validates the tpa_user_id, if it is given, to see if there is an existing
    EnterpriseCustomerUser for it.

    It first uses the third-party auth API to find the associated username
    to do the lookup.
    """
    enterprise_customer = self.context.get('enterprise_customer')

    try:
        tpa_client = ThirdPartyAuthApiClient()
        username = tpa_client.get_username_from_remote_id(
            enterprise_customer.identity_provider, value
        )
        user = User.objects.get(username=username)
        return models.EnterpriseCustomerUser.objects.get(
            user_id=user.id,
            enterprise_customer=enterprise_customer
        )
    except (models.EnterpriseCustomerUser.DoesNotExist, User.DoesNotExist):
        pass

    return None
def function[validate_tpa_user_id, parameter[self, value]]: constant[ Validates the tpa_user_id, if is given, to see if there is an existing EnterpriseCustomerUser for it. It first uses the third party auth api to find the associated username to do the lookup. ] variable[enterprise_customer] assign[=] call[name[self].context.get, parameter[constant[enterprise_customer]]] <ast.Try object at 0x7da1b0121f60> return[constant[None]]
keyword[def] identifier[validate_tpa_user_id] ( identifier[self] , identifier[value] ): literal[string] identifier[enterprise_customer] = identifier[self] . identifier[context] . identifier[get] ( literal[string] ) keyword[try] : identifier[tpa_client] = identifier[ThirdPartyAuthApiClient] () identifier[username] = identifier[tpa_client] . identifier[get_username_from_remote_id] ( identifier[enterprise_customer] . identifier[identity_provider] , identifier[value] ) identifier[user] = identifier[User] . identifier[objects] . identifier[get] ( identifier[username] = identifier[username] ) keyword[return] identifier[models] . identifier[EnterpriseCustomerUser] . identifier[objects] . identifier[get] ( identifier[user_id] = identifier[user] . identifier[id] , identifier[enterprise_customer] = identifier[enterprise_customer] ) keyword[except] ( identifier[models] . identifier[EnterpriseCustomerUser] . identifier[DoesNotExist] , identifier[User] . identifier[DoesNotExist] ): keyword[pass] keyword[return] keyword[None]
def validate_tpa_user_id(self, value):
    """
    Validates the tpa_user_id, if it is given, to see if there is an existing
    EnterpriseCustomerUser for it.

    It first uses the third-party auth API to find the associated username
    to do the lookup.
    """
    enterprise_customer = self.context.get('enterprise_customer')
    try:
        tpa_client = ThirdPartyAuthApiClient()
        username = tpa_client.get_username_from_remote_id(enterprise_customer.identity_provider, value)
        user = User.objects.get(username=username)
        return models.EnterpriseCustomerUser.objects.get(user_id=user.id, enterprise_customer=enterprise_customer) # depends on [control=['try'], data=[]]
    except (models.EnterpriseCustomerUser.DoesNotExist, User.DoesNotExist):
        pass # depends on [control=['except'], data=[]]
    return None
def guess_uri_type(uri: str, hint: str=None):
    """Return a guess for the URI type based on the URI string `uri`.

    If `hint` is given, it is assumed to be the correct type.
    Otherwise, the URI is inspected using urlparse, and we try to guess
    whether it's a remote Git repository, a remote downloadable archive,
    or local-only data.
    """
    # TODO(itamar): do this better
    if hint:
        return hint
    norm_uri = uri.lower()
    parsed_uri = urlparse(norm_uri)
    if parsed_uri.path.endswith('.git'):
        return 'git'
    if parsed_uri.scheme in ('http', 'https'):
        ext = splitext(parsed_uri.path)[-1]
        if ext in KNOWN_ARCHIVES:
            return 'archive'
        return 'single'
    return 'local'
def function[guess_uri_type, parameter[uri, hint]]: constant[Return a guess for the URI type based on the URI string `uri`. If `hint` is given, it is assumed to be the correct type. Otherwise, the URI is inspected using urlparse, and we try to guess whether it's a remote Git repository, a remote downloadable archive, or a local-only data. ] if name[hint] begin[:] return[name[hint]] variable[norm_uri] assign[=] call[name[uri].lower, parameter[]] variable[parsed_uri] assign[=] call[name[urlparse], parameter[name[norm_uri]]] if call[name[parsed_uri].path.endswith, parameter[constant[.git]]] begin[:] return[constant[git]] if compare[name[parsed_uri].scheme in tuple[[<ast.Constant object at 0x7da1b18386a0>, <ast.Constant object at 0x7da1b1838ac0>]]] begin[:] variable[ext] assign[=] call[call[name[splitext], parameter[name[parsed_uri].path]]][<ast.UnaryOp object at 0x7da1b1838f70>] if compare[name[ext] in name[KNOWN_ARCHIVES]] begin[:] return[constant[archive]] return[constant[single]] return[constant[local]]
keyword[def] identifier[guess_uri_type] ( identifier[uri] : identifier[str] , identifier[hint] : identifier[str] = keyword[None] ): literal[string] keyword[if] identifier[hint] : keyword[return] identifier[hint] identifier[norm_uri] = identifier[uri] . identifier[lower] () identifier[parsed_uri] = identifier[urlparse] ( identifier[norm_uri] ) keyword[if] identifier[parsed_uri] . identifier[path] . identifier[endswith] ( literal[string] ): keyword[return] literal[string] keyword[if] identifier[parsed_uri] . identifier[scheme] keyword[in] ( literal[string] , literal[string] ): identifier[ext] = identifier[splitext] ( identifier[parsed_uri] . identifier[path] )[- literal[int] ] keyword[if] identifier[ext] keyword[in] identifier[KNOWN_ARCHIVES] : keyword[return] literal[string] keyword[return] literal[string] keyword[return] literal[string]
def guess_uri_type(uri: str, hint: str=None):
    """Return a guess for the URI type based on the URI string `uri`.

    If `hint` is given, it is assumed to be the correct type.
    Otherwise, the URI is inspected using urlparse, and we try to guess
    whether it's a remote Git repository, a remote downloadable archive,
    or local-only data.
    """
    # TODO(itamar): do this better
    if hint:
        return hint # depends on [control=['if'], data=[]]
    norm_uri = uri.lower()
    parsed_uri = urlparse(norm_uri)
    if parsed_uri.path.endswith('.git'):
        return 'git' # depends on [control=['if'], data=[]]
    if parsed_uri.scheme in ('http', 'https'):
        ext = splitext(parsed_uri.path)[-1]
        if ext in KNOWN_ARCHIVES:
            return 'archive' # depends on [control=['if'], data=[]]
        return 'single' # depends on [control=['if'], data=[]]
    return 'local'
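A runnable sketch of the same dispatch logic on hypothetical URIs; the KNOWN_ARCHIVES contents here are an assumption:

from os.path import splitext
from urllib.parse import urlparse

KNOWN_ARCHIVES = {'.zip', '.tgz', '.gz'}  # assumed extension set

for uri in ('https://github.com/org/repo.git',
            'https://example.com/pkg-1.0.zip',
            'https://example.com/setup.py',
            '/home/user/data'):
    parsed = urlparse(uri.lower())
    if parsed.path.endswith('.git'):
        kind = 'git'
    elif parsed.scheme in ('http', 'https'):
        kind = 'archive' if splitext(parsed.path)[-1] in KNOWN_ARCHIVES else 'single'
    else:
        kind = 'local'
    print(uri, '->', kind)  # git, archive, single, local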
def apply(self, doc): """ Generate MentionTables from a Document by parsing all of its Tables. :param doc: The ``Document`` to parse. :type doc: ``Document`` :raises TypeError: If the input doc is not of type ``Document``. """ if not isinstance(doc, Document): raise TypeError( "Input Contexts to MentionTables.apply() must be of type Document" ) for table in doc.tables: yield TemporaryTableMention(table)
def function[apply, parameter[self, doc]]: constant[ Generate MentionTables from a Document by parsing all of its Tables. :param doc: The ``Document`` to parse. :type doc: ``Document`` :raises TypeError: If the input doc is not of type ``Document``. ] if <ast.UnaryOp object at 0x7da1b1e8e440> begin[:] <ast.Raise object at 0x7da1b1e8fa00> for taget[name[table]] in starred[name[doc].tables] begin[:] <ast.Yield object at 0x7da1b1e8e260>
keyword[def] identifier[apply] ( identifier[self] , identifier[doc] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[doc] , identifier[Document] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[for] identifier[table] keyword[in] identifier[doc] . identifier[tables] : keyword[yield] identifier[TemporaryTableMention] ( identifier[table] )
def apply(self, doc): """ Generate MentionTables from a Document by parsing all of its Tables. :param doc: The ``Document`` to parse. :type doc: ``Document`` :raises TypeError: If the input doc is not of type ``Document``. """ if not isinstance(doc, Document): raise TypeError('Input Contexts to MentionTables.apply() must be of type Document') # depends on [control=['if'], data=[]] for table in doc.tables: yield TemporaryTableMention(table) # depends on [control=['for'], data=['table']]
def set_order_by_clip(self, a, b): ''' Determine which SplitPiece is the leftmost based on the side of the longest clipping operation ''' if self.is_left_clip(a.cigar): self.query_left = b self.query_right = a else: self.query_left = a self.query_right = b
def function[set_order_by_clip, parameter[self, a, b]]: constant[ Determine which SplitPiece is the leftmost based on the side of the longest clipping operation ] if call[name[self].is_left_clip, parameter[name[a].cigar]] begin[:] name[self].query_left assign[=] name[b] name[self].query_right assign[=] name[a]
keyword[def] identifier[set_order_by_clip] ( identifier[self] , identifier[a] , identifier[b] ): literal[string] keyword[if] identifier[self] . identifier[is_left_clip] ( identifier[a] . identifier[cigar] ): identifier[self] . identifier[query_left] = identifier[b] identifier[self] . identifier[query_right] = identifier[a] keyword[else] : identifier[self] . identifier[query_left] = identifier[a] identifier[self] . identifier[query_right] = identifier[b]
def set_order_by_clip(self, a, b): """ Determine which SplitPiece is the leftmost based on the side of the longest clipping operation """ if self.is_left_clip(a.cigar): self.query_left = b self.query_right = a # depends on [control=['if'], data=[]] else: self.query_left = a self.query_right = b
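A hedged sketch of the clip-side test this method relies on, assuming pysam-style CIGAR tuples where opcode 4 is a soft clip; is_left_clip itself is not shown above, so this version is an assumption:

def is_left_clip(cigar):
    # Assumed semantics: the alignment's longest clip sits at the CIGAR's start.
    first_op, first_len = cigar[0]
    last_op, last_len = cigar[-1]
    return first_op == 4 and (last_op != 4 or first_len > last_len)

a_cigar = [(4, 60), (0, 40)]  # 60 bp soft-clipped on the left, 40 bp matched
b_cigar = [(0, 40), (4, 60)]  # clipped on the right
print(is_left_clip(a_cigar), is_left_clip(b_cigar))  # True False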
def remove_orbit(self, component=None, **kwargs):
    """
    [NOT IMPLEMENTED]

    Shortcut to :meth:`remove_component` but with kind='orbit'
    """
    kwargs.setdefault('kind', 'orbit')
    return self.remove_component(component, **kwargs)
def function[remove_orbit, parameter[self, component]]: constant[ [NOT IMPLEMENTED] Shortcut to :meth:`remove_component` but with kind='star' ] call[name[kwargs].setdefault, parameter[constant[kind], constant[orbit]]] return[call[name[self].remove_component, parameter[name[component]]]]
keyword[def] identifier[remove_orbit] ( identifier[self] , identifier[component] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[kwargs] . identifier[setdefault] ( literal[string] , literal[string] ) keyword[return] identifier[self] . identifier[remove_component] ( identifier[component] ,** identifier[kwargs] )
def remove_orbit(self, component=None, **kwargs):
    """
    [NOT IMPLEMENTED]

    Shortcut to :meth:`remove_component` but with kind='orbit'
    """
    kwargs.setdefault('kind', 'orbit')
    return self.remove_component(component, **kwargs)
def getDataset(self, id_): """ Returns a dataset with the specified ID, or raises a DatasetNotFoundException if it does not exist. """ if id_ not in self._datasetIdMap: raise exceptions.DatasetNotFoundException(id_) return self._datasetIdMap[id_]
def function[getDataset, parameter[self, id_]]: constant[ Returns a dataset with the specified ID, or raises a DatasetNotFoundException if it does not exist. ] if compare[name[id_] <ast.NotIn object at 0x7da2590d7190> name[self]._datasetIdMap] begin[:] <ast.Raise object at 0x7da18bccb310> return[call[name[self]._datasetIdMap][name[id_]]]
keyword[def] identifier[getDataset] ( identifier[self] , identifier[id_] ): literal[string] keyword[if] identifier[id_] keyword[not] keyword[in] identifier[self] . identifier[_datasetIdMap] : keyword[raise] identifier[exceptions] . identifier[DatasetNotFoundException] ( identifier[id_] ) keyword[return] identifier[self] . identifier[_datasetIdMap] [ identifier[id_] ]
def getDataset(self, id_): """ Returns a dataset with the specified ID, or raises a DatasetNotFoundException if it does not exist. """ if id_ not in self._datasetIdMap: raise exceptions.DatasetNotFoundException(id_) # depends on [control=['if'], data=['id_']] return self._datasetIdMap[id_]
def is_in_schedule_mode(self): """Returns True if base_station is currently on a scheduled mode.""" resource = "schedule" mode_event = self.publish_and_get_event(resource) if mode_event and mode_event.get("resource", None) == "schedule": properties = mode_event.get('properties') return properties.get("active", False) return False
def function[is_in_schedule_mode, parameter[self]]: constant[Returns True if base_station is currently on a scheduled mode.] variable[resource] assign[=] constant[schedule] variable[mode_event] assign[=] call[name[self].publish_and_get_event, parameter[name[resource]]] if <ast.BoolOp object at 0x7da2054a48b0> begin[:] variable[properties] assign[=] call[name[mode_event].get, parameter[constant[properties]]] return[call[name[properties].get, parameter[constant[active], constant[False]]]] return[constant[False]]
keyword[def] identifier[is_in_schedule_mode] ( identifier[self] ): literal[string] identifier[resource] = literal[string] identifier[mode_event] = identifier[self] . identifier[publish_and_get_event] ( identifier[resource] ) keyword[if] identifier[mode_event] keyword[and] identifier[mode_event] . identifier[get] ( literal[string] , keyword[None] )== literal[string] : identifier[properties] = identifier[mode_event] . identifier[get] ( literal[string] ) keyword[return] identifier[properties] . identifier[get] ( literal[string] , keyword[False] ) keyword[return] keyword[False]
def is_in_schedule_mode(self): """Returns True if base_station is currently on a scheduled mode.""" resource = 'schedule' mode_event = self.publish_and_get_event(resource) if mode_event and mode_event.get('resource', None) == 'schedule': properties = mode_event.get('properties') return properties.get('active', False) # depends on [control=['if'], data=[]] return False