code             stringlengths 75-104k
code_sememe      stringlengths 47-309k
token_type       stringlengths 215-214k
code_dependency  stringlengths 75-155k
def _create_kvstore(kvstore, num_device, arg_params):
    """Create kvstore
    This function selects and creates a proper kvstore if given the kvstore type.

    Parameters
    ----------
    kvstore : KVStore or str
        The kvstore.
    num_device : int
        The number of devices
    arg_params : dict of str to `NDArray`.
        Model parameter, dict of name to `NDArray` of net's weights.
    """
    update_on_kvstore = bool(int(os.getenv('MXNET_UPDATE_ON_KVSTORE', "1")))
    if kvstore is None:
        kv = None
    elif isinstance(kvstore, kvs.KVStore):
        kv = kvstore
    elif isinstance(kvstore, str):
        # create kvstore using the string type
        if num_device == 1 and 'dist' not in kvstore:
            # no need to use kv for single device and single machine
            kv = None
        else:
            kv = kvs.create(kvstore)
            if kvstore == 'local':
                # automatically select a proper local
                max_size = max(np.prod(param.shape) for param in arg_params.values())
                if max_size > 1024 * 1024 * 16:
                    update_on_kvstore = False
    else:
        raise TypeError('kvstore must be KVStore, str or None')

    if kv is None:
        update_on_kvstore = False

    return (kv, update_on_kvstore)
def function[_create_kvstore, parameter[kvstore, num_device, arg_params]]: constant[Create kvstore This function select and create a proper kvstore if given the kvstore type. Parameters ---------- kvstore : KVStore or str The kvstore. num_device : int The number of devices arg_params : dict of str to `NDArray`. Model parameter, dict of name to `NDArray` of net's weights. ] variable[update_on_kvstore] assign[=] call[name[bool], parameter[call[name[int], parameter[call[name[os].getenv, parameter[constant[MXNET_UPDATE_ON_KVSTORE], constant[1]]]]]]] if compare[name[kvstore] is constant[None]] begin[:] variable[kv] assign[=] constant[None] if compare[name[kv] is constant[None]] begin[:] variable[update_on_kvstore] assign[=] constant[False] return[tuple[[<ast.Name object at 0x7da1b1f8f3d0>, <ast.Name object at 0x7da1b1f8e2c0>]]]
keyword[def] identifier[_create_kvstore] ( identifier[kvstore] , identifier[num_device] , identifier[arg_params] ): literal[string] identifier[update_on_kvstore] = identifier[bool] ( identifier[int] ( identifier[os] . identifier[getenv] ( literal[string] , literal[string] ))) keyword[if] identifier[kvstore] keyword[is] keyword[None] : identifier[kv] = keyword[None] keyword[elif] identifier[isinstance] ( identifier[kvstore] , identifier[kvs] . identifier[KVStore] ): identifier[kv] = identifier[kvstore] keyword[elif] identifier[isinstance] ( identifier[kvstore] , identifier[str] ): keyword[if] identifier[num_device] == literal[int] keyword[and] literal[string] keyword[not] keyword[in] identifier[kvstore] : identifier[kv] = keyword[None] keyword[else] : identifier[kv] = identifier[kvs] . identifier[create] ( identifier[kvstore] ) keyword[if] identifier[kvstore] == literal[string] : identifier[max_size] = identifier[max] ( identifier[np] . identifier[prod] ( identifier[param] . identifier[shape] ) keyword[for] identifier[param] keyword[in] identifier[arg_params] . identifier[values] ()) keyword[if] identifier[max_size] > literal[int] * literal[int] * literal[int] : identifier[update_on_kvstore] = keyword[False] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] identifier[kv] keyword[is] keyword[None] : identifier[update_on_kvstore] = keyword[False] keyword[return] ( identifier[kv] , identifier[update_on_kvstore] )
def _create_kvstore(kvstore, num_device, arg_params):
    """Create kvstore
    This function selects and creates a proper kvstore if given the kvstore type.

    Parameters
    ----------
    kvstore : KVStore or str
        The kvstore.
    num_device : int
        The number of devices
    arg_params : dict of str to `NDArray`.
        Model parameter, dict of name to `NDArray` of net's weights.
    """
    update_on_kvstore = bool(int(os.getenv('MXNET_UPDATE_ON_KVSTORE', '1')))
    if kvstore is None:
        kv = None # depends on [control=['if'], data=[]]
    elif isinstance(kvstore, kvs.KVStore):
        kv = kvstore # depends on [control=['if'], data=[]]
    elif isinstance(kvstore, str):
        # create kvstore using the string type
        if num_device == 1 and 'dist' not in kvstore:
            # no need to use kv for single device and single machine
            kv = None # depends on [control=['if'], data=[]]
        else:
            kv = kvs.create(kvstore)
            if kvstore == 'local':
                # automatically select a proper local
                max_size = max((np.prod(param.shape) for param in arg_params.values()))
                if max_size > 1024 * 1024 * 16:
                    update_on_kvstore = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        raise TypeError('kvstore must be KVStore, str or None')
    if kv is None:
        update_on_kvstore = False # depends on [control=['if'], data=[]]
    return (kv, update_on_kvstore)
def get_user_lists(self, course, aggregationid=''):
    """ Get the available student and tutor lists for aggregation edition"""
    tutor_list = course.get_staff()

    # Determine student list and if they are grouped
    student_list = list(self.database.aggregations.aggregate([
        {"$match": {"courseid": course.get_id()}},
        {"$unwind": "$students"},
        {"$project": {
            "classroom": "$_id",
            "students": 1,
            "grouped": {
                "$anyElementTrue": {
                    "$map": {
                        "input": "$groups.students",
                        "as": "group",
                        "in": {
                            "$anyElementTrue": {
                                "$map": {
                                    "input": "$$group",
                                    "as": "groupmember",
                                    "in": {"$eq": ["$$groupmember", "$students"]}
                                }
                            }
                        }
                    }
                }
            }
        }}
    ]))

    student_list = dict([(student["students"], student) for student in student_list])
    users_info = self.user_manager.get_users_info(list(student_list.keys()) + tutor_list)

    if aggregationid:
        # Order the non-registered students
        other_students = [student_list[entry]['students'] for entry in student_list.keys()
                          if not student_list[entry]['classroom'] == ObjectId(aggregationid)]
        other_students = sorted(other_students,
                                key=lambda val: (("0"+users_info[val][0]) if users_info[val] else ("1"+val)))

        return student_list, tutor_list, other_students, users_info
    else:
        return student_list, tutor_list, users_info
def function[get_user_lists, parameter[self, course, aggregationid]]: constant[ Get the available student and tutor lists for aggregation edition] variable[tutor_list] assign[=] call[name[course].get_staff, parameter[]] variable[student_list] assign[=] call[name[list], parameter[call[name[self].database.aggregations.aggregate, parameter[list[[<ast.Dict object at 0x7da18c4cc100>, <ast.Dict object at 0x7da18c4cded0>, <ast.Dict object at 0x7da18c4cd120>]]]]]] variable[student_list] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da18dc07a60>]] variable[users_info] assign[=] call[name[self].user_manager.get_users_info, parameter[binary_operation[call[name[list], parameter[call[name[student_list].keys, parameter[]]]] + name[tutor_list]]]] if name[aggregationid] begin[:] variable[other_students] assign[=] <ast.ListComp object at 0x7da18dc04bb0> variable[other_students] assign[=] call[name[sorted], parameter[name[other_students]]] return[tuple[[<ast.Name object at 0x7da18dc06170>, <ast.Name object at 0x7da18dc05bd0>, <ast.Name object at 0x7da18dc06b60>, <ast.Name object at 0x7da18dc05810>]]]
keyword[def] identifier[get_user_lists] ( identifier[self] , identifier[course] , identifier[aggregationid] = literal[string] ): literal[string] identifier[tutor_list] = identifier[course] . identifier[get_staff] () identifier[student_list] = identifier[list] ( identifier[self] . identifier[database] . identifier[aggregations] . identifier[aggregate] ([ { literal[string] :{ literal[string] : identifier[course] . identifier[get_id] ()}}, { literal[string] : literal[string] }, { literal[string] :{ literal[string] : literal[string] , literal[string] : literal[int] , literal[string] :{ literal[string] :{ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] :{ literal[string] :{ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] :{ literal[string] :[ literal[string] , literal[string] ]} } } } } } } }} ])) identifier[student_list] = identifier[dict] ([( identifier[student] [ literal[string] ], identifier[student] ) keyword[for] identifier[student] keyword[in] identifier[student_list] ]) identifier[users_info] = identifier[self] . identifier[user_manager] . identifier[get_users_info] ( identifier[list] ( identifier[student_list] . identifier[keys] ())+ identifier[tutor_list] ) keyword[if] identifier[aggregationid] : identifier[other_students] =[ identifier[student_list] [ identifier[entry] ][ literal[string] ] keyword[for] identifier[entry] keyword[in] identifier[student_list] . identifier[keys] () keyword[if] keyword[not] identifier[student_list] [ identifier[entry] ][ literal[string] ]== identifier[ObjectId] ( identifier[aggregationid] )] identifier[other_students] = identifier[sorted] ( identifier[other_students] , identifier[key] = keyword[lambda] identifier[val] :(( literal[string] + identifier[users_info] [ identifier[val] ][ literal[int] ]) keyword[if] identifier[users_info] [ identifier[val] ] keyword[else] ( literal[string] + identifier[val] ))) keyword[return] identifier[student_list] , identifier[tutor_list] , identifier[other_students] , identifier[users_info] keyword[else] : keyword[return] identifier[student_list] , identifier[tutor_list] , identifier[users_info]
def get_user_lists(self, course, aggregationid=''):
    """ Get the available student and tutor lists for aggregation edition"""
    tutor_list = course.get_staff()
    # Determine student list and if they are grouped
    student_list = list(self.database.aggregations.aggregate([{'$match': {'courseid': course.get_id()}}, {'$unwind': '$students'}, {'$project': {'classroom': '$_id', 'students': 1, 'grouped': {'$anyElementTrue': {'$map': {'input': '$groups.students', 'as': 'group', 'in': {'$anyElementTrue': {'$map': {'input': '$$group', 'as': 'groupmember', 'in': {'$eq': ['$$groupmember', '$students']}}}}}}}}}]))
    student_list = dict([(student['students'], student) for student in student_list])
    users_info = self.user_manager.get_users_info(list(student_list.keys()) + tutor_list)
    if aggregationid:
        # Order the non-registered students
        other_students = [student_list[entry]['students'] for entry in student_list.keys() if not student_list[entry]['classroom'] == ObjectId(aggregationid)]
        other_students = sorted(other_students, key=lambda val: '0' + users_info[val][0] if users_info[val] else '1' + val)
        return (student_list, tutor_list, other_students, users_info) # depends on [control=['if'], data=[]]
    else:
        return (student_list, tutor_list, users_info)
def create(cls, name, min_dst_port, max_dst_port=None,
           min_src_port=None, max_src_port=None,
           protocol_agent=None, comment=None):
    """
    Create the TCP service

    :param str name: name of tcp service
    :param int min_dst_port: minimum destination port value
    :param int max_dst_port: maximum destination port value
    :param int min_src_port: minimum source port value
    :param int max_src_port: maximum source port value
    :param str,ProtocolAgent protocol_agent: optional protocol agent for
        this service
    :param str comment: optional comment for service
    :raises CreateElementFailed: failure creating element with reason
    :return: instance with meta
    :rtype: TCPService
    """
    max_dst_port = max_dst_port if max_dst_port is not None else ''
    json = {'name': name,
            'min_dst_port': min_dst_port,
            'max_dst_port': max_dst_port,
            'min_src_port': min_src_port,
            'max_src_port': max_src_port,
            'protocol_agent_ref': element_resolver(protocol_agent) or None,
            'comment': comment}

    return ElementCreator(cls, json)
def function[create, parameter[cls, name, min_dst_port, max_dst_port, min_src_port, max_src_port, protocol_agent, comment]]: constant[ Create the TCP service :param str name: name of tcp service :param int min_dst_port: minimum destination port value :param int max_dst_port: maximum destination port value :param int min_src_port: minimum source port value :param int max_src_port: maximum source port value :param str,ProtocolAgent protocol_agent: optional protocol agent for this service :param str comment: optional comment for service :raises CreateElementFailed: failure creating element with reason :return: instance with meta :rtype: TCPService ] variable[max_dst_port] assign[=] <ast.IfExp object at 0x7da1b1a2cf10> variable[json] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a2d0f0>, <ast.Constant object at 0x7da1b1a2e6b0>, <ast.Constant object at 0x7da1b1a2d960>, <ast.Constant object at 0x7da1b1a2dc30>, <ast.Constant object at 0x7da1b1a2f640>, <ast.Constant object at 0x7da1b1a2db10>, <ast.Constant object at 0x7da1b1a2c7f0>], [<ast.Name object at 0x7da1b1a2d660>, <ast.Name object at 0x7da1b1a2d570>, <ast.Name object at 0x7da1b1a2f6d0>, <ast.Name object at 0x7da1b1a2f0d0>, <ast.Name object at 0x7da1b1a2d9c0>, <ast.BoolOp object at 0x7da1b1a2fe20>, <ast.Name object at 0x7da1b1a2e680>]] return[call[name[ElementCreator], parameter[name[cls], name[json]]]]
keyword[def] identifier[create] ( identifier[cls] , identifier[name] , identifier[min_dst_port] , identifier[max_dst_port] = keyword[None] , identifier[min_src_port] = keyword[None] , identifier[max_src_port] = keyword[None] , identifier[protocol_agent] = keyword[None] , identifier[comment] = keyword[None] ): literal[string] identifier[max_dst_port] = identifier[max_dst_port] keyword[if] identifier[max_dst_port] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] identifier[json] ={ literal[string] : identifier[name] , literal[string] : identifier[min_dst_port] , literal[string] : identifier[max_dst_port] , literal[string] : identifier[min_src_port] , literal[string] : identifier[max_src_port] , literal[string] : identifier[element_resolver] ( identifier[protocol_agent] ) keyword[or] keyword[None] , literal[string] : identifier[comment] } keyword[return] identifier[ElementCreator] ( identifier[cls] , identifier[json] )
def create(cls, name, min_dst_port, max_dst_port=None, min_src_port=None, max_src_port=None, protocol_agent=None, comment=None):
    """
    Create the TCP service

    :param str name: name of tcp service
    :param int min_dst_port: minimum destination port value
    :param int max_dst_port: maximum destination port value
    :param int min_src_port: minimum source port value
    :param int max_src_port: maximum source port value
    :param str,ProtocolAgent protocol_agent: optional protocol agent for
        this service
    :param str comment: optional comment for service
    :raises CreateElementFailed: failure creating element with reason
    :return: instance with meta
    :rtype: TCPService
    """
    max_dst_port = max_dst_port if max_dst_port is not None else ''
    json = {'name': name, 'min_dst_port': min_dst_port, 'max_dst_port': max_dst_port, 'min_src_port': min_src_port, 'max_src_port': max_src_port, 'protocol_agent_ref': element_resolver(protocol_agent) or None, 'comment': comment}
    return ElementCreator(cls, json)
def on_data(self, data):
    """
    The function called when new data has arrived.

    :param data: The list of data records received.
    """
    for d in data:
        self._populate_sub_entity(d, 'Device')
        self._populate_sub_entity(d, 'Rule')
        date = dates.localize_datetime(d['activeFrom'])
        click.echo(
            '[{date}] {device} ({rule})'.format(date=date,
                                                device=d['device'].get('name', '**Unknown Vehicle'),
                                                rule=d['rule'].get('name', '**Unknown Rule')))
def function[on_data, parameter[self, data]]: constant[ The function called when new data has arrived. :param data: The list of data records received. ] for taget[name[d]] in starred[name[data]] begin[:] call[name[self]._populate_sub_entity, parameter[name[d], constant[Device]]] call[name[self]._populate_sub_entity, parameter[name[d], constant[Rule]]] variable[date] assign[=] call[name[dates].localize_datetime, parameter[call[name[d]][constant[activeFrom]]]] call[name[click].echo, parameter[call[constant[[{date}] {device} ({rule})].format, parameter[]]]]
keyword[def] identifier[on_data] ( identifier[self] , identifier[data] ): literal[string] keyword[for] identifier[d] keyword[in] identifier[data] : identifier[self] . identifier[_populate_sub_entity] ( identifier[d] , literal[string] ) identifier[self] . identifier[_populate_sub_entity] ( identifier[d] , literal[string] ) identifier[date] = identifier[dates] . identifier[localize_datetime] ( identifier[d] [ literal[string] ]) identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[date] = identifier[date] , identifier[device] = identifier[d] [ literal[string] ]. identifier[get] ( literal[string] , literal[string] ), identifier[rule] = identifier[d] [ literal[string] ]. identifier[get] ( literal[string] , literal[string] )))
def on_data(self, data):
    """
    The function called when new data has arrived.

    :param data: The list of data records received.
    """
    for d in data:
        self._populate_sub_entity(d, 'Device')
        self._populate_sub_entity(d, 'Rule')
        date = dates.localize_datetime(d['activeFrom'])
        click.echo('[{date}] {device} ({rule})'.format(date=date, device=d['device'].get('name', '**Unknown Vehicle'), rule=d['rule'].get('name', '**Unknown Rule'))) # depends on [control=['for'], data=['d']]
def _get_decision_trees_bulk(self, payload, valid_indices, invalid_indices, invalid_dts):
    """Tool for the function get_decision_trees_bulk.

    :param list payload: contains the information necessary for getting the trees.
        Its form is the same as for the function get_decision_trees_bulk.
    :param list valid_indices: list of the indices of the valid agent ids.
    :param list invalid_indices: list of the indices of the invalid agent ids.
    :param list invalid_dts: list of the invalid agent ids.

    :return: decision trees.
    :rtype: list of dict.
    """
    valid_dts = self._create_and_send_json_bulk([payload[i] for i in valid_indices],
                                                "{}/bulk/decision_tree".format(self._base_url),
                                                "POST")
    if invalid_indices == []:
        return valid_dts

    # Put the valid and invalid decision trees in their original index
    return self._recreate_list_with_indices(valid_indices, valid_dts, invalid_indices, invalid_dts)
def function[_get_decision_trees_bulk, parameter[self, payload, valid_indices, invalid_indices, invalid_dts]]: constant[Tool for the function get_decision_trees_bulk. :param list payload: contains the informations necessary for getting the trees. Its form is the same than for the function. get_decision_trees_bulk. :param list valid_indices: list of the indices of the valid agent id. :param list invalid_indices: list of the indices of the valid agent id. :param list invalid_dts: list of the invalid agent id. :return: decision trees. :rtype: list of dict. ] variable[valid_dts] assign[=] call[name[self]._create_and_send_json_bulk, parameter[<ast.ListComp object at 0x7da2054a5960>, call[constant[{}/bulk/decision_tree].format, parameter[name[self]._base_url]], constant[POST]]] if compare[name[invalid_indices] equal[==] list[[]]] begin[:] return[name[valid_dts]] return[call[name[self]._recreate_list_with_indices, parameter[name[valid_indices], name[valid_dts], name[invalid_indices], name[invalid_dts]]]]
keyword[def] identifier[_get_decision_trees_bulk] ( identifier[self] , identifier[payload] , identifier[valid_indices] , identifier[invalid_indices] , identifier[invalid_dts] ): literal[string] identifier[valid_dts] = identifier[self] . identifier[_create_and_send_json_bulk] ([ identifier[payload] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[valid_indices] ], literal[string] . identifier[format] ( identifier[self] . identifier[_base_url] ), literal[string] ) keyword[if] identifier[invalid_indices] ==[]: keyword[return] identifier[valid_dts] keyword[return] identifier[self] . identifier[_recreate_list_with_indices] ( identifier[valid_indices] , identifier[valid_dts] , identifier[invalid_indices] , identifier[invalid_dts] )
def _get_decision_trees_bulk(self, payload, valid_indices, invalid_indices, invalid_dts):
    """Tool for the function get_decision_trees_bulk.

    :param list payload: contains the information necessary for getting the trees.
        Its form is the same as for the function get_decision_trees_bulk.
    :param list valid_indices: list of the indices of the valid agent ids.
    :param list invalid_indices: list of the indices of the invalid agent ids.
    :param list invalid_dts: list of the invalid agent ids.

    :return: decision trees.
    :rtype: list of dict.
    """
    valid_dts = self._create_and_send_json_bulk([payload[i] for i in valid_indices], '{}/bulk/decision_tree'.format(self._base_url), 'POST')
    if invalid_indices == []:
        return valid_dts # depends on [control=['if'], data=[]]
    # Put the valid and invalid decision trees in their original index
    return self._recreate_list_with_indices(valid_indices, valid_dts, invalid_indices, invalid_dts)
def connect_to_wifi(self, ssid, password=None):
    """ [Test Agent] Connect to *ssid* with *password* """
    cmd = 'am broadcast -a testagent -e action CONNECT_TO_WIFI -e ssid %s -e password %s' % (ssid, password)
    self.adb.shell_cmd(cmd)
def function[connect_to_wifi, parameter[self, ssid, password]]: constant[ [Test Agent] Connect to *ssid* with *password* ] variable[cmd] assign[=] binary_operation[constant[am broadcast -a testagent -e action CONNECT_TO_WIFI -e ssid %s -e password %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc9b550>, <ast.Name object at 0x7da18dc9b460>]]] call[name[self].adb.shell_cmd, parameter[name[cmd]]]
keyword[def] identifier[connect_to_wifi] ( identifier[self] , identifier[ssid] , identifier[password] = keyword[None] ): literal[string] identifier[cmd] = literal[string] %( identifier[ssid] , identifier[password] ) identifier[self] . identifier[adb] . identifier[shell_cmd] ( identifier[cmd] )
def connect_to_wifi(self, ssid, password=None):
    """ [Test Agent] Connect to *ssid* with *password* """
    cmd = 'am broadcast -a testagent -e action CONNECT_TO_WIFI -e ssid %s -e password %s' % (ssid, password)
    self.adb.shell_cmd(cmd)
def transform(self, path):
    """
    Transform a path into an actual Python object.

    The path can be arbitrarily long. You can pass the path to a package,
    a module, a class, a function or a global variable, as deep as you want,
    as long as the deepest module is importable through
    ``importlib.import_module`` and each object is obtainable through the
    ``getattr`` method. Local objects will not work.

    Args:
        path (str): the dot-separated path of the object.

    Returns:
        object: the imported module or obtained object.
    """
    if path is None or not path:
        return None

    obj_parent_modules = path.split(".")
    objects = [obj_parent_modules.pop(-1)]

    while True:
        try:
            parent_module_path = ".".join(obj_parent_modules)
            parent_module = importlib.import_module(parent_module_path)
            break
        except ImportError:
            if len(obj_parent_modules) == 1:
                raise ImportError("No module named '%s'" % obj_parent_modules[0])
            objects.insert(0, obj_parent_modules.pop(-1))

    current_object = parent_module
    for obj in objects:
        current_object = getattr(current_object, obj)
    return current_object
def function[transform, parameter[self, path]]: constant[ Transform a path into an actual Python object. The path can be arbitrary long. You can pass the path to a package, a module, a class, a function or a global variable, as deep as you want, as long as the deepest module is importable through ``importlib.import_module`` and each object is obtainable through the ``getattr`` method. Local objects will not work. Args: path (str): the dot-separated path of the object. Returns: object: the imported module or obtained object. ] if <ast.BoolOp object at 0x7da2047e8250> begin[:] return[constant[None]] variable[obj_parent_modules] assign[=] call[name[path].split, parameter[constant[.]]] variable[objects] assign[=] list[[<ast.Call object at 0x7da2047ea470>]] while constant[True] begin[:] <ast.Try object at 0x7da2047e80a0> variable[current_object] assign[=] name[parent_module] for taget[name[obj]] in starred[name[objects]] begin[:] variable[current_object] assign[=] call[name[getattr], parameter[name[current_object], name[obj]]] return[name[current_object]]
keyword[def] identifier[transform] ( identifier[self] , identifier[path] ): literal[string] keyword[if] identifier[path] keyword[is] keyword[None] keyword[or] keyword[not] identifier[path] : keyword[return] keyword[None] identifier[obj_parent_modules] = identifier[path] . identifier[split] ( literal[string] ) identifier[objects] =[ identifier[obj_parent_modules] . identifier[pop] (- literal[int] )] keyword[while] keyword[True] : keyword[try] : identifier[parent_module_path] = literal[string] . identifier[join] ( identifier[obj_parent_modules] ) identifier[parent_module] = identifier[importlib] . identifier[import_module] ( identifier[parent_module_path] ) keyword[break] keyword[except] identifier[ImportError] : keyword[if] identifier[len] ( identifier[obj_parent_modules] )== literal[int] : keyword[raise] identifier[ImportError] ( literal[string] % identifier[obj_parent_modules] [ literal[int] ]) identifier[objects] . identifier[insert] ( literal[int] , identifier[obj_parent_modules] . identifier[pop] (- literal[int] )) identifier[current_object] = identifier[parent_module] keyword[for] identifier[obj] keyword[in] identifier[objects] : identifier[current_object] = identifier[getattr] ( identifier[current_object] , identifier[obj] ) keyword[return] identifier[current_object]
def transform(self, path):
    """
    Transform a path into an actual Python object.

    The path can be arbitrarily long. You can pass the path to a package,
    a module, a class, a function or a global variable, as deep as you want,
    as long as the deepest module is importable through
    ``importlib.import_module`` and each object is obtainable through the
    ``getattr`` method. Local objects will not work.

    Args:
        path (str): the dot-separated path of the object.

    Returns:
        object: the imported module or obtained object.
    """
    if path is None or not path:
        return None # depends on [control=['if'], data=[]]
    obj_parent_modules = path.split('.')
    objects = [obj_parent_modules.pop(-1)]
    while True:
        try:
            parent_module_path = '.'.join(obj_parent_modules)
            parent_module = importlib.import_module(parent_module_path)
            break # depends on [control=['try'], data=[]]
        except ImportError:
            if len(obj_parent_modules) == 1:
                raise ImportError("No module named '%s'" % obj_parent_modules[0]) # depends on [control=['if'], data=[]]
            objects.insert(0, obj_parent_modules.pop(-1)) # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
    current_object = parent_module
    for obj in objects:
        current_object = getattr(current_object, obj) # depends on [control=['for'], data=['obj']]
    return current_object
def _set_rpc(self, rpc_type: str) -> None:
    """
    Sets rpc based on the type

    :param rpc_type: The type of connection: like infura, ganache, localhost
    :return:
    """
    if rpc_type == "infura":
        self.set_api_rpc_infura()
    elif rpc_type == "localhost":
        self.set_api_rpc_localhost()
    else:
        self.set_api_rpc(rpc_type)
def function[_set_rpc, parameter[self, rpc_type]]: constant[ Sets rpc based on the type :param rpc_type: The type of connection: like infura, ganache, localhost :return: ] if compare[name[rpc_type] equal[==] constant[infura]] begin[:] call[name[self].set_api_rpc_infura, parameter[]]
keyword[def] identifier[_set_rpc] ( identifier[self] , identifier[rpc_type] : identifier[str] )-> keyword[None] : literal[string] keyword[if] identifier[rpc_type] == literal[string] : identifier[self] . identifier[set_api_rpc_infura] () keyword[elif] identifier[rpc_type] == literal[string] : identifier[self] . identifier[set_api_rpc_localhost] () keyword[else] : identifier[self] . identifier[set_api_rpc] ( identifier[rpc_type] )
def _set_rpc(self, rpc_type: str) -> None:
    """
    Sets rpc based on the type

    :param rpc_type: The type of connection: like infura, ganache, localhost
    :return:
    """
    if rpc_type == 'infura':
        self.set_api_rpc_infura() # depends on [control=['if'], data=[]]
    elif rpc_type == 'localhost':
        self.set_api_rpc_localhost() # depends on [control=['if'], data=[]]
    else:
        self.set_api_rpc(rpc_type)
def _compute_schoenfeld_within_strata(self, X, T, E, weights):
    """
    A positive value of the residual shows an X value that is higher
    than expected at that death time.
    """
    # TODO: the diff_against is gross
    # This uses Efron ties.

    n, d = X.shape

    if not np.any(E):
        # sometimes strata have no deaths. This means nothing is returned
        # in the below code.
        return np.zeros((n, d))

    # Init risk and tie sums to zero
    risk_phi, tie_phi = 0, 0
    risk_phi_x, tie_phi_x = np.zeros((1, d)), np.zeros((1, d))

    # Init number of ties and weights
    weight_count = 0.0
    tie_count = 0

    scores = weights * np.exp(np.dot(X, self.hazards_))

    diff_against = []

    schoenfeld_residuals = np.empty((0, d))

    # Iterate backwards to utilize recursive relationship
    for i in range(n - 1, -1, -1):
        # Doing it like this to preserve shape
        ti = T[i]
        ei = E[i]
        xi = X[i : i + 1]
        score = scores[i : i + 1]
        w = weights[i]

        # Calculate phi values
        phi_i = score
        phi_x_i = phi_i * xi

        # Calculate sums of Risk set
        risk_phi = risk_phi + phi_i
        risk_phi_x = risk_phi_x + phi_x_i

        # Calculate sums of Ties, if this is an event
        diff_against.append((xi, ei))
        if ei:
            tie_phi = tie_phi + phi_i
            tie_phi_x = tie_phi_x + phi_x_i

            # Keep track of count
            tie_count += 1  # aka death counts
            weight_count += w

        if i > 0 and T[i - 1] == ti:
            # There are more ties/members of the risk set
            continue
        elif tie_count == 0:
            for _ in diff_against:
                schoenfeld_residuals = np.append(schoenfeld_residuals, np.zeros((1, d)), axis=0)
            diff_against = []
            continue

        # There was at least one event and no more ties remain. Time to sum.
        weighted_mean = np.zeros((1, d))

        for l in range(tie_count):
            numer = risk_phi_x - l * tie_phi_x / tie_count
            denom = risk_phi - l * tie_phi / tie_count

            weighted_mean += numer / (denom * tie_count)

        for xi, ei in diff_against:
            schoenfeld_residuals = np.append(schoenfeld_residuals, ei * (xi - weighted_mean), axis=0)

        # reset tie values
        tie_count = 0
        weight_count = 0.0
        tie_phi = 0
        tie_phi_x = np.zeros((1, d))
        diff_against = []

    return schoenfeld_residuals[::-1]
def function[_compute_schoenfeld_within_strata, parameter[self, X, T, E, weights]]: constant[ A positive value of the residual shows an X value that is higher than expected at that death time. ] <ast.Tuple object at 0x7da20c992c50> assign[=] name[X].shape if <ast.UnaryOp object at 0x7da20c992290> begin[:] return[call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da20c993a60>, <ast.Name object at 0x7da20c990f10>]]]]] <ast.Tuple object at 0x7da20c991ed0> assign[=] tuple[[<ast.Constant object at 0x7da20c991390>, <ast.Constant object at 0x7da20c991420>]] <ast.Tuple object at 0x7da20c9900a0> assign[=] tuple[[<ast.Call object at 0x7da20c992980>, <ast.Call object at 0x7da20c9902e0>]] variable[weight_count] assign[=] constant[0.0] variable[tie_count] assign[=] constant[0] variable[scores] assign[=] binary_operation[name[weights] * call[name[np].exp, parameter[call[name[np].dot, parameter[name[X], name[self].hazards_]]]]] variable[diff_against] assign[=] list[[]] variable[schoenfeld_residuals] assign[=] call[name[np].empty, parameter[tuple[[<ast.Constant object at 0x7da20c991f60>, <ast.Name object at 0x7da20c993880>]]]] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[n] - constant[1]], <ast.UnaryOp object at 0x7da20c992e30>, <ast.UnaryOp object at 0x7da20c991d80>]]] begin[:] variable[ti] assign[=] call[name[T]][name[i]] variable[ei] assign[=] call[name[E]][name[i]] variable[xi] assign[=] call[name[X]][<ast.Slice object at 0x7da20c993e80>] variable[score] assign[=] call[name[scores]][<ast.Slice object at 0x7da20c992f80>] variable[w] assign[=] call[name[weights]][name[i]] variable[phi_i] assign[=] name[score] variable[phi_x_i] assign[=] binary_operation[name[phi_i] * name[xi]] variable[risk_phi] assign[=] binary_operation[name[risk_phi] + name[phi_i]] variable[risk_phi_x] assign[=] binary_operation[name[risk_phi_x] + name[phi_x_i]] call[name[diff_against].append, parameter[tuple[[<ast.Name object at 0x7da20c993d00>, <ast.Name object at 0x7da20c990550>]]]] if name[ei] begin[:] variable[tie_phi] assign[=] binary_operation[name[tie_phi] + name[phi_i]] variable[tie_phi_x] assign[=] binary_operation[name[tie_phi_x] + name[phi_x_i]] <ast.AugAssign object at 0x7da20c991180> <ast.AugAssign object at 0x7da20c993970> if <ast.BoolOp object at 0x7da20c992d40> begin[:] continue variable[weighted_mean] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Constant object at 0x7da20c990e80>, <ast.Name object at 0x7da20c9937f0>]]]] for taget[name[l]] in starred[call[name[range], parameter[name[tie_count]]]] begin[:] variable[numer] assign[=] binary_operation[name[risk_phi_x] - binary_operation[binary_operation[name[l] * name[tie_phi_x]] / name[tie_count]]] variable[denom] assign[=] binary_operation[name[risk_phi] - binary_operation[binary_operation[name[l] * name[tie_phi]] / name[tie_count]]] <ast.AugAssign object at 0x7da20c992ef0> for taget[tuple[[<ast.Name object at 0x7da20c6ab9a0>, <ast.Name object at 0x7da20c6ab460>]]] in starred[name[diff_against]] begin[:] variable[schoenfeld_residuals] assign[=] call[name[np].append, parameter[name[schoenfeld_residuals], binary_operation[name[ei] * binary_operation[name[xi] - name[weighted_mean]]]]] variable[tie_count] assign[=] constant[0] variable[weight_count] assign[=] constant[0.0] variable[tie_phi] assign[=] constant[0] variable[tie_phi_x] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Constant object at 0x7da20c6a9cc0>, <ast.Name object at 0x7da20c6a9000>]]]] variable[diff_against] assign[=] list[[]] 
return[call[name[schoenfeld_residuals]][<ast.Slice object at 0x7da20c6a89d0>]]
keyword[def] identifier[_compute_schoenfeld_within_strata] ( identifier[self] , identifier[X] , identifier[T] , identifier[E] , identifier[weights] ): literal[string] identifier[n] , identifier[d] = identifier[X] . identifier[shape] keyword[if] keyword[not] identifier[np] . identifier[any] ( identifier[E] ): keyword[return] identifier[np] . identifier[zeros] (( identifier[n] , identifier[d] )) identifier[risk_phi] , identifier[tie_phi] = literal[int] , literal[int] identifier[risk_phi_x] , identifier[tie_phi_x] = identifier[np] . identifier[zeros] (( literal[int] , identifier[d] )), identifier[np] . identifier[zeros] (( literal[int] , identifier[d] )) identifier[weight_count] = literal[int] identifier[tie_count] = literal[int] identifier[scores] = identifier[weights] * identifier[np] . identifier[exp] ( identifier[np] . identifier[dot] ( identifier[X] , identifier[self] . identifier[hazards_] )) identifier[diff_against] =[] identifier[schoenfeld_residuals] = identifier[np] . identifier[empty] (( literal[int] , identifier[d] )) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] - literal[int] ,- literal[int] ,- literal[int] ): identifier[ti] = identifier[T] [ identifier[i] ] identifier[ei] = identifier[E] [ identifier[i] ] identifier[xi] = identifier[X] [ identifier[i] : identifier[i] + literal[int] ] identifier[score] = identifier[scores] [ identifier[i] : identifier[i] + literal[int] ] identifier[w] = identifier[weights] [ identifier[i] ] identifier[phi_i] = identifier[score] identifier[phi_x_i] = identifier[phi_i] * identifier[xi] identifier[risk_phi] = identifier[risk_phi] + identifier[phi_i] identifier[risk_phi_x] = identifier[risk_phi_x] + identifier[phi_x_i] identifier[diff_against] . identifier[append] (( identifier[xi] , identifier[ei] )) keyword[if] identifier[ei] : identifier[tie_phi] = identifier[tie_phi] + identifier[phi_i] identifier[tie_phi_x] = identifier[tie_phi_x] + identifier[phi_x_i] identifier[tie_count] += literal[int] identifier[weight_count] += identifier[w] keyword[if] identifier[i] > literal[int] keyword[and] identifier[T] [ identifier[i] - literal[int] ]== identifier[ti] : keyword[continue] keyword[elif] identifier[tie_count] == literal[int] : keyword[for] identifier[_] keyword[in] identifier[diff_against] : identifier[schoenfeld_residuals] = identifier[np] . identifier[append] ( identifier[schoenfeld_residuals] , identifier[np] . identifier[zeros] (( literal[int] , identifier[d] )), identifier[axis] = literal[int] ) identifier[diff_against] =[] keyword[continue] identifier[weighted_mean] = identifier[np] . identifier[zeros] (( literal[int] , identifier[d] )) keyword[for] identifier[l] keyword[in] identifier[range] ( identifier[tie_count] ): identifier[numer] = identifier[risk_phi_x] - identifier[l] * identifier[tie_phi_x] / identifier[tie_count] identifier[denom] = identifier[risk_phi] - identifier[l] * identifier[tie_phi] / identifier[tie_count] identifier[weighted_mean] += identifier[numer] /( identifier[denom] * identifier[tie_count] ) keyword[for] identifier[xi] , identifier[ei] keyword[in] identifier[diff_against] : identifier[schoenfeld_residuals] = identifier[np] . identifier[append] ( identifier[schoenfeld_residuals] , identifier[ei] *( identifier[xi] - identifier[weighted_mean] ), identifier[axis] = literal[int] ) identifier[tie_count] = literal[int] identifier[weight_count] = literal[int] identifier[tie_phi] = literal[int] identifier[tie_phi_x] = identifier[np] . 
identifier[zeros] (( literal[int] , identifier[d] )) identifier[diff_against] =[] keyword[return] identifier[schoenfeld_residuals] [::- literal[int] ]
def _compute_schoenfeld_within_strata(self, X, T, E, weights):
    """
    A positive value of the residual shows an X value that is higher
    than expected at that death time.
    """
    # TODO: the diff_against is gross
    # This uses Efron ties.
    (n, d) = X.shape
    if not np.any(E):
        # sometimes strata have no deaths. This means nothing is returned
        # in the below code.
        return np.zeros((n, d)) # depends on [control=['if'], data=[]]
    # Init risk and tie sums to zero
    (risk_phi, tie_phi) = (0, 0)
    (risk_phi_x, tie_phi_x) = (np.zeros((1, d)), np.zeros((1, d)))
    # Init number of ties and weights
    weight_count = 0.0
    tie_count = 0
    scores = weights * np.exp(np.dot(X, self.hazards_))
    diff_against = []
    schoenfeld_residuals = np.empty((0, d))
    # Iterate backwards to utilize recursive relationship
    for i in range(n - 1, -1, -1):
        # Doing it like this to preserve shape
        ti = T[i]
        ei = E[i]
        xi = X[i:i + 1]
        score = scores[i:i + 1]
        w = weights[i]
        # Calculate phi values
        phi_i = score
        phi_x_i = phi_i * xi
        # Calculate sums of Risk set
        risk_phi = risk_phi + phi_i
        risk_phi_x = risk_phi_x + phi_x_i
        # Calculate sums of Ties, if this is an event
        diff_against.append((xi, ei))
        if ei:
            tie_phi = tie_phi + phi_i
            tie_phi_x = tie_phi_x + phi_x_i
            # Keep track of count
            tie_count += 1  # aka death counts
            weight_count += w # depends on [control=['if'], data=[]]
        if i > 0 and T[i - 1] == ti:
            # There are more ties/members of the risk set
            continue # depends on [control=['if'], data=[]]
        elif tie_count == 0:
            for _ in diff_against:
                schoenfeld_residuals = np.append(schoenfeld_residuals, np.zeros((1, d)), axis=0) # depends on [control=['for'], data=[]]
            diff_against = []
            continue # depends on [control=['if'], data=[]]
        # There was at least one event and no more ties remain. Time to sum.
        weighted_mean = np.zeros((1, d))
        for l in range(tie_count):
            numer = risk_phi_x - l * tie_phi_x / tie_count
            denom = risk_phi - l * tie_phi / tie_count
            weighted_mean += numer / (denom * tie_count) # depends on [control=['for'], data=['l']]
        for (xi, ei) in diff_against:
            schoenfeld_residuals = np.append(schoenfeld_residuals, ei * (xi - weighted_mean), axis=0) # depends on [control=['for'], data=[]]
        # reset tie values
        tie_count = 0
        weight_count = 0.0
        tie_phi = 0
        tie_phi_x = np.zeros((1, d))
        diff_against = [] # depends on [control=['for'], data=['i']]
    return schoenfeld_residuals[::-1]
def open(cls, filename, band_names=None, lazy_load=True, mutable=False, **kwargs):
    """
    Read a georaster from a file.

    :param filename: url
    :param band_names: list of strings, or string.
        if None - will try to read from image, otherwise - these will be ['0', ..]
    :param lazy_load: if True - do not load anything
    :return: GeoRaster2
    """
    if mutable:
        geo_raster = MutableGeoRaster(filename=filename, band_names=band_names, **kwargs)
    else:
        geo_raster = cls(filename=filename, band_names=band_names, **kwargs)
    if not lazy_load:
        geo_raster._populate_from_rasterio_object(read_image=True)
    return geo_raster
def function[open, parameter[cls, filename, band_names, lazy_load, mutable]]: constant[ Read a georaster from a file. :param filename: url :param band_names: list of strings, or string. if None - will try to read from image, otherwise - these will be ['0', ..] :param lazy_load: if True - do not load anything :return: GeoRaster2 ] if name[mutable] begin[:] variable[geo_raster] assign[=] call[name[MutableGeoRaster], parameter[]] if <ast.UnaryOp object at 0x7da204623430> begin[:] call[name[geo_raster]._populate_from_rasterio_object, parameter[]] return[name[geo_raster]]
keyword[def] identifier[open] ( identifier[cls] , identifier[filename] , identifier[band_names] = keyword[None] , identifier[lazy_load] = keyword[True] , identifier[mutable] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[mutable] : identifier[geo_raster] = identifier[MutableGeoRaster] ( identifier[filename] = identifier[filename] , identifier[band_names] = identifier[band_names] ,** identifier[kwargs] ) keyword[else] : identifier[geo_raster] = identifier[cls] ( identifier[filename] = identifier[filename] , identifier[band_names] = identifier[band_names] ,** identifier[kwargs] ) keyword[if] keyword[not] identifier[lazy_load] : identifier[geo_raster] . identifier[_populate_from_rasterio_object] ( identifier[read_image] = keyword[True] ) keyword[return] identifier[geo_raster]
def open(cls, filename, band_names=None, lazy_load=True, mutable=False, **kwargs):
    """
    Read a georaster from a file.

    :param filename: url
    :param band_names: list of strings, or string.
        if None - will try to read from image, otherwise - these will be ['0', ..]
    :param lazy_load: if True - do not load anything
    :return: GeoRaster2
    """
    if mutable:
        geo_raster = MutableGeoRaster(filename=filename, band_names=band_names, **kwargs) # depends on [control=['if'], data=[]]
    else:
        geo_raster = cls(filename=filename, band_names=band_names, **kwargs)
    if not lazy_load:
        geo_raster._populate_from_rasterio_object(read_image=True) # depends on [control=['if'], data=[]]
    return geo_raster
def get_sort_indicator(self, field):
    """
    Returns a sort class for the active sort only. That is, if field is
    not sort_field, then nothing will be returned because the sort is
    not active.
    """
    indicator = ''
    if field == self.sort_field:
        indicator = 'sort-asc'
        if self.sort_order == '-':
            indicator = 'sort-desc'
    return indicator
def function[get_sort_indicator, parameter[self, field]]: constant[ Returns a sort class for the active sort only. That is, if field is not sort_field, then nothing will be returned becaues the sort is not active. ] variable[indicator] assign[=] constant[] if compare[name[field] equal[==] name[self].sort_field] begin[:] variable[indicator] assign[=] constant[sort-asc] if compare[name[self].sort_order equal[==] constant[-]] begin[:] variable[indicator] assign[=] constant[sort-desc] return[name[indicator]]
keyword[def] identifier[get_sort_indicator] ( identifier[self] , identifier[field] ): literal[string] identifier[indicator] = literal[string] keyword[if] identifier[field] == identifier[self] . identifier[sort_field] : identifier[indicator] = literal[string] keyword[if] identifier[self] . identifier[sort_order] == literal[string] : identifier[indicator] = literal[string] keyword[return] identifier[indicator]
def get_sort_indicator(self, field):
    """
    Returns a sort class for the active sort only. That is, if field is
    not sort_field, then nothing will be returned because the sort is
    not active.
    """
    indicator = ''
    if field == self.sort_field:
        indicator = 'sort-asc'
        if self.sort_order == '-':
            indicator = 'sort-desc' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    return indicator
def p_initial(self, p):
    'initial : INITIAL initial_statement'
    p[0] = Initial(p[2], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
def function[p_initial, parameter[self, p]]: constant[initial : INITIAL initial_statement] call[name[p]][constant[0]] assign[=] call[name[Initial], parameter[call[name[p]][constant[2]]]] call[name[p].set_lineno, parameter[constant[0], call[name[p].lineno, parameter[constant[1]]]]]
keyword[def] identifier[p_initial] ( identifier[self] , identifier[p] ): literal[string] identifier[p] [ literal[int] ]= identifier[Initial] ( identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] )) identifier[p] . identifier[set_lineno] ( literal[int] , identifier[p] . identifier[lineno] ( literal[int] ))
def p_initial(self, p):
    """initial : INITIAL initial_statement"""
    p[0] = Initial(p[2], lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
def makeWritePacket(ID, reg, values=None):
    """
    Creates a packet that writes a value(s) to servo ID at location reg. Make
    sure the values are in little endian (use Packet.le() if necessary) for
    16 b (word size) values.
    """
    pkt = makePacket(ID, xl320.XL320_WRITE, reg, values)
    return pkt
def function[makeWritePacket, parameter[ID, reg, values]]: constant[ Creates a packet that writes a value(s) to servo ID at location reg. Make sure the values are in little endian (use Packet.le() if necessary) for 16 b (word size) values. ] variable[pkt] assign[=] call[name[makePacket], parameter[name[ID], name[xl320].XL320_WRITE, name[reg], name[values]]] return[name[pkt]]
keyword[def] identifier[makeWritePacket] ( identifier[ID] , identifier[reg] , identifier[values] = keyword[None] ): literal[string] identifier[pkt] = identifier[makePacket] ( identifier[ID] , identifier[xl320] . identifier[XL320_WRITE] , identifier[reg] , identifier[values] ) keyword[return] identifier[pkt]
def makeWritePacket(ID, reg, values=None):
    """
    Creates a packet that writes a value(s) to servo ID at location reg. Make
    sure the values are in little endian (use Packet.le() if necessary) for
    16 b (word size) values.
    """
    pkt = makePacket(ID, xl320.XL320_WRITE, reg, values)
    return pkt
def cuts_connections(self, a, b):
    """Check if this cut severs any connections from ``a`` to ``b``.

    Args:
        a (tuple[int]): A set of nodes.
        b (tuple[int]): A set of nodes.
    """
    n = max(self.indices) + 1
    return self.cut_matrix(n)[np.ix_(a, b)].any()
def function[cuts_connections, parameter[self, a, b]]: constant[Check if this cut severs any connections from ``a`` to ``b``. Args: a (tuple[int]): A set of nodes. b (tuple[int]): A set of nodes. ] variable[n] assign[=] binary_operation[call[name[max], parameter[name[self].indices]] + constant[1]] return[call[call[call[name[self].cut_matrix, parameter[name[n]]]][call[name[np].ix_, parameter[name[a], name[b]]]].any, parameter[]]]
keyword[def] identifier[cuts_connections] ( identifier[self] , identifier[a] , identifier[b] ): literal[string] identifier[n] = identifier[max] ( identifier[self] . identifier[indices] )+ literal[int] keyword[return] identifier[self] . identifier[cut_matrix] ( identifier[n] )[ identifier[np] . identifier[ix_] ( identifier[a] , identifier[b] )]. identifier[any] ()
def cuts_connections(self, a, b):
    """Check if this cut severs any connections from ``a`` to ``b``.

    Args:
        a (tuple[int]): A set of nodes.
        b (tuple[int]): A set of nodes.
    """
    n = max(self.indices) + 1
    return self.cut_matrix(n)[np.ix_(a, b)].any()
def single_conv_dist(name, x, output_channels=None):
    """A 3x3 convolution mapping x to a standard normal distribution at init.

    Args:
        name: variable scope.
        x: 4-D Tensor.
        output_channels: number of channels of the mean and std.
    """
    with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
        x_shape = common_layers.shape_list(x)
        if output_channels is None:
            output_channels = x_shape[-1]
        mean_log_scale = conv("conv2d", x, output_channels=2*output_channels,
                              conv_init="zeros", apply_actnorm=False)
        mean = mean_log_scale[:, :, :, 0::2]
        log_scale = mean_log_scale[:, :, :, 1::2]
        return tf.distributions.Normal(mean, tf.exp(log_scale))
def function[single_conv_dist, parameter[name, x, output_channels]]: constant[A 3x3 convolution mapping x to a standard normal distribution at init. Args: name: variable scope. x: 4-D Tensor. output_channels: number of channels of the mean and std. ] with call[name[tf].variable_scope, parameter[name[name]]] begin[:] variable[x_shape] assign[=] call[name[common_layers].shape_list, parameter[name[x]]] if compare[name[output_channels] is constant[None]] begin[:] variable[output_channels] assign[=] call[name[x_shape]][<ast.UnaryOp object at 0x7da1b20fb190>] variable[mean_log_scale] assign[=] call[name[conv], parameter[constant[conv2d], name[x]]] variable[mean] assign[=] call[name[mean_log_scale]][tuple[[<ast.Slice object at 0x7da1b20faaa0>, <ast.Slice object at 0x7da1b20f9750>, <ast.Slice object at 0x7da1b20f8d00>, <ast.Slice object at 0x7da1b20f8af0>]]] variable[log_scale] assign[=] call[name[mean_log_scale]][tuple[[<ast.Slice object at 0x7da1b20f8220>, <ast.Slice object at 0x7da1b20fb3a0>, <ast.Slice object at 0x7da1b20faec0>, <ast.Slice object at 0x7da1b20f8a90>]]] return[call[name[tf].distributions.Normal, parameter[name[mean], call[name[tf].exp, parameter[name[log_scale]]]]]]
keyword[def] identifier[single_conv_dist] ( identifier[name] , identifier[x] , identifier[output_channels] = keyword[None] ): literal[string] keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[name] , identifier[reuse] = identifier[tf] . identifier[AUTO_REUSE] ): identifier[x_shape] = identifier[common_layers] . identifier[shape_list] ( identifier[x] ) keyword[if] identifier[output_channels] keyword[is] keyword[None] : identifier[output_channels] = identifier[x_shape] [- literal[int] ] identifier[mean_log_scale] = identifier[conv] ( literal[string] , identifier[x] , identifier[output_channels] = literal[int] * identifier[output_channels] , identifier[conv_init] = literal[string] , identifier[apply_actnorm] = keyword[False] ) identifier[mean] = identifier[mean_log_scale] [:,:,:, literal[int] :: literal[int] ] identifier[log_scale] = identifier[mean_log_scale] [:,:,:, literal[int] :: literal[int] ] keyword[return] identifier[tf] . identifier[distributions] . identifier[Normal] ( identifier[mean] , identifier[tf] . identifier[exp] ( identifier[log_scale] ))
def single_conv_dist(name, x, output_channels=None):
    """A 3x3 convolution mapping x to a standard normal distribution at init.

    Args:
        name: variable scope.
        x: 4-D Tensor.
        output_channels: number of channels of the mean and std.
    """
    with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
        x_shape = common_layers.shape_list(x)
        if output_channels is None:
            output_channels = x_shape[-1] # depends on [control=['if'], data=['output_channels']]
        mean_log_scale = conv('conv2d', x, output_channels=2 * output_channels, conv_init='zeros', apply_actnorm=False)
        mean = mean_log_scale[:, :, :, 0::2]
        log_scale = mean_log_scale[:, :, :, 1::2]
        return tf.distributions.Normal(mean, tf.exp(log_scale)) # depends on [control=['with'], data=[]]
def remove(self, dist):
    """Remove `dist` from the distribution map"""
    while dist.location in self.paths:
        self.paths.remove(dist.location)
        self.dirty = True
    Environment.remove(self, dist)
def function[remove, parameter[self, dist]]: constant[Remove `dist` from the distribution map] while compare[name[dist].location in name[self].paths] begin[:] call[name[self].paths.remove, parameter[name[dist].location]] name[self].dirty assign[=] constant[True] call[name[Environment].remove, parameter[name[self], name[dist]]]
keyword[def] identifier[remove] ( identifier[self] , identifier[dist] ): literal[string] keyword[while] identifier[dist] . identifier[location] keyword[in] identifier[self] . identifier[paths] : identifier[self] . identifier[paths] . identifier[remove] ( identifier[dist] . identifier[location] ) identifier[self] . identifier[dirty] = keyword[True] identifier[Environment] . identifier[remove] ( identifier[self] , identifier[dist] )
def remove(self, dist):
    """Remove `dist` from the distribution map"""
    while dist.location in self.paths:
        self.paths.remove(dist.location)
        self.dirty = True # depends on [control=['while'], data=[]]
    Environment.remove(self, dist)
def _execute(self, endpoint, database, query, default_timeout, properties=None):
    """Executes given query against this client"""
    request_payload = {"db": database, "csl": query}
    if properties:
        request_payload["properties"] = properties.to_json()

    request_headers = {
        "Accept": "application/json",
        "Accept-Encoding": "gzip,deflate",
        "Content-Type": "application/json; charset=utf-8",
        "x-ms-client-version": "Kusto.Python.Client:" + VERSION,
        "x-ms-client-request-id": "KPC.execute;" + str(uuid.uuid4()),
    }

    if self._auth_provider:
        request_headers["Authorization"] = self._auth_provider.acquire_authorization_header()

    timeout = self._get_timeout(properties, default_timeout)

    response = self._session.post(endpoint, headers=request_headers, json=request_payload, timeout=timeout.seconds)

    if response.status_code == 200:
        if endpoint.endswith("v2/rest/query"):
            return KustoResponseDataSetV2(response.json())
        return KustoResponseDataSetV1(response.json())

    raise KustoServiceError([response.json()], response)
def function[_execute, parameter[self, endpoint, database, query, default_timeout, properties]]: constant[Executes given query against this client] variable[request_payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b23447f0>, <ast.Constant object at 0x7da1b23477f0>], [<ast.Name object at 0x7da1b2344550>, <ast.Name object at 0x7da1b2347760>]] if name[properties] begin[:] call[name[request_payload]][constant[properties]] assign[=] call[name[properties].to_json, parameter[]] variable[request_headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b2345c60>, <ast.Constant object at 0x7da1b2347730>, <ast.Constant object at 0x7da1b2344580>, <ast.Constant object at 0x7da1b2346fb0>, <ast.Constant object at 0x7da1b2345990>], [<ast.Constant object at 0x7da1b23458a0>, <ast.Constant object at 0x7da1b2345d20>, <ast.Constant object at 0x7da1b2346620>, <ast.BinOp object at 0x7da1b2347640>, <ast.BinOp object at 0x7da1b2344220>]] if name[self]._auth_provider begin[:] call[name[request_headers]][constant[Authorization]] assign[=] call[name[self]._auth_provider.acquire_authorization_header, parameter[]] variable[timeout] assign[=] call[name[self]._get_timeout, parameter[name[properties], name[default_timeout]]] variable[response] assign[=] call[name[self]._session.post, parameter[name[endpoint]]] if compare[name[response].status_code equal[==] constant[200]] begin[:] if call[name[endpoint].endswith, parameter[constant[v2/rest/query]]] begin[:] return[call[name[KustoResponseDataSetV2], parameter[call[name[response].json, parameter[]]]]] return[call[name[KustoResponseDataSetV1], parameter[call[name[response].json, parameter[]]]]] <ast.Raise object at 0x7da1b16be650>
keyword[def] identifier[_execute] ( identifier[self] , identifier[endpoint] , identifier[database] , identifier[query] , identifier[default_timeout] , identifier[properties] = keyword[None] ): literal[string] identifier[request_payload] ={ literal[string] : identifier[database] , literal[string] : identifier[query] } keyword[if] identifier[properties] : identifier[request_payload] [ literal[string] ]= identifier[properties] . identifier[to_json] () identifier[request_headers] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] + identifier[VERSION] , literal[string] : literal[string] + identifier[str] ( identifier[uuid] . identifier[uuid4] ()), } keyword[if] identifier[self] . identifier[_auth_provider] : identifier[request_headers] [ literal[string] ]= identifier[self] . identifier[_auth_provider] . identifier[acquire_authorization_header] () identifier[timeout] = identifier[self] . identifier[_get_timeout] ( identifier[properties] , identifier[default_timeout] ) identifier[response] = identifier[self] . identifier[_session] . identifier[post] ( identifier[endpoint] , identifier[headers] = identifier[request_headers] , identifier[json] = identifier[request_payload] , identifier[timeout] = identifier[timeout] . identifier[seconds] ) keyword[if] identifier[response] . identifier[status_code] == literal[int] : keyword[if] identifier[endpoint] . identifier[endswith] ( literal[string] ): keyword[return] identifier[KustoResponseDataSetV2] ( identifier[response] . identifier[json] ()) keyword[return] identifier[KustoResponseDataSetV1] ( identifier[response] . identifier[json] ()) keyword[raise] identifier[KustoServiceError] ([ identifier[response] . identifier[json] ()], identifier[response] )
def _execute(self, endpoint, database, query, default_timeout, properties=None):
    """Executes given query against this client"""
    request_payload = {'db': database, 'csl': query}
    if properties:
        request_payload['properties'] = properties.to_json() # depends on [control=['if'], data=[]]
    request_headers = {'Accept': 'application/json', 'Accept-Encoding': 'gzip,deflate', 'Content-Type': 'application/json; charset=utf-8', 'x-ms-client-version': 'Kusto.Python.Client:' + VERSION, 'x-ms-client-request-id': 'KPC.execute;' + str(uuid.uuid4())}
    if self._auth_provider:
        request_headers['Authorization'] = self._auth_provider.acquire_authorization_header() # depends on [control=['if'], data=[]]
    timeout = self._get_timeout(properties, default_timeout)
    response = self._session.post(endpoint, headers=request_headers, json=request_payload, timeout=timeout.seconds)
    if response.status_code == 200:
        if endpoint.endswith('v2/rest/query'):
            return KustoResponseDataSetV2(response.json()) # depends on [control=['if'], data=[]]
        return KustoResponseDataSetV1(response.json()) # depends on [control=['if'], data=[]]
    raise KustoServiceError([response.json()], response)
def return_tip(self, home_after=True):
        """
        Drop the pipette's current tip to its originating tip rack

        Notes
        -----
        This method requires one or more tip-rack :any:`Container` to be in
        this Pipette's `tip_racks` list (see :any:`Pipette`)

        Returns
        -------
        This instance of :class:`Pipette`.

        Examples
        --------
        ..
        >>> from opentrons import instruments, labware, robot # doctest: +SKIP
        >>> robot.reset() # doctest: +SKIP
        >>> plate = labware.load('96-flat', '1') # doctest: +SKIP
        >>> tiprack = labware.load('GEB-tiprack-300', '2') # doctest: +SKIP
        >>> p300 = instruments.P300_Single(mount='left',
        ...     tip_racks=[tiprack]) # doctest: +SKIP
        >>> p300.pick_up_tip() # doctest: +SKIP
        >>> p300.aspirate(50, plate[0]) # doctest: +SKIP
        >>> p300.dispense(plate[1]) # doctest: +SKIP
        >>> p300.return_tip() # doctest: +SKIP
        """
        if not self.tip_attached:
            log.warning("Cannot return tip without tip attached.")
        if not self.current_tip():
            self.robot.add_warning(
                'Pipette has no tip to return, dropping in place')

        self.drop_tip(self.current_tip(), home_after=home_after)

        return self
def function[return_tip, parameter[self, home_after]]: constant[ Drop the pipette's current tip to it's originating tip rack Notes ----- This method requires one or more tip-rack :any:`Container` to be in this Pipette's `tip_racks` list (see :any:`Pipette`) Returns ------- This instance of :class:`Pipette`. Examples -------- .. >>> from opentrons import instruments, labware, robot # doctest: +SKIP >>> robot.reset() # doctest: +SKIP >>> tiprack = labware.load('GEB-tiprack-300', '2') # doctest: +SKIP >>> p300 = instruments.P300_Single(mount='left', ... tip_racks=[tiprack, tiprack2]) # doctest: +SKIP >>> p300.pick_up_tip() # doctest: +SKIP >>> p300.aspirate(50, plate[0]) # doctest: +SKIP >>> p300.dispense(plate[1]) # doctest: +SKIP >>> p300.return_tip() # doctest: +SKIP ] if <ast.UnaryOp object at 0x7da1b086ec80> begin[:] call[name[log].warning, parameter[constant[Cannot return tip without tip attached.]]] if <ast.UnaryOp object at 0x7da1b086d3c0> begin[:] call[name[self].robot.add_warning, parameter[constant[Pipette has no tip to return, dropping in place]]] call[name[self].drop_tip, parameter[call[name[self].current_tip, parameter[]]]] return[name[self]]
keyword[def] identifier[return_tip] ( identifier[self] , identifier[home_after] = keyword[True] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[tip_attached] : identifier[log] . identifier[warning] ( literal[string] ) keyword[if] keyword[not] identifier[self] . identifier[current_tip] (): identifier[self] . identifier[robot] . identifier[add_warning] ( literal[string] ) identifier[self] . identifier[drop_tip] ( identifier[self] . identifier[current_tip] (), identifier[home_after] = identifier[home_after] ) keyword[return] identifier[self]
def return_tip(self, home_after=True):
    """
    Drop the pipette's current tip to its originating tip rack

    Notes
    -----
    This method requires one or more tip-rack :any:`Container` to be in
    this Pipette's `tip_racks` list (see :any:`Pipette`)

    Returns
    -------
    This instance of :class:`Pipette`.

    Examples
    --------
    ..
    >>> from opentrons import instruments, labware, robot # doctest: +SKIP
    >>> robot.reset() # doctest: +SKIP
    >>> plate = labware.load('96-flat', '1') # doctest: +SKIP
    >>> tiprack = labware.load('GEB-tiprack-300', '2') # doctest: +SKIP
    >>> p300 = instruments.P300_Single(mount='left',
    ...     tip_racks=[tiprack]) # doctest: +SKIP
    >>> p300.pick_up_tip() # doctest: +SKIP
    >>> p300.aspirate(50, plate[0]) # doctest: +SKIP
    >>> p300.dispense(plate[1]) # doctest: +SKIP
    >>> p300.return_tip() # doctest: +SKIP
    """
    if not self.tip_attached:
        log.warning('Cannot return tip without tip attached.') # depends on [control=['if'], data=[]]
    if not self.current_tip():
        self.robot.add_warning('Pipette has no tip to return, dropping in place') # depends on [control=['if'], data=[]]
    self.drop_tip(self.current_tip(), home_after=home_after)
    return self
def dragEnterEvent(self, event):
        """
        Listens for queries being dragged and dropped onto this tree.

        :param      event | <QDragEnterEvent>
        """
        data = event.mimeData()
        if data.hasFormat('application/x-orb-table') and \
           data.hasFormat('application/x-orb-query'):
            tableName = self.tableTypeName()
            if nativestring(data.data('application/x-orb-table')) == tableName:
                event.acceptProposedAction()
                return

        super(XOrbTreeWidget, self).dragEnterEvent(event)
def function[dragEnterEvent, parameter[self, event]]: constant[ Listens for query's being dragged and dropped onto this tree. :param event | <QDragEnterEvent> ] variable[data] assign[=] call[name[event].mimeData, parameter[]] if <ast.BoolOp object at 0x7da18eb55a80> begin[:] variable[tableName] assign[=] call[name[self].tableTypeName, parameter[]] if compare[call[name[nativestring], parameter[call[name[data].data, parameter[constant[application/x-orb-table]]]]] equal[==] name[tableName]] begin[:] call[name[event].acceptProposedAction, parameter[]] return[None] call[call[name[super], parameter[name[XOrbTreeWidget], name[self]]].dragEnterEvent, parameter[name[event]]]
keyword[def] identifier[dragEnterEvent] ( identifier[self] , identifier[event] ): literal[string] identifier[data] = identifier[event] . identifier[mimeData] () keyword[if] identifier[data] . identifier[hasFormat] ( literal[string] ) keyword[and] identifier[data] . identifier[hasFormat] ( literal[string] ): identifier[tableName] = identifier[self] . identifier[tableTypeName] () keyword[if] identifier[nativestring] ( identifier[data] . identifier[data] ( literal[string] ))== identifier[tableName] : identifier[event] . identifier[acceptProposedAction] () keyword[return] identifier[super] ( identifier[XOrbTreeWidget] , identifier[self] ). identifier[dragEnterEvent] ( identifier[event] )
def dragEnterEvent(self, event):
    """
    Listens for queries being dragged and dropped onto this tree.

    :param      event | <QDragEnterEvent>
    """
    data = event.mimeData()
    if data.hasFormat('application/x-orb-table') and data.hasFormat('application/x-orb-query'):
        tableName = self.tableTypeName()
        if nativestring(data.data('application/x-orb-table')) == tableName:
            event.acceptProposedAction()
            return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    super(XOrbTreeWidget, self).dragEnterEvent(event)
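A minimal sketch of the same accept-only-matching-drags pattern, assuming PyQt5 is available; the ORB-specific widget is replaced by a plain QTreeWidget, so this is an illustration, not the library's actual class.

# Sketch only: assumes PyQt5; the ORB machinery is omitted.
from PyQt5.QtWidgets import QTreeWidget

class DropTree(QTreeWidget):
    def dragEnterEvent(self, event):
        data = event.mimeData()
        # accept only drags that carry the custom ORB payload
        if data.hasFormat('application/x-orb-table') and \
           data.hasFormat('application/x-orb-query'):
            event.acceptProposedAction()
            return
        super().dragEnterEvent(event)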
def remove_entry_listener(self, registration_id): """ Removes the specified entry listener. Returns silently if there is no such listener added before. :param registration_id: (str), id of registered listener. :return: (bool), ``true`` if registration is removed, ``false`` otherwise. """ return self._stop_listening(registration_id, lambda i: multi_map_remove_entry_listener_codec.encode_request(self.name, i))
def function[remove_entry_listener, parameter[self, registration_id]]: constant[ Removes the specified entry listener. Returns silently if there is no such listener added before. :param registration_id: (str), id of registered listener. :return: (bool), ``true`` if registration is removed, ``false`` otherwise. ] return[call[name[self]._stop_listening, parameter[name[registration_id], <ast.Lambda object at 0x7da1b1720430>]]]
keyword[def] identifier[remove_entry_listener] ( identifier[self] , identifier[registration_id] ): literal[string] keyword[return] identifier[self] . identifier[_stop_listening] ( identifier[registration_id] , keyword[lambda] identifier[i] : identifier[multi_map_remove_entry_listener_codec] . identifier[encode_request] ( identifier[self] . identifier[name] , identifier[i] ))
def remove_entry_listener(self, registration_id): """ Removes the specified entry listener. Returns silently if there is no such listener added before. :param registration_id: (str), id of registered listener. :return: (bool), ``true`` if registration is removed, ``false`` otherwise. """ return self._stop_listening(registration_id, lambda i: multi_map_remove_entry_listener_codec.encode_request(self.name, i))
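A hedged sketch of the add/remove listener round trip; the client setup and listener call below are assumptions about the Hazelcast Python client API, not taken from this row.

# Assumed Hazelcast Python client usage; verify against its docs.
import hazelcast

client = hazelcast.HazelcastClient()
multi_map = client.get_multi_map("my-multi-map").blocking()

registration_id = multi_map.add_entry_listener(include_value=True)
# ... later: returns False if no such listener was registered
removed = multi_map.remove_entry_listener(registration_id)
client.shutdown()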
def get_agg(self):
        """
        Returns the aggregated value for the metric

        :return: the value of the metric
        """
        query = self.get_query(False)
        res = self.get_metrics_data(query)
        # We need to extract the data from the JSON res
        # If we have agg data use it
        agg_id = str(ElasticQuery.AGGREGATION_ID)
        if 'aggregations' in res and 'values' in res['aggregations'][agg_id]:
            if self.AGG_TYPE == 'median':
                agg = res['aggregations'][agg_id]['values']["50.0"]
                if agg == 'NaN':
                    # ES returns NaN. Convert to None for matplotlib graph
                    agg = None
            else:
                raise RuntimeError("Multivalue aggregation result not supported")
        elif 'aggregations' in res and 'value' in res['aggregations'][agg_id]:
            agg = res['aggregations'][agg_id]['value']
        else:
            agg = res['hits']['total']

        return agg
def function[get_agg, parameter[self]]: constant[ Returns the aggregated value for the metric :return: the value of the metric ] constant[ Returns an aggregated value ] variable[query] assign[=] call[name[self].get_query, parameter[constant[False]]] variable[res] assign[=] call[name[self].get_metrics_data, parameter[name[query]]] variable[agg_id] assign[=] call[name[str], parameter[name[ElasticQuery].AGGREGATION_ID]] if <ast.BoolOp object at 0x7da1b268c6d0> begin[:] if compare[name[self].AGG_TYPE equal[==] constant[median]] begin[:] variable[agg] assign[=] call[call[call[call[name[res]][constant[aggregations]]][name[agg_id]]][constant[values]]][constant[50.0]] if compare[name[agg] equal[==] constant[NaN]] begin[:] variable[agg] assign[=] constant[None] return[name[agg]]
keyword[def] identifier[get_agg] ( identifier[self] ): literal[string] literal[string] identifier[query] = identifier[self] . identifier[get_query] ( keyword[False] ) identifier[res] = identifier[self] . identifier[get_metrics_data] ( identifier[query] ) identifier[agg_id] = identifier[str] ( identifier[ElasticQuery] . identifier[AGGREGATION_ID] ) keyword[if] literal[string] keyword[in] identifier[res] keyword[and] literal[string] keyword[in] identifier[res] [ literal[string] ][ identifier[agg_id] ]: keyword[if] identifier[self] . identifier[AGG_TYPE] == literal[string] : identifier[agg] = identifier[res] [ literal[string] ][ identifier[agg_id] ][ literal[string] ][ literal[string] ] keyword[if] identifier[agg] == literal[string] : identifier[agg] = keyword[None] keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[elif] literal[string] keyword[in] identifier[res] keyword[and] literal[string] keyword[in] identifier[res] [ literal[string] ][ identifier[agg_id] ]: identifier[agg] = identifier[res] [ literal[string] ][ identifier[agg_id] ][ literal[string] ] keyword[else] : identifier[agg] = identifier[res] [ literal[string] ][ literal[string] ] keyword[return] identifier[agg]
def get_agg(self):
    """
    Returns the aggregated value for the metric

    :return: the value of the metric
    """
    query = self.get_query(False)
    res = self.get_metrics_data(query)
    # We need to extract the data from the JSON res
    # If we have agg data use it
    agg_id = str(ElasticQuery.AGGREGATION_ID)
    if 'aggregations' in res and 'values' in res['aggregations'][agg_id]:
        if self.AGG_TYPE == 'median':
            agg = res['aggregations'][agg_id]['values']['50.0']
            if agg == 'NaN':
                # ES returns NaN. Convert to None for matplotlib graph
                agg = None # depends on [control=['if'], data=['agg']] # depends on [control=['if'], data=[]]
        else:
            raise RuntimeError('Multivalue aggregation result not supported') # depends on [control=['if'], data=[]]
    elif 'aggregations' in res and 'value' in res['aggregations'][agg_id]:
        agg = res['aggregations'][agg_id]['value'] # depends on [control=['if'], data=[]]
    else:
        agg = res['hits']['total']
    return agg
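The branch logic above can be exercised without Elasticsearch; this runnable snippet uses a fabricated response dict shaped like the ones the method inspects.

# Fabricated Elasticsearch-style response for illustration.
agg_id = "1"
res = {"aggregations": {agg_id: {"value": 42.0}}, "hits": {"total": 1000}}

if "aggregations" in res and "values" in res["aggregations"][agg_id]:
    agg = res["aggregations"][agg_id]["values"]["50.0"]  # median case
elif "aggregations" in res and "value" in res["aggregations"][agg_id]:
    agg = res["aggregations"][agg_id]["value"]
else:
    agg = res["hits"]["total"]
print(agg)  # -> 42.0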
def on_hook(self, hook):
        # type: (Hook) -> None
        """Takes a hook and, if a function is registered for its type, calls the hook with that function and its generated arguments"""
        try:
            func, args_gen = self.hooked[type(hook)]
        except (KeyError, TypeError):
            return
        else:
            hook(func, args_gen())
def function[on_hook, parameter[self, hook]]: constant[Takes a hook, and optionally calls hook.run on a function] <ast.Try object at 0x7da18ede57e0>
keyword[def] identifier[on_hook] ( identifier[self] , identifier[hook] ): literal[string] keyword[try] : identifier[func] , identifier[args_gen] = identifier[self] . identifier[hooked] [ identifier[type] ( identifier[hook] )] keyword[except] ( identifier[KeyError] , identifier[TypeError] ): keyword[return] keyword[else] : identifier[hook] ( identifier[func] , identifier[args_gen] ())
def on_hook(self, hook): # type: (Hook) -> None
    'Takes a hook and, if a function is registered for its type, calls the hook with that function and its generated arguments'
    try:
        (func, args_gen) = self.hooked[type(hook)] # depends on [control=['try'], data=[]]
    except (KeyError, TypeError):
        return # depends on [control=['except'], data=[]]
    else:
        hook(func, args_gen())
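A self-contained sketch of the dispatch pattern; the Hook class and the hooked mapping below are fabricated to match the shape used above.

# Fabricated Hook/Dispatcher pair mirroring the on_hook lookup.
class PingHook:
    def __call__(self, func, args):
        func(*args)

class Dispatcher:
    def __init__(self):
        # hook type -> (function, zero-arg generator of its arguments)
        self.hooked = {PingHook: (print, lambda: ("ping",))}

    def on_hook(self, hook):
        try:
            func, args_gen = self.hooked[type(hook)]
        except (KeyError, TypeError):
            return
        hook(func, args_gen())

Dispatcher().on_hook(PingHook())  # prints: ping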
def get(self, key, default=None): """ Returns the value of the key or the default value if the key is not yet in gconf """ #function arguments override defaults if default is None: default = self.DEFAULTS.get(key, None) vtype = type(default) #we now have a valid key and type if default is None: logger.warn("Unknown key: %s, must specify default value" % key) return None if vtype not in self.VALID_KEY_TYPES: logger.warn("Invalid key type: %s" % vtype) return None #for gconf refer to the full key path key = self._fix_key(key) if key not in self._notifications: self._client.notify_add(key, self._key_changed, None) self._notifications.append(key) value = self._client.get(key) if value is None: self.set(key, default) return default value = self._get_value(value, default) if value is not None: return value logger.warn("Unknown gconf key: %s" % key) return None
def function[get, parameter[self, key, default]]: constant[ Returns the value of the key or the default value if the key is not yet in gconf ] if compare[name[default] is constant[None]] begin[:] variable[default] assign[=] call[name[self].DEFAULTS.get, parameter[name[key], constant[None]]] variable[vtype] assign[=] call[name[type], parameter[name[default]]] if compare[name[default] is constant[None]] begin[:] call[name[logger].warn, parameter[binary_operation[constant[Unknown key: %s, must specify default value] <ast.Mod object at 0x7da2590d6920> name[key]]]] return[constant[None]] if compare[name[vtype] <ast.NotIn object at 0x7da2590d7190> name[self].VALID_KEY_TYPES] begin[:] call[name[logger].warn, parameter[binary_operation[constant[Invalid key type: %s] <ast.Mod object at 0x7da2590d6920> name[vtype]]]] return[constant[None]] variable[key] assign[=] call[name[self]._fix_key, parameter[name[key]]] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[self]._notifications] begin[:] call[name[self]._client.notify_add, parameter[name[key], name[self]._key_changed, constant[None]]] call[name[self]._notifications.append, parameter[name[key]]] variable[value] assign[=] call[name[self]._client.get, parameter[name[key]]] if compare[name[value] is constant[None]] begin[:] call[name[self].set, parameter[name[key], name[default]]] return[name[default]] variable[value] assign[=] call[name[self]._get_value, parameter[name[value], name[default]]] if compare[name[value] is_not constant[None]] begin[:] return[name[value]] call[name[logger].warn, parameter[binary_operation[constant[Unknown gconf key: %s] <ast.Mod object at 0x7da2590d6920> name[key]]]] return[constant[None]]
keyword[def] identifier[get] ( identifier[self] , identifier[key] , identifier[default] = keyword[None] ): literal[string] keyword[if] identifier[default] keyword[is] keyword[None] : identifier[default] = identifier[self] . identifier[DEFAULTS] . identifier[get] ( identifier[key] , keyword[None] ) identifier[vtype] = identifier[type] ( identifier[default] ) keyword[if] identifier[default] keyword[is] keyword[None] : identifier[logger] . identifier[warn] ( literal[string] % identifier[key] ) keyword[return] keyword[None] keyword[if] identifier[vtype] keyword[not] keyword[in] identifier[self] . identifier[VALID_KEY_TYPES] : identifier[logger] . identifier[warn] ( literal[string] % identifier[vtype] ) keyword[return] keyword[None] identifier[key] = identifier[self] . identifier[_fix_key] ( identifier[key] ) keyword[if] identifier[key] keyword[not] keyword[in] identifier[self] . identifier[_notifications] : identifier[self] . identifier[_client] . identifier[notify_add] ( identifier[key] , identifier[self] . identifier[_key_changed] , keyword[None] ) identifier[self] . identifier[_notifications] . identifier[append] ( identifier[key] ) identifier[value] = identifier[self] . identifier[_client] . identifier[get] ( identifier[key] ) keyword[if] identifier[value] keyword[is] keyword[None] : identifier[self] . identifier[set] ( identifier[key] , identifier[default] ) keyword[return] identifier[default] identifier[value] = identifier[self] . identifier[_get_value] ( identifier[value] , identifier[default] ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[value] identifier[logger] . identifier[warn] ( literal[string] % identifier[key] ) keyword[return] keyword[None]
def get(self, key, default=None): """ Returns the value of the key or the default value if the key is not yet in gconf """ #function arguments override defaults if default is None: default = self.DEFAULTS.get(key, None) # depends on [control=['if'], data=['default']] vtype = type(default) #we now have a valid key and type if default is None: logger.warn('Unknown key: %s, must specify default value' % key) return None # depends on [control=['if'], data=[]] if vtype not in self.VALID_KEY_TYPES: logger.warn('Invalid key type: %s' % vtype) return None # depends on [control=['if'], data=['vtype']] #for gconf refer to the full key path key = self._fix_key(key) if key not in self._notifications: self._client.notify_add(key, self._key_changed, None) self._notifications.append(key) # depends on [control=['if'], data=['key']] value = self._client.get(key) if value is None: self.set(key, default) return default # depends on [control=['if'], data=[]] value = self._get_value(value, default) if value is not None: return value # depends on [control=['if'], data=['value']] logger.warn('Unknown gconf key: %s' % key) return None
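gconf itself is GNOME-specific, so this runnable sketch swaps the client for a plain dict while keeping the defaults lookup, type check and write-back flow of the method above.

# Dict-backed stand-in for the gconf client; the flow mirrors get().
DEFAULTS = {"volume": 0.5, "muted": False}
VALID_KEY_TYPES = (bool, int, float, str, list)
_store = {}

def get(key, default=None):
    if default is None:
        default = DEFAULTS.get(key)
    if default is None or type(default) not in VALID_KEY_TYPES:
        return None
    if key not in _store:
        _store[key] = default  # write the default back, as the original does
    return _store[key]

print(get("volume"))  # -> 0.5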
def _install_from_scratch(python_cmd, use_sudo): """ Install setuptools from scratch using installer """ with cd("/tmp"): download(EZ_SETUP_URL) command = '%(python_cmd)s ez_setup.py' % locals() if use_sudo: run_as_root(command) else: run(command) run('rm -f ez_setup.py')
def function[_install_from_scratch, parameter[python_cmd, use_sudo]]: constant[ Install setuptools from scratch using installer ] with call[name[cd], parameter[constant[/tmp]]] begin[:] call[name[download], parameter[name[EZ_SETUP_URL]]] variable[command] assign[=] binary_operation[constant[%(python_cmd)s ez_setup.py] <ast.Mod object at 0x7da2590d6920> call[name[locals], parameter[]]] if name[use_sudo] begin[:] call[name[run_as_root], parameter[name[command]]] call[name[run], parameter[constant[rm -f ez_setup.py]]]
keyword[def] identifier[_install_from_scratch] ( identifier[python_cmd] , identifier[use_sudo] ): literal[string] keyword[with] identifier[cd] ( literal[string] ): identifier[download] ( identifier[EZ_SETUP_URL] ) identifier[command] = literal[string] % identifier[locals] () keyword[if] identifier[use_sudo] : identifier[run_as_root] ( identifier[command] ) keyword[else] : identifier[run] ( identifier[command] ) identifier[run] ( literal[string] )
def _install_from_scratch(python_cmd, use_sudo): """ Install setuptools from scratch using installer """ with cd('/tmp'): download(EZ_SETUP_URL) command = '%(python_cmd)s ez_setup.py' % locals() if use_sudo: run_as_root(command) # depends on [control=['if'], data=[]] else: run(command) run('rm -f ez_setup.py') # depends on [control=['with'], data=[]]
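A local-subprocess analogue of the remote install flow, shown only as a sketch: the bootstrap URL is a placeholder and the fabric/fabtools helpers are replaced by plain subprocess calls.

# Placeholder URL; the real EZ_SETUP_URL comes from the source module.
import subprocess

EZ_SETUP_URL = "https://example.invalid/ez_setup.py"

def install_from_scratch(python_cmd="python", use_sudo=False):
    def run(cmd):
        subprocess.run(cmd, check=True, cwd="/tmp")
    run(["curl", "-fsSLO", EZ_SETUP_URL])     # download the installer
    cmd = [python_cmd, "ez_setup.py"]
    run(["sudo"] + cmd if use_sudo else cmd)  # optionally escalate
    run(["rm", "-f", "ez_setup.py"])          # clean up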
def __clearRepositoryCache(self, duplicate=None): """Called when we change the repository(ies) for a directory. This clears any cached information that is invalidated by changing the repository.""" for node in list(self.entries.values()): if node != self.dir: if node != self and isinstance(node, Dir): node.__clearRepositoryCache(duplicate) else: node.clear() try: del node._srcreps except AttributeError: pass if duplicate is not None: node.duplicate=duplicate
def function[__clearRepositoryCache, parameter[self, duplicate]]: constant[Called when we change the repository(ies) for a directory. This clears any cached information that is invalidated by changing the repository.] for taget[name[node]] in starred[call[name[list], parameter[call[name[self].entries.values, parameter[]]]]] begin[:] if compare[name[node] not_equal[!=] name[self].dir] begin[:] if <ast.BoolOp object at 0x7da204621450> begin[:] call[name[node].__clearRepositoryCache, parameter[name[duplicate]]]
keyword[def] identifier[__clearRepositoryCache] ( identifier[self] , identifier[duplicate] = keyword[None] ): literal[string] keyword[for] identifier[node] keyword[in] identifier[list] ( identifier[self] . identifier[entries] . identifier[values] ()): keyword[if] identifier[node] != identifier[self] . identifier[dir] : keyword[if] identifier[node] != identifier[self] keyword[and] identifier[isinstance] ( identifier[node] , identifier[Dir] ): identifier[node] . identifier[__clearRepositoryCache] ( identifier[duplicate] ) keyword[else] : identifier[node] . identifier[clear] () keyword[try] : keyword[del] identifier[node] . identifier[_srcreps] keyword[except] identifier[AttributeError] : keyword[pass] keyword[if] identifier[duplicate] keyword[is] keyword[not] keyword[None] : identifier[node] . identifier[duplicate] = identifier[duplicate]
def __clearRepositoryCache(self, duplicate=None): """Called when we change the repository(ies) for a directory. This clears any cached information that is invalidated by changing the repository.""" for node in list(self.entries.values()): if node != self.dir: if node != self and isinstance(node, Dir): node.__clearRepositoryCache(duplicate) # depends on [control=['if'], data=[]] else: node.clear() try: del node._srcreps # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] if duplicate is not None: node.duplicate = duplicate # depends on [control=['if'], data=['duplicate']] # depends on [control=['if'], data=['node']] # depends on [control=['for'], data=['node']]
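Toy Node/Dir classes showing the recursive cache-invalidation pattern; these are stand-ins, not SCons's real node classes.

# Minimal stand-ins for SCons nodes; only the recursion pattern matters.
class Node:
    def __init__(self):
        self.cached = "stale"
    def clear(self):
        self.cached = None

class Dir(Node):
    def __init__(self):
        super().__init__()
        self.entries = {}
    def clear_repository_cache(self):
        for node in list(self.entries.values()):
            if isinstance(node, Dir):
                node.clear_repository_cache()  # recurse into subdirectories
            else:
                node.clear()

root = Dir()
root.entries["sub"] = Dir()
root.entries["sub"].entries["f"] = Node()
root.clear_repository_cache()
print(root.entries["sub"].entries["f"].cached)  # -> None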
def _serialize_object(self, response_data, request): """ Override to not serialize doc responses. """ if self._is_doc_request(request): return response_data else: return super(DocumentedResource, self)._serialize_object( response_data, request)
def function[_serialize_object, parameter[self, response_data, request]]: constant[ Override to not serialize doc responses. ] if call[name[self]._is_doc_request, parameter[name[request]]] begin[:] return[name[response_data]]
keyword[def] identifier[_serialize_object] ( identifier[self] , identifier[response_data] , identifier[request] ): literal[string] keyword[if] identifier[self] . identifier[_is_doc_request] ( identifier[request] ): keyword[return] identifier[response_data] keyword[else] : keyword[return] identifier[super] ( identifier[DocumentedResource] , identifier[self] ). identifier[_serialize_object] ( identifier[response_data] , identifier[request] )
def _serialize_object(self, response_data, request): """ Override to not serialize doc responses. """ if self._is_doc_request(request): return response_data # depends on [control=['if'], data=[]] else: return super(DocumentedResource, self)._serialize_object(response_data, request)
def extension_preselection(network, args, method, days=3):
    """
    Preselects lines which are extended in snapshots that lead to
    overloading, in order to reduce the number of extension variables.

    Parameters
    ----------
    network : :class:`pypsa.Network`
        Overall container of PyPSA
    args : dict
        Arguments set in appl.py
    method: str
        Choose method of selection:
        'extreme_situations' for remarkable timesteps
        (e.g. minimal residual load)
        'snapshot_clustering' for snapshot clustering with number of days
    days: int
        Number of clustered days, only used when method = 'snapshot_clustering'

    Returns
    -------
    network : :class:`pypsa.Network`
        Overall container of PyPSA

    """
    weighting = network.snapshot_weightings

    if method == 'extreme_situations':
        snapshots = find_snapshots(network, 'residual load')
        snapshots = snapshots.append(find_snapshots(network, 'wind_onshore'))
        snapshots = snapshots.append(find_snapshots(network, 'solar'))
        snapshots = snapshots.drop_duplicates()
        snapshots = snapshots.sort_values()

    if method == 'snapshot_clustering':
        network_cluster = snapshot_clustering(network, how='daily',
                                              clusters=days)
        snapshots = network_cluster.snapshots
        network.snapshot_weightings = network_cluster.snapshot_weightings

    # Set all lines and trafos extendable in network
    network.lines.loc[:, 's_nom_extendable'] = True
    network.lines.loc[:, 's_nom_min'] = network.lines.s_nom
    network.lines.loc[:, 's_nom_max'] = np.inf

    network.links.loc[:, 'p_nom_extendable'] = True
    network.links.loc[:, 'p_nom_min'] = network.links.p_nom
    network.links.loc[:, 'p_nom_max'] = np.inf

    network.transformers.loc[:, 's_nom_extendable'] = True
    network.transformers.loc[:, 's_nom_min'] = network.transformers.s_nom
    network.transformers.loc[:, 's_nom_max'] = np.inf

    network = set_line_costs(network)
    network = set_trafo_costs(network)
    network = convert_capital_costs(network, 1, 1)

    extended_lines = network.lines.index[network.lines.s_nom_opt >
                                         network.lines.s_nom]
    extended_links = network.links.index[network.links.p_nom_opt >
                                         network.links.p_nom]

    x = time.time()
    for i in range(int(snapshots.value_counts().sum())):
        if i > 0:
            network.lopf(snapshots[i], solver_name=args['solver'])
            extended_lines = extended_lines.append(
                network.lines.index[network.lines.s_nom_opt >
                                    network.lines.s_nom])
            extended_lines = extended_lines.drop_duplicates()
            extended_links = extended_links.append(
                network.links.index[network.links.p_nom_opt >
                                    network.links.p_nom])
            extended_links = extended_links.drop_duplicates()

    print("Number of preselected lines: ", len(extended_lines))

    network.lines.loc[~network.lines.index.isin(extended_lines),
                      's_nom_extendable'] = False
    network.lines.loc[network.lines.s_nom_extendable, 's_nom_min']\
        = network.lines.s_nom
    network.lines.loc[network.lines.s_nom_extendable, 's_nom_max']\
        = np.inf

    network.links.loc[~network.links.index.isin(extended_links),
                      'p_nom_extendable'] = False
    network.links.loc[network.links.p_nom_extendable, 'p_nom_min']\
        = network.links.p_nom
    network.links.loc[network.links.p_nom_extendable, 'p_nom_max']\
        = np.inf

    network.snapshot_weightings = weighting
    network = set_line_costs(network)
    network = set_trafo_costs(network)
    network = convert_capital_costs(network, args['start_snapshot'],
                                    args['end_snapshot'])

    y = time.time()
    z1st = (y - x) / 60
    print("Time for first LOPF [min]:", round(z1st, 2))

    return network
def function[extension_preselection, parameter[network, args, method, days]]: constant[ Function that preselects lines which are extendend in snapshots leading to overloading to reduce nubmer of extension variables. Parameters ---------- network : :class:`pypsa.Network Overall container of PyPSA args : dict Arguments set in appl.py method: str Choose method of selection: 'extreme_situations' for remarkable timsteps (e.g. minimal resiudual load) 'snapshot_clustering' for snapshot clustering with number of days days: int Number of clustered days, only used when method = 'snapshot_clustering' Returns ------- network : :class:`pypsa.Network Overall container of PyPSA ] variable[weighting] assign[=] name[network].snapshot_weightings if compare[name[method] equal[==] constant[extreme_situations]] begin[:] variable[snapshots] assign[=] call[name[find_snapshots], parameter[name[network], constant[residual load]]] variable[snapshots] assign[=] call[name[snapshots].append, parameter[call[name[find_snapshots], parameter[name[network], constant[wind_onshore]]]]] variable[snapshots] assign[=] call[name[snapshots].append, parameter[call[name[find_snapshots], parameter[name[network], constant[solar]]]]] variable[snapshots] assign[=] call[name[snapshots].drop_duplicates, parameter[]] variable[snapshots] assign[=] call[name[snapshots].sort_values, parameter[]] if compare[name[method] equal[==] constant[snapshot_clustering]] begin[:] variable[network_cluster] assign[=] call[name[snapshot_clustering], parameter[name[network]]] variable[snapshots] assign[=] name[network_cluster].snapshots name[network].snapshot_weightings assign[=] name[network_cluster].snapshot_weightings call[name[network].lines.loc][tuple[[<ast.Slice object at 0x7da1b1a12a40>, <ast.Constant object at 0x7da1b1a13760>]]] assign[=] constant[True] call[name[network].lines.loc][tuple[[<ast.Slice object at 0x7da1b1a10fd0>, <ast.Constant object at 0x7da1b1a13790>]]] assign[=] name[network].lines.s_nom call[name[network].lines.loc][tuple[[<ast.Slice object at 0x7da1b1a120e0>, <ast.Constant object at 0x7da1b1a12170>]]] assign[=] name[np].inf call[name[network].links.loc][tuple[[<ast.Slice object at 0x7da1b1a11ea0>, <ast.Constant object at 0x7da1b1a12740>]]] assign[=] constant[True] call[name[network].links.loc][tuple[[<ast.Slice object at 0x7da1b1a10cd0>, <ast.Constant object at 0x7da1b1a10820>]]] assign[=] name[network].links.p_nom call[name[network].links.loc][tuple[[<ast.Slice object at 0x7da1b1a10670>, <ast.Constant object at 0x7da1b1a12bf0>]]] assign[=] name[np].inf call[name[network].transformers.loc][tuple[[<ast.Slice object at 0x7da1b1a13280>, <ast.Constant object at 0x7da1b1a10850>]]] assign[=] constant[True] call[name[network].transformers.loc][tuple[[<ast.Slice object at 0x7da1b1a130d0>, <ast.Constant object at 0x7da1b1a11ab0>]]] assign[=] name[network].transformers.s_nom call[name[network].transformers.loc][tuple[[<ast.Slice object at 0x7da1b1b9f010>, <ast.Constant object at 0x7da1b1b9fbb0>]]] assign[=] name[np].inf variable[network] assign[=] call[name[set_line_costs], parameter[name[network]]] variable[network] assign[=] call[name[set_trafo_costs], parameter[name[network]]] variable[network] assign[=] call[name[convert_capital_costs], parameter[name[network], constant[1], constant[1]]] variable[extended_lines] assign[=] call[name[network].lines.index][compare[name[network].lines.s_nom_opt greater[>] name[network].lines.s_nom]] variable[extended_links] assign[=] call[name[network].links.index][compare[name[network].links.p_nom_opt 
greater[>] name[network].links.p_nom]] variable[x] assign[=] call[name[time].time, parameter[]] for taget[name[i]] in starred[call[name[range], parameter[call[name[int], parameter[call[call[name[snapshots].value_counts, parameter[]].sum, parameter[]]]]]]] begin[:] if compare[name[i] greater[>] constant[0]] begin[:] call[name[network].lopf, parameter[call[name[snapshots]][name[i]]]] variable[extended_lines] assign[=] call[name[extended_lines].append, parameter[call[name[network].lines.index][compare[name[network].lines.s_nom_opt greater[>] name[network].lines.s_nom]]]] variable[extended_lines] assign[=] call[name[extended_lines].drop_duplicates, parameter[]] variable[extended_links] assign[=] call[name[extended_links].append, parameter[call[name[network].links.index][compare[name[network].links.p_nom_opt greater[>] name[network].links.p_nom]]]] variable[extended_links] assign[=] call[name[extended_links].drop_duplicates, parameter[]] call[name[print], parameter[constant[Number of preselected lines: ], call[name[len], parameter[name[extended_lines]]]]] call[name[network].lines.loc][tuple[[<ast.UnaryOp object at 0x7da1b1b9d450>, <ast.Constant object at 0x7da1b1b9d690>]]] assign[=] constant[False] call[name[network].lines.loc][tuple[[<ast.Attribute object at 0x7da1b1b9cca0>, <ast.Constant object at 0x7da1b1b9c370>]]] assign[=] name[network].lines.s_nom call[name[network].lines.loc][tuple[[<ast.Attribute object at 0x7da1b1b9ff70>, <ast.Constant object at 0x7da1b1b9cfa0>]]] assign[=] name[np].inf call[name[network].links.loc][tuple[[<ast.UnaryOp object at 0x7da1b1b9c2e0>, <ast.Constant object at 0x7da1b1b9cfd0>]]] assign[=] constant[False] call[name[network].links.loc][tuple[[<ast.Attribute object at 0x7da1b1b9e650>, <ast.Constant object at 0x7da1b1b9fa30>]]] assign[=] name[network].links.p_nom call[name[network].links.loc][tuple[[<ast.Attribute object at 0x7da1b1b9c0a0>, <ast.Constant object at 0x7da1b1b9e2c0>]]] assign[=] name[np].inf name[network].snapshot_weightings assign[=] name[weighting] variable[network] assign[=] call[name[set_line_costs], parameter[name[network]]] variable[network] assign[=] call[name[set_trafo_costs], parameter[name[network]]] variable[network] assign[=] call[name[convert_capital_costs], parameter[name[network], call[name[args]][constant[start_snapshot]], call[name[args]][constant[end_snapshot]]]] variable[y] assign[=] call[name[time].time, parameter[]] variable[z1st] assign[=] binary_operation[binary_operation[name[y] - name[x]] / constant[60]] call[name[print], parameter[constant[Time for first LOPF [min]:], call[name[round], parameter[name[z1st], constant[2]]]]] return[name[network]]
keyword[def] identifier[extension_preselection] ( identifier[network] , identifier[args] , identifier[method] , identifier[days] = literal[int] ): literal[string] identifier[weighting] = identifier[network] . identifier[snapshot_weightings] keyword[if] identifier[method] == literal[string] : identifier[snapshots] = identifier[find_snapshots] ( identifier[network] , literal[string] ) identifier[snapshots] = identifier[snapshots] . identifier[append] ( identifier[find_snapshots] ( identifier[network] , literal[string] )) identifier[snapshots] = identifier[snapshots] . identifier[append] ( identifier[find_snapshots] ( identifier[network] , literal[string] )) identifier[snapshots] = identifier[snapshots] . identifier[drop_duplicates] () identifier[snapshots] = identifier[snapshots] . identifier[sort_values] () keyword[if] identifier[method] == literal[string] : identifier[network_cluster] = identifier[snapshot_clustering] ( identifier[network] , identifier[how] = literal[string] , identifier[clusters] = identifier[days] ) identifier[snapshots] = identifier[network_cluster] . identifier[snapshots] identifier[network] . identifier[snapshot_weightings] = identifier[network_cluster] . identifier[snapshot_weightings] identifier[network] . identifier[lines] . identifier[loc] [:, literal[string] ]= keyword[True] identifier[network] . identifier[lines] . identifier[loc] [:, literal[string] ]= identifier[network] . identifier[lines] . identifier[s_nom] identifier[network] . identifier[lines] . identifier[loc] [:, literal[string] ]= identifier[np] . identifier[inf] identifier[network] . identifier[links] . identifier[loc] [:, literal[string] ]= keyword[True] identifier[network] . identifier[links] . identifier[loc] [:, literal[string] ]= identifier[network] . identifier[links] . identifier[p_nom] identifier[network] . identifier[links] . identifier[loc] [:, literal[string] ]= identifier[np] . identifier[inf] identifier[network] . identifier[transformers] . identifier[loc] [:, literal[string] ]= keyword[True] identifier[network] . identifier[transformers] . identifier[loc] [:, literal[string] ]= identifier[network] . identifier[transformers] . identifier[s_nom] identifier[network] . identifier[transformers] . identifier[loc] [:, literal[string] ]= identifier[np] . identifier[inf] identifier[network] = identifier[set_line_costs] ( identifier[network] ) identifier[network] = identifier[set_trafo_costs] ( identifier[network] ) identifier[network] = identifier[convert_capital_costs] ( identifier[network] , literal[int] , literal[int] ) identifier[extended_lines] = identifier[network] . identifier[lines] . identifier[index] [ identifier[network] . identifier[lines] . identifier[s_nom_opt] > identifier[network] . identifier[lines] . identifier[s_nom] ] identifier[extended_links] = identifier[network] . identifier[links] . identifier[index] [ identifier[network] . identifier[links] . identifier[p_nom_opt] > identifier[network] . identifier[links] . identifier[p_nom] ] identifier[x] = identifier[time] . identifier[time] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[int] ( identifier[snapshots] . identifier[value_counts] (). identifier[sum] ())): keyword[if] identifier[i] > literal[int] : identifier[network] . identifier[lopf] ( identifier[snapshots] [ identifier[i] ], identifier[solver_name] = identifier[args] [ literal[string] ]) identifier[extended_lines] = identifier[extended_lines] . identifier[append] ( identifier[network] . identifier[lines] . 
identifier[index] [ identifier[network] . identifier[lines] . identifier[s_nom_opt] > identifier[network] . identifier[lines] . identifier[s_nom] ]) identifier[extended_lines] = identifier[extended_lines] . identifier[drop_duplicates] () identifier[extended_links] = identifier[extended_links] . identifier[append] ( identifier[network] . identifier[links] . identifier[index] [ identifier[network] . identifier[links] . identifier[p_nom_opt] > identifier[network] . identifier[links] . identifier[p_nom] ]) identifier[extended_links] = identifier[extended_links] . identifier[drop_duplicates] () identifier[print] ( literal[string] , identifier[len] ( identifier[extended_lines] )) identifier[network] . identifier[lines] . identifier[loc] [~ identifier[network] . identifier[lines] . identifier[index] . identifier[isin] ( identifier[extended_lines] ), literal[string] ]= keyword[False] identifier[network] . identifier[lines] . identifier[loc] [ identifier[network] . identifier[lines] . identifier[s_nom_extendable] , literal[string] ]= identifier[network] . identifier[lines] . identifier[s_nom] identifier[network] . identifier[lines] . identifier[loc] [ identifier[network] . identifier[lines] . identifier[s_nom_extendable] , literal[string] ]= identifier[np] . identifier[inf] identifier[network] . identifier[links] . identifier[loc] [~ identifier[network] . identifier[links] . identifier[index] . identifier[isin] ( identifier[extended_links] ), literal[string] ]= keyword[False] identifier[network] . identifier[links] . identifier[loc] [ identifier[network] . identifier[links] . identifier[p_nom_extendable] , literal[string] ]= identifier[network] . identifier[links] . identifier[p_nom] identifier[network] . identifier[links] . identifier[loc] [ identifier[network] . identifier[links] . identifier[p_nom_extendable] , literal[string] ]= identifier[np] . identifier[inf] identifier[network] . identifier[snapshot_weightings] = identifier[weighting] identifier[network] = identifier[set_line_costs] ( identifier[network] ) identifier[network] = identifier[set_trafo_costs] ( identifier[network] ) identifier[network] = identifier[convert_capital_costs] ( identifier[network] , identifier[args] [ literal[string] ], identifier[args] [ literal[string] ]) identifier[y] = identifier[time] . identifier[time] () identifier[z1st] =( identifier[y] - identifier[x] )/ literal[int] identifier[print] ( literal[string] , identifier[round] ( identifier[z1st] , literal[int] )) keyword[return] identifier[network]
def extension_preselection(network, args, method, days=3):
    """
    Preselects lines which are extended in snapshots that lead to
    overloading, in order to reduce the number of extension variables.

    Parameters
    ----------
    network : :class:`pypsa.Network`
        Overall container of PyPSA
    args : dict
        Arguments set in appl.py
    method: str
        Choose method of selection:
        'extreme_situations' for remarkable timesteps
        (e.g. minimal residual load)
        'snapshot_clustering' for snapshot clustering with number of days
    days: int
        Number of clustered days, only used when method = 'snapshot_clustering'

    Returns
    -------
    network : :class:`pypsa.Network`
        Overall container of PyPSA

    """
    weighting = network.snapshot_weightings
    if method == 'extreme_situations':
        snapshots = find_snapshots(network, 'residual load')
        snapshots = snapshots.append(find_snapshots(network, 'wind_onshore'))
        snapshots = snapshots.append(find_snapshots(network, 'solar'))
        snapshots = snapshots.drop_duplicates()
        snapshots = snapshots.sort_values() # depends on [control=['if'], data=[]]
    if method == 'snapshot_clustering':
        network_cluster = snapshot_clustering(network, how='daily', clusters=days)
        snapshots = network_cluster.snapshots
        network.snapshot_weightings = network_cluster.snapshot_weightings # depends on [control=['if'], data=[]]
    # Set all lines and trafos extendable in network
    network.lines.loc[:, 's_nom_extendable'] = True
    network.lines.loc[:, 's_nom_min'] = network.lines.s_nom
    network.lines.loc[:, 's_nom_max'] = np.inf
    network.links.loc[:, 'p_nom_extendable'] = True
    network.links.loc[:, 'p_nom_min'] = network.links.p_nom
    network.links.loc[:, 'p_nom_max'] = np.inf
    network.transformers.loc[:, 's_nom_extendable'] = True
    network.transformers.loc[:, 's_nom_min'] = network.transformers.s_nom
    network.transformers.loc[:, 's_nom_max'] = np.inf
    network = set_line_costs(network)
    network = set_trafo_costs(network)
    network = convert_capital_costs(network, 1, 1)
    extended_lines = network.lines.index[network.lines.s_nom_opt > network.lines.s_nom]
    extended_links = network.links.index[network.links.p_nom_opt > network.links.p_nom]
    x = time.time()
    for i in range(int(snapshots.value_counts().sum())):
        if i > 0:
            network.lopf(snapshots[i], solver_name=args['solver'])
            extended_lines = extended_lines.append(network.lines.index[network.lines.s_nom_opt > network.lines.s_nom])
            extended_lines = extended_lines.drop_duplicates()
            extended_links = extended_links.append(network.links.index[network.links.p_nom_opt > network.links.p_nom])
            extended_links = extended_links.drop_duplicates() # depends on [control=['if'], data=['i']] # depends on [control=['for'], data=['i']]
    print('Number of preselected lines: ', len(extended_lines))
    network.lines.loc[~network.lines.index.isin(extended_lines), 's_nom_extendable'] = False
    network.lines.loc[network.lines.s_nom_extendable, 's_nom_min'] = network.lines.s_nom
    network.lines.loc[network.lines.s_nom_extendable, 's_nom_max'] = np.inf
    network.links.loc[~network.links.index.isin(extended_links), 'p_nom_extendable'] = False
    network.links.loc[network.links.p_nom_extendable, 'p_nom_min'] = network.links.p_nom
    network.links.loc[network.links.p_nom_extendable, 'p_nom_max'] = np.inf
    network.snapshot_weightings = weighting
    network = set_line_costs(network)
    network = set_trafo_costs(network)
    network = convert_capital_costs(network, args['start_snapshot'], args['end_snapshot'])
    y = time.time()
    z1st = (y - x) / 60
    print('Time for first LOPF [min]:', round(z1st, 2))
    return network
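A pandas-only sketch of the preselection bookkeeping; the frame stands in for network.lines and the capacities are fabricated.

# Fabricated line capacities; index 0 is the "overloaded" line.
import numpy as np
import pandas as pd

lines = pd.DataFrame({"s_nom": [100.0, 200.0], "s_nom_opt": [150.0, 200.0]})
extended = lines.index[lines.s_nom_opt > lines.s_nom]

lines["s_nom_extendable"] = lines.index.isin(extended)
lines.loc[lines.s_nom_extendable, "s_nom_min"] = lines.s_nom
lines.loc[lines.s_nom_extendable, "s_nom_max"] = np.inf
print(extended.tolist())  # -> [0]: only the overloaded line stays extendable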
def tag(iterable, tags=None, key='@tags'):
    """
    Add tags to each dict or dict-like object in ``iterable``.

    Tags are added to each dict under the key set by ``key``. If a value
    already exists under ``key``, this function will attempt to
    ``.extend()`` it, but will fall back to replacing it in the event of
    error.
    """
    if not tags:
        for item in iterable:
            yield item
    else:
        for item in iterable:
            yield _tag(item, tags, key)
def function[tag, parameter[iterable, tags, key]]: constant[ Add tags to each dict or dict-like object in ``iterable``. Tags are added to each dict with a key set by ``key``. If a key already exists under the key given by ``key``, this function will attempt to ``.extend()``` it, but will fall back to replacing it in the event of error. ] if <ast.UnaryOp object at 0x7da18f720730> begin[:] for taget[name[item]] in starred[name[iterable]] begin[:] <ast.Yield object at 0x7da18f721db0>
keyword[def] identifier[tag] ( identifier[iterable] , identifier[tags] = keyword[None] , identifier[key] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[tags] : keyword[for] identifier[item] keyword[in] identifier[iterable] : keyword[yield] identifier[item] keyword[else] : keyword[for] identifier[item] keyword[in] identifier[iterable] : keyword[yield] identifier[_tag] ( identifier[item] , identifier[tags] , identifier[key] )
def tag(iterable, tags=None, key='@tags'):
    """
    Add tags to each dict or dict-like object in ``iterable``.

    Tags are added to each dict under the key set by ``key``. If a value
    already exists under ``key``, this function will attempt to
    ``.extend()`` it, but will fall back to replacing it in the event of
    error.
    """
    if not tags:
        for item in iterable:
            yield item # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]]
    else:
        for item in iterable:
            yield _tag(item, tags, key) # depends on [control=['for'], data=['item']]
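A runnable sketch; _tag is defined elsewhere in the source package, so a minimal merge-under-key version is inlined here as an assumption.

# Assumed minimal _tag: extend an existing list under `key`.
def _tag(item, tags, key):
    item.setdefault(key, []).extend(tags)
    return item

def tag(iterable, tags=None, key='@tags'):
    for item in iterable:
        yield _tag(item, tags, key) if tags else item

print(list(tag([{'id': 1}], tags=['draft'])))
# -> [{'id': 1, '@tags': ['draft']}]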
def shift_annotations(self, time):
        """Shift all annotations in time.

        Annotations at the beginning can be squashed or discarded when a
        left shift is applied.

        :param int time: Time shift width, negative numbers make a left
            shift.
        :returns: Tuple of a list of squashed annotations and a list of
            removed annotations in the format:
            ``(tiername, start, end, value)``.
        """
        total_re = []
        total_sq = []
        for name, tier in self.tiers.items():
            squashed = []
            for aid, (begin, end, value, _) in tier[0].items():
                if self.timeslots[end]+time <= 0:
                    squashed.append((name, aid))
                elif self.timeslots[begin]+time < 0:
                    total_sq.append((name, self.timeslots[begin],
                                     self.timeslots[end], value))
                    self.timeslots[begin] = 0
                else:
                    self.timeslots[begin] += time
                self.timeslots[end] += time
        for name, aid in squashed:
            start, end, value, _ = self.tiers[name][0][aid]
            del(self.tiers[name][0][aid])
            del(self.annotations[aid])
            total_re.append(
                (name, self.timeslots[start], self.timeslots[end], value))
        return total_sq, total_re
def function[shift_annotations, parameter[self, time]]: constant[Shift all annotations in time. Annotations that are in the beginning and a left shift is applied can be squashed or discarded. :param int time: Time shift width, negative numbers make a left shift. :returns: Tuple of a list of squashed annotations and a list of removed annotations in the format: ``(tiername, start, end, value)``. ] variable[total_re] assign[=] list[[]] variable[total_sq] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b023fd30>, <ast.Name object at 0x7da1b023d4b0>]]] in starred[call[name[self].tiers.items, parameter[]]] begin[:] variable[squashed] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b023ff70>, <ast.Tuple object at 0x7da1b023e620>]]] in starred[call[call[name[tier]][constant[0]].items, parameter[]]] begin[:] if compare[binary_operation[call[name[self].timeslots][name[end]] + name[time]] less_or_equal[<=] constant[0]] begin[:] call[name[squashed].append, parameter[tuple[[<ast.Name object at 0x7da1b023e4a0>, <ast.Name object at 0x7da1b023e020>]]]] for taget[tuple[[<ast.Name object at 0x7da1b023dc90>, <ast.Name object at 0x7da1b023e2f0>]]] in starred[name[squashed]] begin[:] <ast.Tuple object at 0x7da1b023d960> assign[=] call[call[call[name[self].tiers][name[name]]][constant[0]]][name[aid]] <ast.Delete object at 0x7da1b0211a80> <ast.Delete object at 0x7da1b0210400> call[name[total_re].append, parameter[tuple[[<ast.Name object at 0x7da1b0213460>, <ast.Subscript object at 0x7da1b02115a0>, <ast.Subscript object at 0x7da1b0213c70>, <ast.Name object at 0x7da1b0210250>]]]] return[tuple[[<ast.Name object at 0x7da1b0213b50>, <ast.Name object at 0x7da1b0211720>]]]
keyword[def] identifier[shift_annotations] ( identifier[self] , identifier[time] ): literal[string] identifier[total_re] =[] identifier[total_sq] =[] keyword[for] identifier[name] , identifier[tier] keyword[in] identifier[self] . identifier[tiers] . identifier[items] (): identifier[squashed] =[] keyword[for] identifier[aid] ,( identifier[begin] , identifier[end] , identifier[value] , identifier[_] ) keyword[in] identifier[tier] [ literal[int] ]. identifier[items] (): keyword[if] identifier[self] . identifier[timeslots] [ identifier[end] ]+ identifier[time] <= literal[int] : identifier[squashed] . identifier[append] (( identifier[name] , identifier[aid] )) keyword[elif] identifier[self] . identifier[timeslots] [ identifier[begin] ]+ identifier[time] < literal[int] : identifier[total_sq] . identifier[append] (( identifier[name] , identifier[self] . identifier[timeslots] [ identifier[begin] ], identifier[self] . identifier[timeslots] [ identifier[end] ], identifier[value] )) identifier[self] . identifier[timeslots] [ identifier[begin] ]= literal[int] keyword[else] : identifier[self] . identifier[timeslots] [ identifier[begin] ]+= identifier[time] identifier[self] . identifier[timeslots] [ identifier[end] ]+= identifier[time] keyword[for] identifier[name] , identifier[aid] keyword[in] identifier[squashed] : identifier[start] , identifier[end] , identifier[value] , identifier[_] = identifier[self] . identifier[tiers] [ identifier[name] ][ literal[int] ][ identifier[aid] ] keyword[del] ( identifier[self] . identifier[tiers] [ identifier[name] ][ literal[int] ][ identifier[aid] ]) keyword[del] ( identifier[self] . identifier[annotations] [ identifier[aid] ]) identifier[total_re] . identifier[append] ( ( identifier[name] , identifier[self] . identifier[timeslots] [ identifier[start] ], identifier[self] . identifier[timeslots] [ identifier[end] ], identifier[value] )) keyword[return] identifier[total_sq] , identifier[total_re]
def shift_annotations(self, time):
    """Shift all annotations in time.

    Annotations at the beginning can be squashed or discarded when a
    left shift is applied.

    :param int time: Time shift width, negative numbers make a left
        shift.
    :returns: Tuple of a list of squashed annotations and a list of
        removed annotations in the format:
        ``(tiername, start, end, value)``.
    """
    total_re = []
    total_sq = []
    for (name, tier) in self.tiers.items():
        squashed = []
        for (aid, (begin, end, value, _)) in tier[0].items():
            if self.timeslots[end] + time <= 0:
                squashed.append((name, aid)) # depends on [control=['if'], data=[]]
            elif self.timeslots[begin] + time < 0:
                total_sq.append((name, self.timeslots[begin], self.timeslots[end], value))
                self.timeslots[begin] = 0 # depends on [control=['if'], data=[]]
            else:
                self.timeslots[begin] += time
            self.timeslots[end] += time # depends on [control=['for'], data=[]]
    for (name, aid) in squashed:
        (start, end, value, _) = self.tiers[name][0][aid]
        del self.tiers[name][0][aid]
        del self.annotations[aid]
        total_re.append((name, self.timeslots[start], self.timeslots[end], value)) # depends on [control=['for'], data=[]]
    # depends on [control=['for'], data=[]]
    return (total_sq, total_re)
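A toy run of the squash-or-shift decision for a single annotation; the timeslot ids and values are fabricated.

# One annotation from 100 to 500, shifted left by 300.
timeslots = {'b': 100, 'e': 500}
shift = -300

begin, end = 'b', 'e'
if timeslots[end] + shift <= 0:
    print('annotation ends before zero: discard it')
elif timeslots[begin] + shift < 0:
    timeslots[begin] = 0        # squash: clamp the start to zero
else:
    timeslots[begin] += shift
timeslots[end] += shift         # the end always shifts
print(timeslots)  # -> {'b': 0, 'e': 200}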
def convConn (self, preCellsTags, postCellsTags, connParam):
        from .. import sim
        ''' Generates connections between all pre and post-syn cells based on convergence values'''
        if sim.cfg.verbose: print('Generating set of convergent connections (rule: %s) ...' % (connParam['label']))

        # get list of params that have a lambda function
        paramsStrFunc = [param for param in [p+'Func' for p in self.connStringFuncParams] if param in connParam]

        # copy the vars into args immediately and work out which keys are associated with lambda functions only once per method
        funcKeys = {}
        for paramStrFunc in paramsStrFunc:
            connParam[paramStrFunc + 'Args'] = connParam[paramStrFunc + 'Vars'].copy()
            funcKeys[paramStrFunc] = [key for key in connParam[paramStrFunc + 'Vars'] if callable(connParam[paramStrFunc + 'Vars'][key])]

        # converted to list only once
        preCellsTagsKeys = sorted(preCellsTags)

        # calculate hash for post cell gids
        hashPreCells = sim.hashList(preCellsTagsKeys)

        for postCellGid,postCellTags in postCellsTags.items():  # for each postsyn cell
            if postCellGid in self.gid2lid:  # check if postsyn is in this node
                convergence = connParam['convergenceFunc'][postCellGid] if 'convergenceFunc' in connParam else connParam['convergence']  # num of presyn conns / postsyn cell
                convergence = max(min(int(round(convergence)), len(preCellsTags)-1), 0)
                self.rand.Random123(hashPreCells, postCellGid, sim.cfg.seeds['conn'])  # init randomizer
                randSample = self.randUniqueInt(self.rand, convergence+1, 0, len(preCellsTags)-1)

                # note: randSample[convergence] is an extra value used only if one of the random preGids coincided with the postGid
                preCellsSample = {preCellsTagsKeys[randSample[convergence]] if preCellsTagsKeys[i]==postCellGid else preCellsTagsKeys[i]:0
                                  for i in randSample[0:convergence]}  # dict of selected gids of presyn cells with the post gid removed
                preCellsConv = {k:v for k,v in preCellsTags.items() if k in preCellsSample}  # dict of selected presyn cells tags

                for preCellGid, preCellTags in preCellsConv.items():  # for each presyn cell
                    for paramStrFunc in paramsStrFunc: # call lambda functions to get weight func args
                        # update the relevant FuncArgs dict where lambda functions are known to exist in the corresponding FuncVars dict
                        for funcKey in funcKeys[paramStrFunc]:
                            connParam[paramStrFunc + 'Args'][funcKey] = connParam[paramStrFunc+'Vars'][funcKey](preCellTags,postCellTags)

                    if preCellGid != postCellGid: # if not self-connection
                        self._addCellConn(connParam, preCellGid, postCellGid)
def function[convConn, parameter[self, preCellsTags, postCellsTags, connParam]]: from relative_module[None] import module[sim] constant[ Generates connections between all pre and post-syn cells based on probability values] if name[sim].cfg.verbose begin[:] call[name[print], parameter[binary_operation[constant[Generating set of convergent connections (rule: %s) ...] <ast.Mod object at 0x7da2590d6920> call[name[connParam]][constant[label]]]]] variable[paramsStrFunc] assign[=] <ast.ListComp object at 0x7da1b12c5930> variable[funcKeys] assign[=] dictionary[[], []] for taget[name[paramStrFunc]] in starred[name[paramsStrFunc]] begin[:] call[name[connParam]][binary_operation[name[paramStrFunc] + constant[Args]]] assign[=] call[call[name[connParam]][binary_operation[name[paramStrFunc] + constant[Vars]]].copy, parameter[]] call[name[funcKeys]][name[paramStrFunc]] assign[=] <ast.ListComp object at 0x7da1b12c4ee0> variable[preCellsTagsKeys] assign[=] call[name[sorted], parameter[name[preCellsTags]]] variable[hashPreCells] assign[=] call[name[sim].hashList, parameter[name[preCellsTagsKeys]]] for taget[tuple[[<ast.Name object at 0x7da1b12c4610>, <ast.Name object at 0x7da1b12c46a0>]]] in starred[call[name[postCellsTags].items, parameter[]]] begin[:] if compare[name[postCellGid] in name[self].gid2lid] begin[:] variable[convergence] assign[=] <ast.IfExp object at 0x7da1b12c48b0> variable[convergence] assign[=] call[name[max], parameter[call[name[min], parameter[call[name[int], parameter[call[name[round], parameter[name[convergence]]]]], binary_operation[call[name[len], parameter[name[preCellsTags]]] - constant[1]]]], constant[0]]] call[name[self].rand.Random123, parameter[name[hashPreCells], name[postCellGid], call[name[sim].cfg.seeds][constant[conn]]]] variable[randSample] assign[=] call[name[self].randUniqueInt, parameter[name[self].rand, binary_operation[name[convergence] + constant[1]], constant[0], binary_operation[call[name[len], parameter[name[preCellsTags]]] - constant[1]]]] variable[preCellsSample] assign[=] <ast.DictComp object at 0x7da1b110caf0> variable[preCellsConv] assign[=] <ast.DictComp object at 0x7da1b110ded0> for taget[tuple[[<ast.Name object at 0x7da1b110d900>, <ast.Name object at 0x7da1b110d8d0>]]] in starred[call[name[preCellsConv].items, parameter[]]] begin[:] for taget[name[paramStrFunc]] in starred[name[paramsStrFunc]] begin[:] for taget[name[funcKey]] in starred[call[name[funcKeys]][name[paramStrFunc]]] begin[:] call[call[name[connParam]][binary_operation[name[paramStrFunc] + constant[Args]]]][name[funcKey]] assign[=] call[call[call[name[connParam]][binary_operation[name[paramStrFunc] + constant[Vars]]]][name[funcKey]], parameter[name[preCellTags], name[postCellTags]]] if compare[name[preCellGid] not_equal[!=] name[postCellGid]] begin[:] call[name[self]._addCellConn, parameter[name[connParam], name[preCellGid], name[postCellGid]]]
keyword[def] identifier[convConn] ( identifier[self] , identifier[preCellsTags] , identifier[postCellsTags] , identifier[connParam] ): keyword[from] .. keyword[import] identifier[sim] literal[string] keyword[if] identifier[sim] . identifier[cfg] . identifier[verbose] : identifier[print] ( literal[string] %( identifier[connParam] [ literal[string] ])) identifier[paramsStrFunc] =[ identifier[param] keyword[for] identifier[param] keyword[in] [ identifier[p] + literal[string] keyword[for] identifier[p] keyword[in] identifier[self] . identifier[connStringFuncParams] ] keyword[if] identifier[param] keyword[in] identifier[connParam] ] identifier[funcKeys] ={} keyword[for] identifier[paramStrFunc] keyword[in] identifier[paramsStrFunc] : identifier[connParam] [ identifier[paramStrFunc] + literal[string] ]= identifier[connParam] [ identifier[paramStrFunc] + literal[string] ]. identifier[copy] () identifier[funcKeys] [ identifier[paramStrFunc] ]=[ identifier[key] keyword[for] identifier[key] keyword[in] identifier[connParam] [ identifier[paramStrFunc] + literal[string] ] keyword[if] identifier[callable] ( identifier[connParam] [ identifier[paramStrFunc] + literal[string] ][ identifier[key] ])] identifier[preCellsTagsKeys] = identifier[sorted] ( identifier[preCellsTags] ) identifier[hashPreCells] = identifier[sim] . identifier[hashList] ( identifier[preCellsTagsKeys] ) keyword[for] identifier[postCellGid] , identifier[postCellTags] keyword[in] identifier[postCellsTags] . identifier[items] (): keyword[if] identifier[postCellGid] keyword[in] identifier[self] . identifier[gid2lid] : identifier[convergence] = identifier[connParam] [ literal[string] ][ identifier[postCellGid] ] keyword[if] literal[string] keyword[in] identifier[connParam] keyword[else] identifier[connParam] [ literal[string] ] identifier[convergence] = identifier[max] ( identifier[min] ( identifier[int] ( identifier[round] ( identifier[convergence] )), identifier[len] ( identifier[preCellsTags] )- literal[int] ), literal[int] ) identifier[self] . identifier[rand] . identifier[Random123] ( identifier[hashPreCells] , identifier[postCellGid] , identifier[sim] . identifier[cfg] . identifier[seeds] [ literal[string] ]) identifier[randSample] = identifier[self] . identifier[randUniqueInt] ( identifier[self] . identifier[rand] , identifier[convergence] + literal[int] , literal[int] , identifier[len] ( identifier[preCellsTags] )- literal[int] ) identifier[preCellsSample] ={ identifier[preCellsTagsKeys] [ identifier[randSample] [ identifier[convergence] ]] keyword[if] identifier[preCellsTagsKeys] [ identifier[i] ]== identifier[postCellGid] keyword[else] identifier[preCellsTagsKeys] [ identifier[i] ]: literal[int] keyword[for] identifier[i] keyword[in] identifier[randSample] [ literal[int] : identifier[convergence] ]} identifier[preCellsConv] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[preCellsTags] . identifier[items] () keyword[if] identifier[k] keyword[in] identifier[preCellsSample] } keyword[for] identifier[preCellGid] , identifier[preCellTags] keyword[in] identifier[preCellsConv] . 
identifier[items] (): keyword[for] identifier[paramStrFunc] keyword[in] identifier[paramsStrFunc] : keyword[for] identifier[funcKey] keyword[in] identifier[funcKeys] [ identifier[paramStrFunc] ]: identifier[connParam] [ identifier[paramStrFunc] + literal[string] ][ identifier[funcKey] ]= identifier[connParam] [ identifier[paramStrFunc] + literal[string] ][ identifier[funcKey] ]( identifier[preCellTags] , identifier[postCellTags] ) keyword[if] identifier[preCellGid] != identifier[postCellGid] : identifier[self] . identifier[_addCellConn] ( identifier[connParam] , identifier[preCellGid] , identifier[postCellGid] )
def convConn(self, preCellsTags, postCellsTags, connParam): from .. import sim ' Generates connections between all pre and post-syn cells based on probability values' if sim.cfg.verbose: print('Generating set of convergent connections (rule: %s) ...' % connParam['label']) # depends on [control=['if'], data=[]] # get list of params that have a lambda function paramsStrFunc = [param for param in [p + 'Func' for p in self.connStringFuncParams] if param in connParam] # copy the vars into args immediately and work out which keys are associated with lambda functions only once per method funcKeys = {} for paramStrFunc in paramsStrFunc: connParam[paramStrFunc + 'Args'] = connParam[paramStrFunc + 'Vars'].copy() funcKeys[paramStrFunc] = [key for key in connParam[paramStrFunc + 'Vars'] if callable(connParam[paramStrFunc + 'Vars'][key])] # depends on [control=['for'], data=['paramStrFunc']] # converted to list only once preCellsTagsKeys = sorted(preCellsTags) # calculate hash for post cell gids hashPreCells = sim.hashList(preCellsTagsKeys) for (postCellGid, postCellTags) in postCellsTags.items(): # for each postsyn cell if postCellGid in self.gid2lid: # check if postsyn is in this node convergence = connParam['convergenceFunc'][postCellGid] if 'convergenceFunc' in connParam else connParam['convergence'] # num of presyn conns / postsyn cell convergence = max(min(int(round(convergence)), len(preCellsTags) - 1), 0) self.rand.Random123(hashPreCells, postCellGid, sim.cfg.seeds['conn']) # init randomizer randSample = self.randUniqueInt(self.rand, convergence + 1, 0, len(preCellsTags) - 1) # note: randSample[divergence] is an extra value used only if one of the random postGids coincided with the preGid preCellsSample = {preCellsTagsKeys[randSample[convergence]] if preCellsTagsKeys[i] == postCellGid else preCellsTagsKeys[i]: 0 for i in randSample[0:convergence]} # dict of selected gids of postsyn cells with removed post gid preCellsConv = {k: v for (k, v) in preCellsTags.items() if k in preCellsSample} # dict of selected presyn cells tags for (preCellGid, preCellTags) in preCellsConv.items(): # for each presyn cell for paramStrFunc in paramsStrFunc: # call lambda functions to get weight func args # update the relevant FuncArgs dict where lambda functions are known to exist in the corresponding FuncVars dict for funcKey in funcKeys[paramStrFunc]: connParam[paramStrFunc + 'Args'][funcKey] = connParam[paramStrFunc + 'Vars'][funcKey](preCellTags, postCellTags) # depends on [control=['for'], data=['funcKey']] # depends on [control=['for'], data=['paramStrFunc']] if preCellGid != postCellGid: # if not self-connection self._addCellConn(connParam, preCellGid, postCellGid) # depends on [control=['if'], data=['preCellGid', 'postCellGid']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['postCellGid']] # depends on [control=['for'], data=[]]
def get_neuroml_from_sonata(sonata_filename, id, generate_lems = True, format='xml'): """ Return a NeuroMLDocument with (most of) the contents of the Sonata model """ from neuroml.hdf5.NetworkBuilder import NetworkBuilder neuroml_handler = NetworkBuilder() sr = SonataReader(filename=sonata_filename, id=id) sr.parse(neuroml_handler) nml_doc = neuroml_handler.get_nml_doc() sr.add_neuroml_components(nml_doc) if format == 'xml': nml_file_name = '%s.net.nml'%id from neuroml.writers import NeuroMLWriter NeuroMLWriter.write(nml_doc, nml_file_name) elif format == 'hdf5': nml_file_name = '%s.net.nml.h5'%id from neuroml.writers import NeuroMLHdf5Writer NeuroMLHdf5Writer.write(nml_doc, nml_file_name) print_v('Written to: %s'%nml_file_name) if generate_lems: lems_file_name = sr.generate_lems_file(nml_file_name, nml_doc) return sr, lems_file_name, nml_file_name, nml_doc return nml_doc
def function[get_neuroml_from_sonata, parameter[sonata_filename, id, generate_lems, format]]: constant[ Return a NeuroMLDocument with (most of) the contents of the Sonata model ] from relative_module[neuroml.hdf5.NetworkBuilder] import module[NetworkBuilder] variable[neuroml_handler] assign[=] call[name[NetworkBuilder], parameter[]] variable[sr] assign[=] call[name[SonataReader], parameter[]] call[name[sr].parse, parameter[name[neuroml_handler]]] variable[nml_doc] assign[=] call[name[neuroml_handler].get_nml_doc, parameter[]] call[name[sr].add_neuroml_components, parameter[name[nml_doc]]] if compare[name[format] equal[==] constant[xml]] begin[:] variable[nml_file_name] assign[=] binary_operation[constant[%s.net.nml] <ast.Mod object at 0x7da2590d6920> name[id]] from relative_module[neuroml.writers] import module[NeuroMLWriter] call[name[NeuroMLWriter].write, parameter[name[nml_doc], name[nml_file_name]]] call[name[print_v], parameter[binary_operation[constant[Written to: %s] <ast.Mod object at 0x7da2590d6920> name[nml_file_name]]]] if name[generate_lems] begin[:] variable[lems_file_name] assign[=] call[name[sr].generate_lems_file, parameter[name[nml_file_name], name[nml_doc]]] return[tuple[[<ast.Name object at 0x7da1b1816380>, <ast.Name object at 0x7da1b18175e0>, <ast.Name object at 0x7da1b18152a0>, <ast.Name object at 0x7da1b1817520>]]] return[name[nml_doc]]
keyword[def] identifier[get_neuroml_from_sonata] ( identifier[sonata_filename] , identifier[id] , identifier[generate_lems] = keyword[True] , identifier[format] = literal[string] ): literal[string] keyword[from] identifier[neuroml] . identifier[hdf5] . identifier[NetworkBuilder] keyword[import] identifier[NetworkBuilder] identifier[neuroml_handler] = identifier[NetworkBuilder] () identifier[sr] = identifier[SonataReader] ( identifier[filename] = identifier[sonata_filename] , identifier[id] = identifier[id] ) identifier[sr] . identifier[parse] ( identifier[neuroml_handler] ) identifier[nml_doc] = identifier[neuroml_handler] . identifier[get_nml_doc] () identifier[sr] . identifier[add_neuroml_components] ( identifier[nml_doc] ) keyword[if] identifier[format] == literal[string] : identifier[nml_file_name] = literal[string] % identifier[id] keyword[from] identifier[neuroml] . identifier[writers] keyword[import] identifier[NeuroMLWriter] identifier[NeuroMLWriter] . identifier[write] ( identifier[nml_doc] , identifier[nml_file_name] ) keyword[elif] identifier[format] == literal[string] : identifier[nml_file_name] = literal[string] % identifier[id] keyword[from] identifier[neuroml] . identifier[writers] keyword[import] identifier[NeuroMLHdf5Writer] identifier[NeuroMLHdf5Writer] . identifier[write] ( identifier[nml_doc] , identifier[nml_file_name] ) identifier[print_v] ( literal[string] % identifier[nml_file_name] ) keyword[if] identifier[generate_lems] : identifier[lems_file_name] = identifier[sr] . identifier[generate_lems_file] ( identifier[nml_file_name] , identifier[nml_doc] ) keyword[return] identifier[sr] , identifier[lems_file_name] , identifier[nml_file_name] , identifier[nml_doc] keyword[return] identifier[nml_doc]
def get_neuroml_from_sonata(sonata_filename, id, generate_lems=True, format='xml'): """ Return a NeuroMLDocument with (most of) the contents of the Sonata model """ from neuroml.hdf5.NetworkBuilder import NetworkBuilder neuroml_handler = NetworkBuilder() sr = SonataReader(filename=sonata_filename, id=id) sr.parse(neuroml_handler) nml_doc = neuroml_handler.get_nml_doc() sr.add_neuroml_components(nml_doc) if format == 'xml': nml_file_name = '%s.net.nml' % id from neuroml.writers import NeuroMLWriter NeuroMLWriter.write(nml_doc, nml_file_name) # depends on [control=['if'], data=[]] elif format == 'hdf5': nml_file_name = '%s.net.nml.h5' % id from neuroml.writers import NeuroMLHdf5Writer NeuroMLHdf5Writer.write(nml_doc, nml_file_name) # depends on [control=['if'], data=[]] print_v('Written to: %s' % nml_file_name) if generate_lems: lems_file_name = sr.generate_lems_file(nml_file_name, nml_doc) return (sr, lems_file_name, nml_file_name, nml_doc) # depends on [control=['if'], data=[]] return nml_doc
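A hypothetical invocation of get_neuroml_from_sonata; the config path and network id below are placeholders, and the sketch assumes neuroml plus the SonataReader defined elsewhere in this module are importable.

# 'config.json' and 'my_network' are placeholder values.
sr, lems_file, nml_file, nml_doc = get_neuroml_from_sonata(
    'config.json', 'my_network', generate_lems=True, format='xml')
print('LEMS: %s, NeuroML: %s' % (lems_file, nml_file))

# With generate_lems=False only the NeuroMLDocument is returned.
nml_doc = get_neuroml_from_sonata(
    'config.json', 'my_network', generate_lems=False, format='hdf5')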
def broken_chains(samples, chains): """Find the broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: :obj:`numpy.ndarray`: A nS x nC boolean array. If i, j is True, then chain j in sample i is broken. Examples: >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 1], [2, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[True, True], [ False, False]]) >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 2], [1, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[False, False], [ True, True]]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError("expected samples to be a numpy 2D array") num_samples, num_variables = samples.shape num_chains = len(chains) broken = np.zeros((num_samples, num_chains), dtype=bool, order='F') for cidx, chain in enumerate(chains): if isinstance(chain, set): chain = list(chain) chain = np.asarray(chain) if chain.ndim > 1: raise ValueError("chains should be 1D array_like objects") # chains of length 1, or 0 cannot be broken if len(chain) <= 1: continue all_ = (samples[:, chain] == 1).all(axis=1) any_ = (samples[:, chain] == 1).any(axis=1) broken[:, cidx] = np.bitwise_xor(all_, any_) return broken
def function[broken_chains, parameter[samples, chains]]: constant[Find the broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: :obj:`numpy.ndarray`: A nS x nC boolean array. If i, j is True, then chain j in sample i is broken. Examples: >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 1], [2, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[True, True], [ False, False]]) >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 2], [1, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[False, False], [ True, True]]) ] variable[samples] assign[=] call[name[np].asarray, parameter[name[samples]]] if compare[name[samples].ndim not_equal[!=] constant[2]] begin[:] <ast.Raise object at 0x7da1b0f068c0> <ast.Tuple object at 0x7da1b0f05660> assign[=] name[samples].shape variable[num_chains] assign[=] call[name[len], parameter[name[chains]]] variable[broken] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b0f076a0>, <ast.Name object at 0x7da1b0f05570>]]]] for taget[tuple[[<ast.Name object at 0x7da1b0f04f40>, <ast.Name object at 0x7da1b0f07880>]]] in starred[call[name[enumerate], parameter[name[chains]]]] begin[:] if call[name[isinstance], parameter[name[chain], name[set]]] begin[:] variable[chain] assign[=] call[name[list], parameter[name[chain]]] variable[chain] assign[=] call[name[np].asarray, parameter[name[chain]]] if compare[name[chain].ndim greater[>] constant[1]] begin[:] <ast.Raise object at 0x7da1b0f075e0> if compare[call[name[len], parameter[name[chain]]] less_or_equal[<=] constant[1]] begin[:] continue variable[all_] assign[=] call[compare[call[name[samples]][tuple[[<ast.Slice object at 0x7da1b0f073a0>, <ast.Name object at 0x7da1b0f07c10>]]] equal[==] constant[1]].all, parameter[]] variable[any_] assign[=] call[compare[call[name[samples]][tuple[[<ast.Slice object at 0x7da1b0f05f30>, <ast.Name object at 0x7da1b0f05450>]]] equal[==] constant[1]].any, parameter[]] call[name[broken]][tuple[[<ast.Slice object at 0x7da1b0f06f80>, <ast.Name object at 0x7da1b0f050c0>]]] assign[=] call[name[np].bitwise_xor, parameter[name[all_], name[any_]]] return[name[broken]]
keyword[def] identifier[broken_chains] ( identifier[samples] , identifier[chains] ): literal[string] identifier[samples] = identifier[np] . identifier[asarray] ( identifier[samples] ) keyword[if] identifier[samples] . identifier[ndim] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[num_samples] , identifier[num_variables] = identifier[samples] . identifier[shape] identifier[num_chains] = identifier[len] ( identifier[chains] ) identifier[broken] = identifier[np] . identifier[zeros] (( identifier[num_samples] , identifier[num_chains] ), identifier[dtype] = identifier[bool] , identifier[order] = literal[string] ) keyword[for] identifier[cidx] , identifier[chain] keyword[in] identifier[enumerate] ( identifier[chains] ): keyword[if] identifier[isinstance] ( identifier[chain] , identifier[set] ): identifier[chain] = identifier[list] ( identifier[chain] ) identifier[chain] = identifier[np] . identifier[asarray] ( identifier[chain] ) keyword[if] identifier[chain] . identifier[ndim] > literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[chain] )<= literal[int] : keyword[continue] identifier[all_] =( identifier[samples] [:, identifier[chain] ]== literal[int] ). identifier[all] ( identifier[axis] = literal[int] ) identifier[any_] =( identifier[samples] [:, identifier[chain] ]== literal[int] ). identifier[any] ( identifier[axis] = literal[int] ) identifier[broken] [:, identifier[cidx] ]= identifier[np] . identifier[bitwise_xor] ( identifier[all_] , identifier[any_] ) keyword[return] identifier[broken]
def broken_chains(samples, chains): """Find the broken chains. Args: samples (array_like): Samples as a nS x nV array_like object where nS is the number of samples and nV is the number of variables. The values should all be 0/1 or -1/+1. chains (list[array_like]): List of chains of length nC where nC is the number of chains. Each chain should be an array_like collection of column indices in samples. Returns: :obj:`numpy.ndarray`: A nS x nC boolean array. If i, j is True, then chain j in sample i is broken. Examples: >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 1], [2, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[True, True], [ False, False]]) >>> samples = np.array([[-1, +1, -1, +1], [-1, -1, +1, +1]], dtype=np.int8) >>> chains = [[0, 2], [1, 3]] >>> dwave.embedding.broken_chains(samples, chains) array([[False, False], [ True, True]]) """ samples = np.asarray(samples) if samples.ndim != 2: raise ValueError('expected samples to be a numpy 2D array') # depends on [control=['if'], data=[]] (num_samples, num_variables) = samples.shape num_chains = len(chains) broken = np.zeros((num_samples, num_chains), dtype=bool, order='F') for (cidx, chain) in enumerate(chains): if isinstance(chain, set): chain = list(chain) # depends on [control=['if'], data=[]] chain = np.asarray(chain) if chain.ndim > 1: raise ValueError('chains should be 1D array_like objects') # depends on [control=['if'], data=[]] # chains of length 1, or 0 cannot be broken if len(chain) <= 1: continue # depends on [control=['if'], data=[]] all_ = (samples[:, chain] == 1).all(axis=1) any_ = (samples[:, chain] == 1).any(axis=1) broken[:, cidx] = np.bitwise_xor(all_, any_) # depends on [control=['for'], data=[]] return broken
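broken_chains can be exercised directly with the docstring data; the second call also shows that chains of length <= 1 are skipped and therefore never reported broken.

import numpy as np

samples = np.array([[-1, +1, -1, +1],
                    [-1, -1, +1, +1]], dtype=np.int8)

# Both chains are mixed-sign (broken) in the first sample only.
print(broken_chains(samples, [[0, 1], [2, 3]]))
# [[ True  True]
#  [False False]]

# Chains of length <= 1 are skipped, so column 0 stays False.
print(broken_chains(samples, [[0], [1, 3]]))
# [[False False]
#  [False  True]]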
def print_data(data_sources): """ Print dataset information in tabular form """ if not data_sources: return headers = ["DATA NAME", "CREATED", "STATUS", "DISK USAGE"] data_list = [] for data_source in data_sources: data_list.append([data_source.name, data_source.created_pretty, data_source.state, data_source.size]) floyd_logger.info(tabulate(data_list, headers=headers))
def function[print_data, parameter[data_sources]]: constant[ Print dataset information in tabular form ] if <ast.UnaryOp object at 0x7da1b0d0f550> begin[:] return[None] variable[headers] assign[=] list[[<ast.Constant object at 0x7da1b0d0dea0>, <ast.Constant object at 0x7da1b0d0cfa0>, <ast.Constant object at 0x7da1b0d0eb90>, <ast.Constant object at 0x7da1b0d0f040>]] variable[data_list] assign[=] list[[]] for taget[name[data_source]] in starred[name[data_sources]] begin[:] call[name[data_list].append, parameter[list[[<ast.Attribute object at 0x7da1b0d0e9b0>, <ast.Attribute object at 0x7da1b0d0d630>, <ast.Attribute object at 0x7da1b0d0ebc0>, <ast.Attribute object at 0x7da1b0d0d2d0>]]]] call[name[floyd_logger].info, parameter[call[name[tabulate], parameter[name[data_list]]]]]
keyword[def] identifier[print_data] ( identifier[data_sources] ): literal[string] keyword[if] keyword[not] identifier[data_sources] : keyword[return] identifier[headers] =[ literal[string] , literal[string] , literal[string] , literal[string] ] identifier[data_list] =[] keyword[for] identifier[data_source] keyword[in] identifier[data_sources] : identifier[data_list] . identifier[append] ([ identifier[data_source] . identifier[name] , identifier[data_source] . identifier[created_pretty] , identifier[data_source] . identifier[state] , identifier[data_source] . identifier[size] ]) identifier[floyd_logger] . identifier[info] ( identifier[tabulate] ( identifier[data_list] , identifier[headers] = identifier[headers] ))
def print_data(data_sources): """ Print dataset information in tabular form """ if not data_sources: return # depends on [control=['if'], data=[]] headers = ['DATA NAME', 'CREATED', 'STATUS', 'DISK USAGE'] data_list = [] for data_source in data_sources: data_list.append([data_source.name, data_source.created_pretty, data_source.state, data_source.size]) # depends on [control=['for'], data=['data_source']] floyd_logger.info(tabulate(data_list, headers=headers))
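print_data only reads four attributes from each data source, so a namedtuple stand-in is enough to exercise it; the names and values below are illustrative, and the sketch assumes floyd_logger and tabulate are configured as in this module.

from collections import namedtuple

# Only the four attributes read by print_data are needed.
FakeSource = namedtuple('FakeSource', 'name created_pretty state size')

print_data([
    FakeSource('mnist', '2 days ago', 'valid', '11 MB'),
    FakeSource('imagenet-sample', '3 weeks ago', 'valid', '1.2 GB'),
])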
def iter_rows(self, start=None, end=None):
        """Iterate over the rows of this region, yielding each row from
        *start* (default 0) up to, but not including, *end* (default
        ``self.nrows``)."""
        start = start or 0
        end = self.nrows if end is None else end  # `or` would wrongly turn end=0 into nrows
        for i in range(start, end):
            yield self.iloc[i, :]
def function[iter_rows, parameter[self, start, end]]: constant[Iterate each of the Region rows in this region] variable[start] assign[=] <ast.BoolOp object at 0x7da1b1d39b70> variable[end] assign[=] <ast.BoolOp object at 0x7da1b1d3b9a0> for taget[name[i]] in starred[call[name[range], parameter[name[start], name[end]]]] begin[:] <ast.Yield object at 0x7da1b1d39990>
keyword[def] identifier[iter_rows] ( identifier[self] , identifier[start] = keyword[None] , identifier[end] = keyword[None] ): literal[string] identifier[start] = identifier[start] keyword[or] literal[int] identifier[end] = identifier[end] keyword[or] identifier[self] . identifier[nrows] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[start] , identifier[end] ): keyword[yield] identifier[self] . identifier[iloc] [ identifier[i] ,:]
def iter_rows(self, start=None, end=None): """Iterate each of the Region rows in this region""" start = start or 0 end = end or self.nrows for i in range(start, end): yield self.iloc[i, :] # depends on [control=['for'], data=['i']]
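iter_rows relies only on `self.nrows` and positional `self.iloc` indexing, so a pandas DataFrame subclass is one way to sketch a minimal stand-in; this Region class is illustrative, not the project's actual Region type.

import pandas as pd

class Region(pd.DataFrame):
    # Illustrative stand-in: only `nrows` and positional `iloc` are needed.
    iter_rows = iter_rows

    @property
    def nrows(self):
        return len(self)

region = Region({'a': [1, 2, 3], 'b': [4, 5, 6]})
for row in region.iter_rows(end=2):
    print(row['a'], row['b'])
# 1 4
# 2 5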
def IntGreaterThanZero(n):
    """If *n* is an integer > 0, returns it, otherwise raises ValueError."""
    try:
        n = int(n)
    except (TypeError, ValueError):  # a bare except would also swallow KeyboardInterrupt
        raise ValueError("%s is not an integer" % n)
    if n <= 0:
        raise ValueError("%d is not > 0" % n)
    else:
        return n
def function[IntGreaterThanZero, parameter[n]]: constant[If *n* is an integer > 0, returns it, otherwise an error.] <ast.Try object at 0x7da1b2346110> if compare[name[n] less_or_equal[<=] constant[0]] begin[:] <ast.Raise object at 0x7da1b2344a90>
keyword[def] identifier[IntGreaterThanZero] ( identifier[n] ): literal[string] keyword[try] : identifier[n] = identifier[int] ( identifier[n] ) keyword[except] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[n] ) keyword[if] identifier[n] <= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[n] ) keyword[else] : keyword[return] identifier[n]
def IntGreaterThanZero(n): """If *n* is an integer > 0, returns it, otherwise an error.""" try: n = int(n) # depends on [control=['try'], data=[]] except: raise ValueError('%s is not an integer' % n) # depends on [control=['except'], data=[]] if n <= 0: raise ValueError('%d is not > 0' % n) # depends on [control=['if'], data=['n']] else: return n
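A short usage check of IntGreaterThanZero, showing the pass-through case and both failure modes.

print(IntGreaterThanZero('7'))    # 7  (anything int() accepts passes through)
try:
    IntGreaterThanZero(0)
except ValueError as err:
    print(err)                    # 0 is not > 0
try:
    IntGreaterThanZero('abc')
except ValueError as err:
    print(err)                    # abc is not an integer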
def project(self, x, vector):
        '''Project a vector (gradient or direction) on the active constraints.

           Arguments:
            | ``x``  --  The unknowns.
            | ``vector``  --  A numpy array with a direction or a gradient.

           The return value is a gradient or direction, where the components
           that point away from the constraints are projected out. In case of
           half-open constraints, the projection is only active if the vector
           points into the infeasible region.
        '''
        scale = np.linalg.norm(vector)
        if scale == 0.0:
            return vector
        self.lock[:] = False
        normals, signs = self._compute_equations(x)[::3]
        if len(normals) == 0:
            return vector
        vector = vector/scale
        mask = signs == 0
        result = vector.copy()
        changed = True
        counter = 0
        while changed:
            changed = False
            y = np.dot(normals, result)
            for i, sign in enumerate(signs):
                if sign != 0:
                    if sign*y[i] < -self.threshold:
                        mask[i] = True
                        changed = True
                    elif mask[i] and np.dot(normals[i], result-vector) < 0:
                        mask[i] = False
                        changed = True
            if mask.any():
                normals_select = normals[mask]
                y = np.dot(normals_select, vector)
                U, S, Vt = np.linalg.svd(normals_select, full_matrices=False)
                if S.min() == 0.0:
                    Sinv = S/(S**2+self.rcond1)
                else:
                    Sinv = 1.0/S
                result = vector - np.dot(Vt.transpose(), np.dot(U.transpose(), y)*Sinv)
            else:
                result = vector.copy()
            if counter > self.max_iter:
                raise ConstraintError('Exceeded maximum number of shake iterations.')
            counter += 1
        return result*scale
def function[project, parameter[self, x, vector]]: constant[Project a vector (gradient or direction) on the active constraints. Arguments: | ``x`` -- The unknowns. | ``vector`` -- A numpy array with a direction or a gradient. The return value is a gradient or direction, where the components that point away from the constraints are projected out. In case of half-open constraints, the projection is only active of the vector points into the infeasible region. ] variable[scale] assign[=] call[name[np].linalg.norm, parameter[name[vector]]] if compare[name[scale] equal[==] constant[0.0]] begin[:] return[name[vector]] call[name[self].lock][<ast.Slice object at 0x7da18eb55210>] assign[=] constant[False] <ast.Tuple object at 0x7da18eb56110> assign[=] call[call[name[self]._compute_equations, parameter[name[x]]]][<ast.Slice object at 0x7da18eb57b50>] if compare[call[name[len], parameter[name[normals]]] equal[==] constant[0]] begin[:] return[name[vector]] variable[vector] assign[=] binary_operation[name[vector] / name[scale]] variable[mask] assign[=] compare[name[signs] equal[==] constant[0]] variable[result] assign[=] call[name[vector].copy, parameter[]] variable[changed] assign[=] constant[True] variable[counter] assign[=] constant[0] while name[changed] begin[:] variable[changed] assign[=] constant[False] variable[y] assign[=] call[name[np].dot, parameter[name[normals], name[result]]] for taget[tuple[[<ast.Name object at 0x7da18eb55600>, <ast.Name object at 0x7da18eb56ef0>]]] in starred[call[name[enumerate], parameter[name[signs]]]] begin[:] if compare[name[sign] not_equal[!=] constant[0]] begin[:] if compare[binary_operation[name[sign] * call[name[y]][name[i]]] less[<] <ast.UnaryOp object at 0x7da18eb560e0>] begin[:] call[name[mask]][name[i]] assign[=] constant[True] variable[changed] assign[=] constant[True] if call[name[mask].any, parameter[]] begin[:] variable[normals_select] assign[=] call[name[normals]][name[mask]] variable[y] assign[=] call[name[np].dot, parameter[name[normals_select], name[vector]]] <ast.Tuple object at 0x7da2047e97b0> assign[=] call[name[np].linalg.svd, parameter[name[normals_select]]] if compare[call[name[S].min, parameter[]] equal[==] constant[0.0]] begin[:] variable[Sinv] assign[=] binary_operation[name[S] / binary_operation[binary_operation[name[S] ** constant[2]] + name[self].rcond1]] variable[result] assign[=] binary_operation[name[vector] - call[name[np].dot, parameter[call[name[Vt].transpose, parameter[]], binary_operation[call[name[np].dot, parameter[call[name[U].transpose, parameter[]], name[y]]] * name[Sinv]]]]] if compare[name[counter] greater[>] name[self].max_iter] begin[:] <ast.Raise object at 0x7da2047e8550> <ast.AugAssign object at 0x7da2047e8730> return[binary_operation[name[result] * name[scale]]]
keyword[def] identifier[project] ( identifier[self] , identifier[x] , identifier[vector] ): literal[string] identifier[scale] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[vector] ) keyword[if] identifier[scale] == literal[int] : keyword[return] identifier[vector] identifier[self] . identifier[lock] [:]= keyword[False] identifier[normals] , identifier[signs] = identifier[self] . identifier[_compute_equations] ( identifier[x] )[:: literal[int] ] keyword[if] identifier[len] ( identifier[normals] )== literal[int] : keyword[return] identifier[vector] identifier[vector] = identifier[vector] / identifier[scale] identifier[mask] = identifier[signs] == literal[int] identifier[result] = identifier[vector] . identifier[copy] () identifier[changed] = keyword[True] identifier[counter] = literal[int] keyword[while] identifier[changed] : identifier[changed] = keyword[False] identifier[y] = identifier[np] . identifier[dot] ( identifier[normals] , identifier[result] ) keyword[for] identifier[i] , identifier[sign] keyword[in] identifier[enumerate] ( identifier[signs] ): keyword[if] identifier[sign] != literal[int] : keyword[if] identifier[sign] * identifier[y] [ identifier[i] ]<- identifier[self] . identifier[threshold] : identifier[mask] [ identifier[i] ]= keyword[True] identifier[changed] = keyword[True] keyword[elif] identifier[mask] [ identifier[i] ] keyword[and] identifier[np] . identifier[dot] ( identifier[normals] [ identifier[i] ], identifier[result] - identifier[vector] )< literal[int] : identifier[mask] [ identifier[i] ]= keyword[False] identifier[changed] = keyword[True] keyword[if] identifier[mask] . identifier[any] (): identifier[normals_select] = identifier[normals] [ identifier[mask] ] identifier[y] = identifier[np] . identifier[dot] ( identifier[normals_select] , identifier[vector] ) identifier[U] , identifier[S] , identifier[Vt] = identifier[np] . identifier[linalg] . identifier[svd] ( identifier[normals_select] , identifier[full_matrices] = keyword[False] ) keyword[if] identifier[S] . identifier[min] ()== literal[int] : identifier[Sinv] = identifier[S] /( identifier[S] ** literal[int] + identifier[self] . identifier[rcond1] ) keyword[else] : identifier[Sinv] = literal[int] / identifier[S] identifier[result] = identifier[vector] - identifier[np] . identifier[dot] ( identifier[Vt] . identifier[transpose] (), identifier[np] . identifier[dot] ( identifier[U] . identifier[transpose] (), identifier[y] )* identifier[Sinv] ) keyword[else] : identifier[result] = identifier[vector] . identifier[copy] () keyword[if] identifier[counter] > identifier[self] . identifier[max_iter] : keyword[raise] identifier[ConstraintError] ( literal[string] ) identifier[counter] += literal[int] keyword[return] identifier[result] * identifier[scale]
def project(self, x, vector): """Project a vector (gradient or direction) on the active constraints. Arguments: | ``x`` -- The unknowns. | ``vector`` -- A numpy array with a direction or a gradient. The return value is a gradient or direction, where the components that point away from the constraints are projected out. In case of half-open constraints, the projection is only active of the vector points into the infeasible region. """ scale = np.linalg.norm(vector) if scale == 0.0: return vector # depends on [control=['if'], data=[]] self.lock[:] = False (normals, signs) = self._compute_equations(x)[::3] if len(normals) == 0: return vector # depends on [control=['if'], data=[]] vector = vector / scale mask = signs == 0 result = vector.copy() changed = True counter = 0 while changed: changed = False y = np.dot(normals, result) for (i, sign) in enumerate(signs): if sign != 0: if sign * y[i] < -self.threshold: mask[i] = True changed = True # depends on [control=['if'], data=[]] elif mask[i] and np.dot(normals[i], result - vector) < 0: mask[i] = False changed = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['sign']] # depends on [control=['for'], data=[]] if mask.any(): normals_select = normals[mask] y = np.dot(normals_select, vector) (U, S, Vt) = np.linalg.svd(normals_select, full_matrices=False) if S.min() == 0.0: Sinv = S / (S ** 2 + self.rcond1) # depends on [control=['if'], data=[]] else: Sinv = 1.0 / S result = vector - np.dot(Vt.transpose(), np.dot(U.transpose(), y) * Sinv) # depends on [control=['if'], data=[]] else: result = vector.copy() if counter > self.max_iter: raise ConstraintError('Exceeded maximum number of shake iterations.') # depends on [control=['if'], data=[]] counter += 1 # depends on [control=['while'], data=[]] return result * scale
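The core of the projection step in project above, isolated: remove the components of a vector along a set of (possibly dependent) constraint normals via an SVD pseudo-inverse. `project_out` and `rcond` are illustrative stand-ins for the class internals (`self.rcond1`).

import numpy as np

def project_out(normals, vector, rcond=1e-10):
    # Same pseudo-inverse construction as the loop body above.
    y = np.dot(normals, vector)
    U, S, Vt = np.linalg.svd(normals, full_matrices=False)
    Sinv = S / (S**2 + rcond) if S.min() == 0.0 else 1.0 / S
    return vector - np.dot(Vt.T, np.dot(U.T, y) * Sinv)

normals = np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
v = np.array([1.0, 2.0, 3.0])
print(project_out(normals, v))   # [0. 0. 3.] -- only the free component survives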
def project_variant_forward(self, c_variant): """ project c_variant on the source transcript onto the destination transcript :param c_variant: an :class:`hgvs.sequencevariant.SequenceVariant` object on the source transcript :returns: c_variant: an :class:`hgvs.sequencevariant.SequenceVariant` object on the destination transcript """ if c_variant.ac != self.src_tm.tx_ac: raise RuntimeError("variant accession does not match that used to initialize " + __name__) new_c_variant = copy.deepcopy(c_variant) new_c_variant.ac = self.dst_tm.tx_ac new_c_variant.posedit.pos = self.project_interval_forward(c_variant.posedit.pos) return new_c_variant
def function[project_variant_forward, parameter[self, c_variant]]: constant[ project c_variant on the source transcript onto the destination transcript :param c_variant: an :class:`hgvs.sequencevariant.SequenceVariant` object on the source transcript :returns: c_variant: an :class:`hgvs.sequencevariant.SequenceVariant` object on the destination transcript ] if compare[name[c_variant].ac not_equal[!=] name[self].src_tm.tx_ac] begin[:] <ast.Raise object at 0x7da1b20a8670> variable[new_c_variant] assign[=] call[name[copy].deepcopy, parameter[name[c_variant]]] name[new_c_variant].ac assign[=] name[self].dst_tm.tx_ac name[new_c_variant].posedit.pos assign[=] call[name[self].project_interval_forward, parameter[name[c_variant].posedit.pos]] return[name[new_c_variant]]
keyword[def] identifier[project_variant_forward] ( identifier[self] , identifier[c_variant] ): literal[string] keyword[if] identifier[c_variant] . identifier[ac] != identifier[self] . identifier[src_tm] . identifier[tx_ac] : keyword[raise] identifier[RuntimeError] ( literal[string] + identifier[__name__] ) identifier[new_c_variant] = identifier[copy] . identifier[deepcopy] ( identifier[c_variant] ) identifier[new_c_variant] . identifier[ac] = identifier[self] . identifier[dst_tm] . identifier[tx_ac] identifier[new_c_variant] . identifier[posedit] . identifier[pos] = identifier[self] . identifier[project_interval_forward] ( identifier[c_variant] . identifier[posedit] . identifier[pos] ) keyword[return] identifier[new_c_variant]
def project_variant_forward(self, c_variant): """ project c_variant on the source transcript onto the destination transcript :param c_variant: an :class:`hgvs.sequencevariant.SequenceVariant` object on the source transcript :returns: c_variant: an :class:`hgvs.sequencevariant.SequenceVariant` object on the destination transcript """ if c_variant.ac != self.src_tm.tx_ac: raise RuntimeError('variant accession does not match that used to initialize ' + __name__) # depends on [control=['if'], data=[]] new_c_variant = copy.deepcopy(c_variant) new_c_variant.ac = self.dst_tm.tx_ac new_c_variant.posedit.pos = self.project_interval_forward(c_variant.posedit.pos) return new_c_variant
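A runnable sketch of project_variant_forward with minimal stand-ins: the _TM class, the identity project_interval_forward, and the accession strings are all hypothetical, chosen only to satisfy the attributes the method touches.

import copy
from types import SimpleNamespace

class _TM:
    # Minimal transcript-mapper stand-in (illustrative).
    def __init__(self, tx_ac):
        self.tx_ac = tx_ac

class Mapper:
    project_variant_forward = project_variant_forward
    def __init__(self):
        self.src_tm = _TM('NM_000001.1')
        self.dst_tm = _TM('NM_000001.2')
    def project_interval_forward(self, pos):
        return pos  # identity stand-in for the real interval projection

var_c = SimpleNamespace(ac='NM_000001.1',
                        posedit=SimpleNamespace(pos=(76, 76)))
new_var = Mapper().project_variant_forward(var_c)
print(new_var.ac)        # NM_000001.2
print(var_c.ac)          # NM_000001.1 -- the input is deep-copied, not mutated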
def predict(self, a, b): """ Compute the test statistic Args: a (array-like): Variable 1 b (array-like): Variable 2 Returns: float: test statistic """ a = np.array(a).reshape((-1, 1)) b = np.array(b).reshape((-1, 1)) return sp.kendalltau(a, b)[0]
def function[predict, parameter[self, a, b]]: constant[ Compute the test statistic Args: a (array-like): Variable 1 b (array-like): Variable 2 Returns: float: test statistic ] variable[a] assign[=] call[call[name[np].array, parameter[name[a]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da1b0169e10>, <ast.Constant object at 0x7da1b0169db0>]]]] variable[b] assign[=] call[call[name[np].array, parameter[name[b]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da1b016b9d0>, <ast.Constant object at 0x7da1b016ba30>]]]] return[call[call[name[sp].kendalltau, parameter[name[a], name[b]]]][constant[0]]]
keyword[def] identifier[predict] ( identifier[self] , identifier[a] , identifier[b] ): literal[string] identifier[a] = identifier[np] . identifier[array] ( identifier[a] ). identifier[reshape] ((- literal[int] , literal[int] )) identifier[b] = identifier[np] . identifier[array] ( identifier[b] ). identifier[reshape] ((- literal[int] , literal[int] )) keyword[return] identifier[sp] . identifier[kendalltau] ( identifier[a] , identifier[b] )[ literal[int] ]
def predict(self, a, b): """ Compute the test statistic Args: a (array-like): Variable 1 b (array-like): Variable 2 Returns: float: test statistic """ a = np.array(a).reshape((-1, 1)) b = np.array(b).reshape((-1, 1)) return sp.kendalltau(a, b)[0]
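The method above wraps scipy.stats.kendalltau, which returns a (statistic, p-value) pair; predict keeps only the statistic. A direct check, assuming `sp` is the scipy.stats alias used by the class:

import numpy as np
import scipy.stats as sp  # the `sp` alias assumed by the method above

rng = np.random.RandomState(0)
a = rng.rand(200)
b = a + 0.1 * rng.rand(200)   # strongly, monotonically related to a

tau, p_value = sp.kendalltau(a.reshape(-1, 1), b.reshape(-1, 1))
print(round(tau, 2))          # high, near 1.0; predict() returns only tau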
def make_class_method_decorator(classkey, modname=None):
    """
    register a class to be injectable

    classkey is a key that identifies the injected class
    REMEMBER to call inject_instance in __init__

    Args:
        classkey : the class to be injected into
        modname : the global __name__ of the module you are injecting from

    Returns:
        closure_decorate_class_method (func): decorator for injectable methods

    Example:
        >>> # ENABLE_DOCTEST
        >>> import utool as ut
        >>> class CheeseShop(object):
        ...     def __init__(self):
        ...         import utool as ut
        ...         ut.inject_all_external_modules(self)
        >>> cheeseshop_method = ut.make_class_method_decorator(CheeseShop)
        >>> shop1 = CheeseShop()
        >>> assert not hasattr(shop1, 'has_cheese'), 'have not injected yet'
        >>> @cheeseshop_method
        >>> def has_cheese(self):
        >>>     return False
        >>> shop2 = CheeseShop()
        >>> assert shop2.has_cheese() is False, 'external method not injected'
        >>> print('Cheese shop does not have cheese. All is well.')
    """
    global __APP_MODNAME_REGISTER__
    #if util_arg.VERBOSE or VERBOSE_CLASS:
    if VERBOSE_CLASS:
        print('[util_class] register via make_class_method_decorator classkey=%r, modname=%r'
              % (classkey, modname))
    if modname == '__main__':
        # skips reinjects into main
        print('WARNING: cannot register classkey=%r functions as __main__' % (classkey,))
        return lambda func: func
    # register that this module was injected into
    if isinstance(classkey, tuple):
        classname, _ = classkey
        __CLASSNAME_CLASSKEY_REGISTER__[classname].append(modname)
    elif isinstance(classkey, type):
        classname = classkey.__name__
        if modname is not None:
            assert modname == classkey.__module__, (
                'modname=%r does not agree with __module__=%r' % (
                    modname, classkey.__module__))
        modname = classkey.__module__
        # Convert to new classkey format
        classkey = (classname, modname)
        __CLASSNAME_CLASSKEY_REGISTER__[classname].append(modname)
    else:
        print('Warning not using classkey for %r %r' % (classkey, modname))
        raise AssertionError('classkey no longer supported. Use class_inject_key instead')
    closure_decorate_class_method = functools.partial(decorate_class_method,
                                                      classkey=classkey)
    return closure_decorate_class_method
def function[make_class_method_decorator, parameter[classkey, modname]]: constant[ register a class to be injectable classkey is a key that identifies the injected class REMEMBER to call inject_instance in __init__ Args: classkey : the class to be injected into modname : the global __name__ of the module youa re injecting from Returns: closure_decorate_class_method (func): decorator for injectable methods Example: >>> # ENABLE_DOCTEST >>> import utool as ut >>> class CheeseShop(object): ... def __init__(self): ... import utool as ut ... ut.inject_all_external_modules(self) >>> cheeseshop_method = ut.make_class_method_decorator(CheeseShop) >>> shop1 = CheeseShop() >>> assert not hasattr(shop1, 'has_cheese'), 'have not injected yet' >>> @cheeseshop_method >>> def has_cheese(self): >>> return False >>> shop2 = CheeseShop() >>> assert shop2.has_cheese() is False, 'external method not injected' >>> print('Cheese shop does not have cheese. All is well.') ] <ast.Global object at 0x7da1b2406980> if name[VERBOSE_CLASS] begin[:] call[name[print], parameter[binary_operation[constant[[util_class] register via make_class_method_decorator classkey=%r, modname=%r] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2406bf0>, <ast.Name object at 0x7da1b2406a40>]]]]] if compare[name[modname] equal[==] constant[__main__]] begin[:] call[name[print], parameter[binary_operation[constant[WARNING: cannot register classkey=%r functions as __main__] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b2406650>]]]]] return[<ast.Lambda object at 0x7da1b24050c0>] if call[name[isinstance], parameter[name[classkey], name[tuple]]] begin[:] <ast.Tuple object at 0x7da1b2406410> assign[=] name[classkey] call[call[name[__CLASSNAME_CLASSKEY_REGISTER__]][name[classname]].append, parameter[name[modname]]] variable[closure_decorate_class_method] assign[=] call[name[functools].partial, parameter[name[decorate_class_method]]] return[name[closure_decorate_class_method]]
keyword[def] identifier[make_class_method_decorator] ( identifier[classkey] , identifier[modname] = keyword[None] ): literal[string] keyword[global] identifier[__APP_MODNAME_REGISTER__] keyword[if] identifier[VERBOSE_CLASS] : identifier[print] ( literal[string] %( identifier[classkey] , identifier[modname] )) keyword[if] identifier[modname] == literal[string] : identifier[print] ( literal[string] %( identifier[classkey] ,)) keyword[return] keyword[lambda] identifier[func] : identifier[func] keyword[if] identifier[isinstance] ( identifier[classkey] , identifier[tuple] ): identifier[classname] , identifier[_] = identifier[classkey] identifier[__CLASSNAME_CLASSKEY_REGISTER__] [ identifier[classname] ]. identifier[append] ( identifier[modname] ) keyword[elif] identifier[isinstance] ( identifier[classkey] , identifier[type] ): identifier[classname] = identifier[classkey] . identifier[__name__] keyword[if] identifier[modname] keyword[is] keyword[not] keyword[None] : keyword[assert] identifier[modname] == identifier[classkey] . identifier[__module__] ,( literal[string] %( identifier[modname] , identifier[classkey] . identifier[__module__] )) identifier[modname] = identifier[classkey] . identifier[__module__] identifier[classkey] =( identifier[classname] , identifier[modname] ) identifier[__CLASSNAME_CLASSKEY_REGISTER__] [ identifier[classname] ]. identifier[append] ( identifier[modname] ) keyword[else] : identifier[print] ( literal[string] %( identifier[classkey] , identifier[modname] )) keyword[raise] identifier[AssertionError] ( literal[string] ) identifier[closure_decorate_class_method] = identifier[functools] . identifier[partial] ( identifier[decorate_class_method] , identifier[classkey] = identifier[classkey] ) keyword[return] identifier[closure_decorate_class_method]
def make_class_method_decorator(classkey, modname=None): """ register a class to be injectable classkey is a key that identifies the injected class REMEMBER to call inject_instance in __init__ Args: classkey : the class to be injected into modname : the global __name__ of the module youa re injecting from Returns: closure_decorate_class_method (func): decorator for injectable methods Example: >>> # ENABLE_DOCTEST >>> import utool as ut >>> class CheeseShop(object): ... def __init__(self): ... import utool as ut ... ut.inject_all_external_modules(self) >>> cheeseshop_method = ut.make_class_method_decorator(CheeseShop) >>> shop1 = CheeseShop() >>> assert not hasattr(shop1, 'has_cheese'), 'have not injected yet' >>> @cheeseshop_method >>> def has_cheese(self): >>> return False >>> shop2 = CheeseShop() >>> assert shop2.has_cheese() is False, 'external method not injected' >>> print('Cheese shop does not have cheese. All is well.') """ global __APP_MODNAME_REGISTER__ #if util_arg.VERBOSE or VERBOSE_CLASS: if VERBOSE_CLASS: print('[util_class] register via make_class_method_decorator classkey=%r, modname=%r' % (classkey, modname)) # depends on [control=['if'], data=[]] if modname == '__main__': # skips reinjects into main print('WARNING: cannot register classkey=%r functions as __main__' % (classkey,)) return lambda func: func # depends on [control=['if'], data=[]] # register that this module was injected into if isinstance(classkey, tuple): (classname, _) = classkey __CLASSNAME_CLASSKEY_REGISTER__[classname].append(modname) # depends on [control=['if'], data=[]] elif isinstance(classkey, type): classname = classkey.__name__ if modname is not None: assert modname == classkey.__module__, 'modname=%r does not agree with __module__=%r' % (modname, classkey.__module__) # depends on [control=['if'], data=['modname']] modname = classkey.__module__ # Convert to new classkey format classkey = (classname, modname) __CLASSNAME_CLASSKEY_REGISTER__[classname].append(modname) # depends on [control=['if'], data=[]] else: print('Warning not using classkey for %r %r' % (classkey, modname)) raise AssertionError('classkey no longer supported. Use class_inject_key instead') closure_decorate_class_method = functools.partial(decorate_class_method, classkey=classkey) return closure_decorate_class_method
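Per the `isinstance(classkey, tuple)` branch above, the key may also be given as a (classname, modname) pair, which avoids importing the class at decoration time. Names below are illustrative, and the sketch assumes utool's registry globals are set up as in this module.

cheeseshop_method = make_class_method_decorator(('CheeseShop', __name__))

@cheeseshop_method
def restock(self, kind):
    return 'no %s today' % kind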
def do_step(self, values, xy_values, coeff, width):
        """Calculates pairwise forces between diagrams and pushes them apart by a tenth of the width"""
        forces = {k: [] for k, i in enumerate(xy_values)}
        for (index1, value1), (index2, value2) in combinations(enumerate(xy_values), 2):
            f = self.calc_2d_forces(value1[0], value1[1], value2[0], value2[1], width)
            if coeff[index1] < coeff[index2]:
                if self.b_lenght - coeff[index2] < self.b_lenght / 10:
                    # a quick and dirty solution, but works
                    forces[index1].append(f[1])  # push to left (smaller projection value)
                    forces[index2].append(f[0])
                else:
                    # all is normal
                    forces[index1].append(f[0])  # push to left (smaller projection value)
                    forces[index2].append(f[1])
            else:
                if self.b_lenght - coeff[index1] < self.b_lenght / 10:
                    # a quick and dirty solution, but works
                    forces[index1].append(f[0])  # push to left (smaller projection value)
                    forces[index2].append(f[1])
                else:
                    # if all is normal
                    forces[index1].append(f[1])  # push to left (smaller projection value)
                    forces[index2].append(f[0])
        forces = {k: sum(v) for k, v in forces.items()}
        energy = sum([abs(x) for x in forces.values()])
        return [(forces[k] / 10 + v) for k, v in enumerate(values)], energy
def function[do_step, parameter[self, values, xy_values, coeff, width]]: constant[Calculates forces between two diagrams and pushes them apart by tenth of width] variable[forces] assign[=] <ast.DictComp object at 0x7da18bc73df0> for taget[tuple[[<ast.Tuple object at 0x7da18bc705b0>, <ast.Tuple object at 0x7da18bc72a70>]]] in starred[call[name[combinations], parameter[call[name[enumerate], parameter[name[xy_values]]], constant[2]]]] begin[:] variable[f] assign[=] call[name[self].calc_2d_forces, parameter[call[name[value1]][constant[0]], call[name[value1]][constant[1]], call[name[value2]][constant[0]], call[name[value2]][constant[1]], name[width]]] if compare[call[name[coeff]][name[index1]] less[<] call[name[coeff]][name[index2]]] begin[:] if compare[binary_operation[name[self].b_lenght - call[name[coeff]][name[index2]]] less[<] binary_operation[name[self].b_lenght / constant[10]]] begin[:] call[call[name[forces]][name[index1]].append, parameter[call[name[f]][constant[1]]]] call[call[name[forces]][name[index2]].append, parameter[call[name[f]][constant[0]]]] variable[forces] assign[=] <ast.DictComp object at 0x7da20cabce80> variable[energy] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da20cabc5e0>]] return[tuple[[<ast.ListComp object at 0x7da20cabe080>, <ast.Name object at 0x7da20cabebf0>]]]
keyword[def] identifier[do_step] ( identifier[self] , identifier[values] , identifier[xy_values] , identifier[coeff] , identifier[width] ): literal[string] identifier[forces] ={ identifier[k] :[] keyword[for] identifier[k] , identifier[i] keyword[in] identifier[enumerate] ( identifier[xy_values] )} keyword[for] ( identifier[index1] , identifier[value1] ),( identifier[index2] , identifier[value2] ) keyword[in] identifier[combinations] ( identifier[enumerate] ( identifier[xy_values] ), literal[int] ): identifier[f] = identifier[self] . identifier[calc_2d_forces] ( identifier[value1] [ literal[int] ], identifier[value1] [ literal[int] ], identifier[value2] [ literal[int] ], identifier[value2] [ literal[int] ], identifier[width] ) keyword[if] identifier[coeff] [ identifier[index1] ]< identifier[coeff] [ identifier[index2] ]: keyword[if] identifier[self] . identifier[b_lenght] - identifier[coeff] [ identifier[index2] ]< identifier[self] . identifier[b_lenght] / literal[int] : identifier[forces] [ identifier[index1] ]. identifier[append] ( identifier[f] [ literal[int] ]) identifier[forces] [ identifier[index2] ]. identifier[append] ( identifier[f] [ literal[int] ]) keyword[else] : identifier[forces] [ identifier[index1] ]. identifier[append] ( identifier[f] [ literal[int] ]) identifier[forces] [ identifier[index2] ]. identifier[append] ( identifier[f] [ literal[int] ]) keyword[else] : keyword[if] identifier[self] . identifier[b_lenght] - identifier[coeff] [ identifier[index1] ]< identifier[self] . identifier[b_lenght] / literal[int] : identifier[forces] [ identifier[index1] ]. identifier[append] ( identifier[f] [ literal[int] ]) identifier[forces] [ identifier[index2] ]. identifier[append] ( identifier[f] [ literal[int] ]) keyword[else] : identifier[forces] [ identifier[index1] ]. identifier[append] ( identifier[f] [ literal[int] ]) identifier[forces] [ identifier[index2] ]. identifier[append] ( identifier[f] [ literal[int] ]) identifier[forces] ={ identifier[k] : identifier[sum] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[forces] . identifier[items] ()} identifier[energy] = identifier[sum] ([ identifier[abs] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[forces] . identifier[values] ()]) keyword[return] [( identifier[forces] [ identifier[k] ]/ literal[int] + identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[enumerate] ( identifier[values] )], identifier[energy]
def do_step(self, values, xy_values, coeff, width): """Calculates forces between two diagrams and pushes them apart by tenth of width""" forces = {k: [] for (k, i) in enumerate(xy_values)} for ((index1, value1), (index2, value2)) in combinations(enumerate(xy_values), 2): f = self.calc_2d_forces(value1[0], value1[1], value2[0], value2[1], width) if coeff[index1] < coeff[index2]: if self.b_lenght - coeff[index2] < self.b_lenght / 10: #a quick and dirty solution, but works forces[index1].append(f[1]) # push to left (smaller projection value) forces[index2].append(f[0]) # depends on [control=['if'], data=[]] else: #all is normal forces[index1].append(f[0]) # push to left (smaller projection value) forces[index2].append(f[1]) # depends on [control=['if'], data=[]] elif self.b_lenght - coeff[index1] < self.b_lenght / 10: #a quick and dirty solution, but works forces[index1].append(f[0]) # push to left (smaller projection value) forces[index2].append(f[1]) # depends on [control=['if'], data=[]] else: #if all is normal forces[index1].append(f[1]) # push to left (smaller projection value) forces[index2].append(f[0]) # depends on [control=['for'], data=[]] forces = {k: sum(v) for (k, v) in forces.items()} energy = sum([abs(x) for x in forces.values()]) return ([forces[k] / 10 + v for (k, v) in enumerate(values)], energy)
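The accumulation pattern in do_step, reduced to a toy 1-D example: every unordered pair from itertools.combinations contributes one force to each member, and per-item forces are then summed. `pairwise_repulsion` and its force law are illustrative only, not the class's calc_2d_forces.

from itertools import combinations

def pairwise_repulsion(points, width):
    forces = {k: 0.0 for k, _ in enumerate(points)}
    for (i, p), (j, q) in combinations(enumerate(points), 2):
        d = p - q
        f = width / d if abs(d) > 1e-9 else width   # toy 1-D force
        forces[i] += f   # equal and opposite contributions per pair
        forces[j] -= f
    return forces

print(pairwise_repulsion([0.0, 1.0, 3.0], width=1.0))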
def _DecodeUnknownMessages(message, encoded_message, pair_type): """Process unknown fields in encoded_message of a message type.""" field_type = pair_type.value.type new_values = [] all_field_names = [x.name for x in message.all_fields()] for name, value_dict in six.iteritems(encoded_message): if name in all_field_names: continue value = PyValueToMessage(field_type, value_dict) if pair_type.value.repeated: value = _AsMessageList(value) new_pair = pair_type(key=name, value=value) new_values.append(new_pair) return new_values
def function[_DecodeUnknownMessages, parameter[message, encoded_message, pair_type]]: constant[Process unknown fields in encoded_message of a message type.] variable[field_type] assign[=] name[pair_type].value.type variable[new_values] assign[=] list[[]] variable[all_field_names] assign[=] <ast.ListComp object at 0x7da1b0847970> for taget[tuple[[<ast.Name object at 0x7da1b07b9450>, <ast.Name object at 0x7da1b07bb3d0>]]] in starred[call[name[six].iteritems, parameter[name[encoded_message]]]] begin[:] if compare[name[name] in name[all_field_names]] begin[:] continue variable[value] assign[=] call[name[PyValueToMessage], parameter[name[field_type], name[value_dict]]] if name[pair_type].value.repeated begin[:] variable[value] assign[=] call[name[_AsMessageList], parameter[name[value]]] variable[new_pair] assign[=] call[name[pair_type], parameter[]] call[name[new_values].append, parameter[name[new_pair]]] return[name[new_values]]
keyword[def] identifier[_DecodeUnknownMessages] ( identifier[message] , identifier[encoded_message] , identifier[pair_type] ): literal[string] identifier[field_type] = identifier[pair_type] . identifier[value] . identifier[type] identifier[new_values] =[] identifier[all_field_names] =[ identifier[x] . identifier[name] keyword[for] identifier[x] keyword[in] identifier[message] . identifier[all_fields] ()] keyword[for] identifier[name] , identifier[value_dict] keyword[in] identifier[six] . identifier[iteritems] ( identifier[encoded_message] ): keyword[if] identifier[name] keyword[in] identifier[all_field_names] : keyword[continue] identifier[value] = identifier[PyValueToMessage] ( identifier[field_type] , identifier[value_dict] ) keyword[if] identifier[pair_type] . identifier[value] . identifier[repeated] : identifier[value] = identifier[_AsMessageList] ( identifier[value] ) identifier[new_pair] = identifier[pair_type] ( identifier[key] = identifier[name] , identifier[value] = identifier[value] ) identifier[new_values] . identifier[append] ( identifier[new_pair] ) keyword[return] identifier[new_values]
def _DecodeUnknownMessages(message, encoded_message, pair_type): """Process unknown fields in encoded_message of a message type.""" field_type = pair_type.value.type new_values = [] all_field_names = [x.name for x in message.all_fields()] for (name, value_dict) in six.iteritems(encoded_message): if name in all_field_names: continue # depends on [control=['if'], data=[]] value = PyValueToMessage(field_type, value_dict) if pair_type.value.repeated: value = _AsMessageList(value) # depends on [control=['if'], data=[]] new_pair = pair_type(key=name, value=value) new_values.append(new_pair) # depends on [control=['for'], data=[]] return new_values
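A plain-dict analogue of the filtering step above (not the protorpc API): payload keys that are not declared fields become (key, value) pairs, which _DecodeUnknownMessages then converts to typed pair messages.

def split_unknown(encoded, known_field_names):
    # Keep only the keys that no declared field claims.
    return [(name, value) for name, value in encoded.items()
            if name not in known_field_names]

print(split_unknown({'id': 1, 'x-extra': 'y'}, known_field_names={'id'}))
# [('x-extra', 'y')]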
def extend(self, xs: Union['List[T]', typing.List[T]]) -> 'List[T]':  # type: ignore
        """doufo.List.extend

        Args:
            `self`
            `xs` (`Union['List[T]', typing.List[T]]`): Another List object or typing.List

        Returns:
            extended `List` (`List[T]`)
        """
        return type(self)(self.unbox() + List(xs).unbox())
def function[extend, parameter[self, xs]]: constant[doufo.List.extend Args: `self` `xs` (`Union['List[T]', typing.List[T]]`): Another List object or Typing.List Returns: extented `List` (`List[T]`) ] return[call[call[name[type], parameter[name[self]]], parameter[binary_operation[call[name[self].unbox, parameter[]] + call[call[name[List], parameter[name[xs]]].unbox, parameter[]]]]]]
keyword[def] identifier[extend] ( identifier[self] , identifier[xs] : identifier[Union] [ literal[string] , identifier[typing] . identifier[List] [ identifier[T] ]])-> literal[string] : literal[string] keyword[return] identifier[type] ( identifier[self] )( identifier[self] . identifier[unbox] ()+ identifier[List] ( identifier[xs] ). identifier[unbox] ())
def extend(self, xs: Union['List[T]', typing.List[T]]) -> 'List[T]': # type: ignore "doufo.List.extend \n Args: \n `self` \n `xs` (`Union['List[T]', typing.List[T]]`): Another List object or Typing.List \n Returns: \n extented `List` (`List[T]`)\n " return type(self)(self.unbox() + List(xs).unbox())
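Usage sketch, assuming the surrounding doufo List type is in scope; because extend rebuilds via `type(self)(...)`, it returns a new List and leaves the receiver untouched.

xs = List([1, 2, 3])      # assumes the doufo List constructor accepts a list
ys = xs.extend([4, 5])    # a plain typing.List argument works too
print(ys.unbox())         # [1, 2, 3, 4, 5]
print(xs.unbox())         # [1, 2, 3] -- a new List is returned, xs unchanged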
def get_stp_mst_detail_output_cist_port_if_role(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_stp_mst_detail = ET.Element("get_stp_mst_detail") config = get_stp_mst_detail output = ET.SubElement(get_stp_mst_detail, "output") cist = ET.SubElement(output, "cist") port = ET.SubElement(cist, "port") if_role = ET.SubElement(port, "if-role") if_role.text = kwargs.pop('if_role') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[get_stp_mst_detail_output_cist_port_if_role, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[get_stp_mst_detail] assign[=] call[name[ET].Element, parameter[constant[get_stp_mst_detail]]] variable[config] assign[=] name[get_stp_mst_detail] variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_stp_mst_detail], constant[output]]] variable[cist] assign[=] call[name[ET].SubElement, parameter[name[output], constant[cist]]] variable[port] assign[=] call[name[ET].SubElement, parameter[name[cist], constant[port]]] variable[if_role] assign[=] call[name[ET].SubElement, parameter[name[port], constant[if-role]]] name[if_role].text assign[=] call[name[kwargs].pop, parameter[constant[if_role]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[get_stp_mst_detail_output_cist_port_if_role] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[get_stp_mst_detail] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[get_stp_mst_detail] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_stp_mst_detail] , literal[string] ) identifier[cist] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[port] = identifier[ET] . identifier[SubElement] ( identifier[cist] , literal[string] ) identifier[if_role] = identifier[ET] . identifier[SubElement] ( identifier[port] , literal[string] ) identifier[if_role] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def get_stp_mst_detail_output_cist_port_if_role(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') get_stp_mst_detail = ET.Element('get_stp_mst_detail') config = get_stp_mst_detail output = ET.SubElement(get_stp_mst_detail, 'output') cist = ET.SubElement(output, 'cist') port = ET.SubElement(cist, 'port') if_role = ET.SubElement(port, 'if-role') if_role.text = kwargs.pop('if_role') callback = kwargs.pop('callback', self._callback) return callback(config)
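An illustrative call: `device` stands for an object exposing this auto-generated method with a NETCONF-style callback, and the if-role value is hypothetical. The element tree built above serializes to the request body shown in the comment.

result = device.get_stp_mst_detail_output_cist_port_if_role(if_role='designated')

# The generated request body is equivalent to:
# <get_stp_mst_detail>
#   <output><cist><port><if-role>designated</if-role></port></cist></output>
# </get_stp_mst_detail>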
def slice_to(self, s):
        '''
        Return the current slice (``self.current[self.bra:self.ket]``) if
        ``slice_check()`` passes, otherwise return ''. The *s* argument is
        accepted but not used.
        @type s: string
        '''
        result = ''
        if self.slice_check():
            result = self.current[self.bra:self.ket]
        return result
def function[slice_to, parameter[self, s]]: constant[ Copy the slice into the supplied StringBuffer @type s: string ] variable[result] assign[=] constant[] if call[name[self].slice_check, parameter[]] begin[:] variable[result] assign[=] call[name[self].current][<ast.Slice object at 0x7da1b069feb0>] return[name[result]]
keyword[def] identifier[slice_to] ( identifier[self] , identifier[s] ): literal[string] identifier[result] = literal[string] keyword[if] identifier[self] . identifier[slice_check] (): identifier[result] = identifier[self] . identifier[current] [ identifier[self] . identifier[bra] : identifier[self] . identifier[ket] ] keyword[return] identifier[result]
def slice_to(self, s):
    """
    Return a copy of the current slice; the supplied StringBuffer *s*
    is accepted for interface compatibility but is not modified here.
    @type s: string
    """
    result = ''
    if self.slice_check():
        result = self.current[self.bra:self.ket] # depends on [control=['if'], data=[]]
    return result
def parse(self, rev_string): """ :param rev_string: :type rev_string: str """ elements = rev_string.split(MESSAGE_LINE_SEPARATOR) heading = elements[0] heading_elements = heading.split(" ") self.revision_id = heading_elements[2] datetime_str = "{} {}".format( heading_elements[0], heading_elements[1] ) self.release_date = datetime.datetime.strptime( datetime_str, DATETIME_FORMAT ) self.description = elements[1] self.message = elements[2]
def function[parse, parameter[self, rev_string]]: constant[ :param rev_string: :type rev_string: str ] variable[elements] assign[=] call[name[rev_string].split, parameter[name[MESSAGE_LINE_SEPARATOR]]] variable[heading] assign[=] call[name[elements]][constant[0]] variable[heading_elements] assign[=] call[name[heading].split, parameter[constant[ ]]] name[self].revision_id assign[=] call[name[heading_elements]][constant[2]] variable[datetime_str] assign[=] call[constant[{} {}].format, parameter[call[name[heading_elements]][constant[0]], call[name[heading_elements]][constant[1]]]] name[self].release_date assign[=] call[name[datetime].datetime.strptime, parameter[name[datetime_str], name[DATETIME_FORMAT]]] name[self].description assign[=] call[name[elements]][constant[1]] name[self].message assign[=] call[name[elements]][constant[2]]
keyword[def] identifier[parse] ( identifier[self] , identifier[rev_string] ): literal[string] identifier[elements] = identifier[rev_string] . identifier[split] ( identifier[MESSAGE_LINE_SEPARATOR] ) identifier[heading] = identifier[elements] [ literal[int] ] identifier[heading_elements] = identifier[heading] . identifier[split] ( literal[string] ) identifier[self] . identifier[revision_id] = identifier[heading_elements] [ literal[int] ] identifier[datetime_str] = literal[string] . identifier[format] ( identifier[heading_elements] [ literal[int] ], identifier[heading_elements] [ literal[int] ] ) identifier[self] . identifier[release_date] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[datetime_str] , identifier[DATETIME_FORMAT] ) identifier[self] . identifier[description] = identifier[elements] [ literal[int] ] identifier[self] . identifier[message] = identifier[elements] [ literal[int] ]
def parse(self, rev_string): """ :param rev_string: :type rev_string: str """ elements = rev_string.split(MESSAGE_LINE_SEPARATOR) heading = elements[0] heading_elements = heading.split(' ') self.revision_id = heading_elements[2] datetime_str = '{} {}'.format(heading_elements[0], heading_elements[1]) self.release_date = datetime.datetime.strptime(datetime_str, DATETIME_FORMAT) self.description = elements[1] self.message = elements[2]
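A runnable sketch of the same parsing steps; MESSAGE_LINE_SEPARATOR and DATETIME_FORMAT are module-level constants not shown in the record, so the values below are hypothetical stand-ins:

import datetime

# Hypothetical stand-ins for the module constants used by parse().
MESSAGE_LINE_SEPARATOR = "\n"
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"

rev_string = "2024-01-15 09:30:00 r42\nShort description\nLonger commit message"
heading, description, message = rev_string.split(MESSAGE_LINE_SEPARATOR)
date_part, time_part, revision_id = heading.split(" ")
release_date = datetime.datetime.strptime(
    "{} {}".format(date_part, time_part), DATETIME_FORMAT
)
print(revision_id, release_date, description)
# r42 2024-01-15 09:30:00 Short description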
def _iter_interleaved_items(self, elements): """Generate element or subtotal items in interleaved order. This ordering corresponds to how value "rows" (or columns) are to appear after subtotals have been inserted at their anchor locations. Where more than one subtotal is anchored to the same location, they appear in their document order in the cube response. Only elements in the passed *elements* collection appear, which allows control over whether missing elements are included by choosing `.all_elements` or `.valid_elements`. """ subtotals = self._subtotals for subtotal in subtotals.iter_for_anchor("top"): yield subtotal for element in elements: yield element for subtotal in subtotals.iter_for_anchor(element.element_id): yield subtotal for subtotal in subtotals.iter_for_anchor("bottom"): yield subtotal
def function[_iter_interleaved_items, parameter[self, elements]]: constant[Generate element or subtotal items in interleaved order. This ordering corresponds to how value "rows" (or columns) are to appear after subtotals have been inserted at their anchor locations. Where more than one subtotal is anchored to the same location, they appear in their document order in the cube response. Only elements in the passed *elements* collection appear, which allows control over whether missing elements are included by choosing `.all_elements` or `.valid_elements`. ] variable[subtotals] assign[=] name[self]._subtotals for taget[name[subtotal]] in starred[call[name[subtotals].iter_for_anchor, parameter[constant[top]]]] begin[:] <ast.Yield object at 0x7da18dc98b80> for taget[name[element]] in starred[name[elements]] begin[:] <ast.Yield object at 0x7da18dc9a770> for taget[name[subtotal]] in starred[call[name[subtotals].iter_for_anchor, parameter[name[element].element_id]]] begin[:] <ast.Yield object at 0x7da18dc9ada0> for taget[name[subtotal]] in starred[call[name[subtotals].iter_for_anchor, parameter[constant[bottom]]]] begin[:] <ast.Yield object at 0x7da18dc9bc40>
keyword[def] identifier[_iter_interleaved_items] ( identifier[self] , identifier[elements] ): literal[string] identifier[subtotals] = identifier[self] . identifier[_subtotals] keyword[for] identifier[subtotal] keyword[in] identifier[subtotals] . identifier[iter_for_anchor] ( literal[string] ): keyword[yield] identifier[subtotal] keyword[for] identifier[element] keyword[in] identifier[elements] : keyword[yield] identifier[element] keyword[for] identifier[subtotal] keyword[in] identifier[subtotals] . identifier[iter_for_anchor] ( identifier[element] . identifier[element_id] ): keyword[yield] identifier[subtotal] keyword[for] identifier[subtotal] keyword[in] identifier[subtotals] . identifier[iter_for_anchor] ( literal[string] ): keyword[yield] identifier[subtotal]
def _iter_interleaved_items(self, elements): """Generate element or subtotal items in interleaved order. This ordering corresponds to how value "rows" (or columns) are to appear after subtotals have been inserted at their anchor locations. Where more than one subtotal is anchored to the same location, they appear in their document order in the cube response. Only elements in the passed *elements* collection appear, which allows control over whether missing elements are included by choosing `.all_elements` or `.valid_elements`. """ subtotals = self._subtotals for subtotal in subtotals.iter_for_anchor('top'): yield subtotal # depends on [control=['for'], data=['subtotal']] for element in elements: yield element for subtotal in subtotals.iter_for_anchor(element.element_id): yield subtotal # depends on [control=['for'], data=['subtotal']] # depends on [control=['for'], data=['element']] for subtotal in subtotals.iter_for_anchor('bottom'): yield subtotal # depends on [control=['for'], data=['subtotal']]
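A toy, self-contained version of the interleaving logic above; the dict-based elements and subtotals are hypothetical stand-ins for the real element and subtotal objects:

elements = [{"element_id": 1, "name": "cat"}, {"element_id": 2, "name": "dog"}]
subtotals = [
    {"anchor": "top", "name": "All pets"},
    {"anchor": 1, "name": "Cats subtotal"},
    {"anchor": "bottom", "name": "Grand total"},
]

def iter_interleaved(elements, subtotals):
    # Subtotals anchored "top" come first, then each element followed
    # by subtotals anchored to its id, then "bottom" subtotals.
    def anchored_to(anchor):
        return (s for s in subtotals if s["anchor"] == anchor)
    yield from anchored_to("top")
    for element in elements:
        yield element
        yield from anchored_to(element["element_id"])
    yield from anchored_to("bottom")

for item in iter_interleaved(elements, subtotals):
    print(item["name"])
# All pets, cat, Cats subtotal, dog, Grand total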
def get_predictions_under_consistency(instance): ''' Computes the set of signs on edges/vertices that can be cautiously derived from [instance], minus those that are a direct consequence of obs_[ev]label predicates ''' inst = instance.to_file() prg = [ prediction_prg, inst, exclude_sol([]) ] solver = GringoClasp(clasp_options='--project --enum-mode cautious') models = solver.run(prg, collapseTerms=True, collapseAtoms=False) os.unlink(inst) os.unlink(prg[2]) return whatsnew(instance,models[0])
def function[get_predictions_under_consistency, parameter[instance]]: constant[ Computes the set of signs on edges/vertices that can be cautiously derived from [instance], minus those that are a direct consequence of obs_[ev]label predicates ] variable[inst] assign[=] call[name[instance].to_file, parameter[]] variable[prg] assign[=] list[[<ast.Name object at 0x7da204621ea0>, <ast.Name object at 0x7da204622560>, <ast.Call object at 0x7da204623130>]] variable[solver] assign[=] call[name[GringoClasp], parameter[]] variable[models] assign[=] call[name[solver].run, parameter[name[prg]]] call[name[os].unlink, parameter[name[inst]]] call[name[os].unlink, parameter[call[name[prg]][constant[2]]]] return[call[name[whatsnew], parameter[name[instance], call[name[models]][constant[0]]]]]
keyword[def] identifier[get_predictions_under_consistency] ( identifier[instance] ): literal[string] identifier[inst] = identifier[instance] . identifier[to_file] () identifier[prg] =[ identifier[prediction_prg] , identifier[inst] , identifier[exclude_sol] ([])] identifier[solver] = identifier[GringoClasp] ( identifier[clasp_options] = literal[string] ) identifier[models] = identifier[solver] . identifier[run] ( identifier[prg] , identifier[collapseTerms] = keyword[True] , identifier[collapseAtoms] = keyword[False] ) identifier[os] . identifier[unlink] ( identifier[inst] ) identifier[os] . identifier[unlink] ( identifier[prg] [ literal[int] ]) keyword[return] identifier[whatsnew] ( identifier[instance] , identifier[models] [ literal[int] ])
def get_predictions_under_consistency(instance): """ Computes the set of signs on edges/vertices that can be cautiously derived from [instance], minus those that are a direct consequence of obs_[ev]label predicates """ inst = instance.to_file() prg = [prediction_prg, inst, exclude_sol([])] solver = GringoClasp(clasp_options='--project --enum-mode cautious') models = solver.run(prg, collapseTerms=True, collapseAtoms=False) os.unlink(inst) os.unlink(prg[2]) return whatsnew(instance, models[0])
def get_doctree(path, **kwargs): """ Obtain a Sphinx doctree from the RST file at ``path``. Performs no Releases-specific processing; this code would, ideally, be in Sphinx itself, but things there are pretty tightly coupled. So we wrote this. Any additional kwargs are passed unmodified into an internal `make_app` call. :param str path: A relative or absolute file path string. :returns: A two-tuple of the generated ``sphinx.application.Sphinx`` app and the doctree (a ``docutils.document`` object). .. versionchanged:: 1.6 Added support for passing kwargs to `make_app`. """ root, filename = os.path.split(path) docname, _ = os.path.splitext(filename) # TODO: this only works for top level changelog files (i.e. ones where # their dirname is the project/doc root) app = make_app(srcdir=root, **kwargs) # Create & init a BuildEnvironment. Mm, tasty side effects. app._init_env(freshenv=True) env = app.env # More arity/API changes: Sphinx 1.3/1.4-ish require one to pass in the app # obj in BuildEnvironment.update(); modern Sphinx performs that inside # Application._init_env() (which we just called above) and so that kwarg is # removed from update(). EAFP. kwargs = dict( config=app.config, srcdir=root, doctreedir=app.doctreedir, app=app, ) try: env.update(**kwargs) except TypeError: # Assume newer Sphinx w/o an app= kwarg del kwargs['app'] env.update(**kwargs) # Code taken from sphinx.environment.read_doc; easier to manually call # it with a working Environment object, instead of doing more random crap # to trick the higher up build system into thinking our single changelog # document was "updated". env.temp_data['docname'] = docname env.app = app # NOTE: SphinxStandaloneReader API changed in 1.4 :( reader_kwargs = { 'app': app, 'parsers': env.config.source_parsers, } if sphinx.version_info[:2] < (1, 4): del reader_kwargs['app'] # This monkeypatches (!!!) docutils to 'inject' all registered Sphinx # domains' roles & so forth. Without this, rendering the doctree lacks # almost all Sphinx magic, including things like :ref: and :doc:! with sphinx_domains(env): try: reader = SphinxStandaloneReader(**reader_kwargs) except TypeError: # If we import from io, this happens automagically, not in API del reader_kwargs['parsers'] reader = SphinxStandaloneReader(**reader_kwargs) pub = Publisher(reader=reader, writer=SphinxDummyWriter(), destination_class=NullOutput) pub.set_components(None, 'restructuredtext', None) pub.process_programmatic_settings(None, env.settings, None) # NOTE: docname derived higher up, from our given path src_path = env.doc2path(docname) source = SphinxFileInput( app, env, source=None, source_path=src_path, encoding=env.config.source_encoding, ) pub.source = source pub.settings._source = src_path pub.set_destination(None, None) pub.publish() return app, pub.document
def function[get_doctree, parameter[path]]: constant[ Obtain a Sphinx doctree from the RST file at ``path``. Performs no Releases-specific processing; this code would, ideally, be in Sphinx itself, but things there are pretty tightly coupled. So we wrote this. Any additional kwargs are passed unmodified into an internal `make_app` call. :param str path: A relative or absolute file path string. :returns: A two-tuple of the generated ``sphinx.application.Sphinx`` app and the doctree (a ``docutils.document`` object). .. versionchanged:: 1.6 Added support for passing kwargs to `make_app`. ] <ast.Tuple object at 0x7da1b0535b10> assign[=] call[name[os].path.split, parameter[name[path]]] <ast.Tuple object at 0x7da1b0537c10> assign[=] call[name[os].path.splitext, parameter[name[filename]]] variable[app] assign[=] call[name[make_app], parameter[]] call[name[app]._init_env, parameter[]] variable[env] assign[=] name[app].env variable[kwargs] assign[=] call[name[dict], parameter[]] <ast.Try object at 0x7da1b0535e70> call[name[env].temp_data][constant[docname]] assign[=] name[docname] name[env].app assign[=] name[app] variable[reader_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b0536920>, <ast.Constant object at 0x7da1b05379a0>], [<ast.Name object at 0x7da1b0537e20>, <ast.Attribute object at 0x7da1b0536620>]] if compare[call[name[sphinx].version_info][<ast.Slice object at 0x7da1b05373d0>] less[<] tuple[[<ast.Constant object at 0x7da1b0534e80>, <ast.Constant object at 0x7da1b05343d0>]]] begin[:] <ast.Delete object at 0x7da1b05350c0> with call[name[sphinx_domains], parameter[name[env]]] begin[:] <ast.Try object at 0x7da1b0537040> variable[pub] assign[=] call[name[Publisher], parameter[]] call[name[pub].set_components, parameter[constant[None], constant[restructuredtext], constant[None]]] call[name[pub].process_programmatic_settings, parameter[constant[None], name[env].settings, constant[None]]] variable[src_path] assign[=] call[name[env].doc2path, parameter[name[docname]]] variable[source] assign[=] call[name[SphinxFileInput], parameter[name[app], name[env]]] name[pub].source assign[=] name[source] name[pub].settings._source assign[=] name[src_path] call[name[pub].set_destination, parameter[constant[None], constant[None]]] call[name[pub].publish, parameter[]] return[tuple[[<ast.Name object at 0x7da1b05e11b0>, <ast.Attribute object at 0x7da1b05e3430>]]]
keyword[def] identifier[get_doctree] ( identifier[path] ,** identifier[kwargs] ): literal[string] identifier[root] , identifier[filename] = identifier[os] . identifier[path] . identifier[split] ( identifier[path] ) identifier[docname] , identifier[_] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[filename] ) identifier[app] = identifier[make_app] ( identifier[srcdir] = identifier[root] ,** identifier[kwargs] ) identifier[app] . identifier[_init_env] ( identifier[freshenv] = keyword[True] ) identifier[env] = identifier[app] . identifier[env] identifier[kwargs] = identifier[dict] ( identifier[config] = identifier[app] . identifier[config] , identifier[srcdir] = identifier[root] , identifier[doctreedir] = identifier[app] . identifier[doctreedir] , identifier[app] = identifier[app] , ) keyword[try] : identifier[env] . identifier[update] (** identifier[kwargs] ) keyword[except] identifier[TypeError] : keyword[del] identifier[kwargs] [ literal[string] ] identifier[env] . identifier[update] (** identifier[kwargs] ) identifier[env] . identifier[temp_data] [ literal[string] ]= identifier[docname] identifier[env] . identifier[app] = identifier[app] identifier[reader_kwargs] ={ literal[string] : identifier[app] , literal[string] : identifier[env] . identifier[config] . identifier[source_parsers] , } keyword[if] identifier[sphinx] . identifier[version_info] [: literal[int] ]<( literal[int] , literal[int] ): keyword[del] identifier[reader_kwargs] [ literal[string] ] keyword[with] identifier[sphinx_domains] ( identifier[env] ): keyword[try] : identifier[reader] = identifier[SphinxStandaloneReader] (** identifier[reader_kwargs] ) keyword[except] identifier[TypeError] : keyword[del] identifier[reader_kwargs] [ literal[string] ] identifier[reader] = identifier[SphinxStandaloneReader] (** identifier[reader_kwargs] ) identifier[pub] = identifier[Publisher] ( identifier[reader] = identifier[reader] , identifier[writer] = identifier[SphinxDummyWriter] (), identifier[destination_class] = identifier[NullOutput] ) identifier[pub] . identifier[set_components] ( keyword[None] , literal[string] , keyword[None] ) identifier[pub] . identifier[process_programmatic_settings] ( keyword[None] , identifier[env] . identifier[settings] , keyword[None] ) identifier[src_path] = identifier[env] . identifier[doc2path] ( identifier[docname] ) identifier[source] = identifier[SphinxFileInput] ( identifier[app] , identifier[env] , identifier[source] = keyword[None] , identifier[source_path] = identifier[src_path] , identifier[encoding] = identifier[env] . identifier[config] . identifier[source_encoding] , ) identifier[pub] . identifier[source] = identifier[source] identifier[pub] . identifier[settings] . identifier[_source] = identifier[src_path] identifier[pub] . identifier[set_destination] ( keyword[None] , keyword[None] ) identifier[pub] . identifier[publish] () keyword[return] identifier[app] , identifier[pub] . identifier[document]
def get_doctree(path, **kwargs): """ Obtain a Sphinx doctree from the RST file at ``path``. Performs no Releases-specific processing; this code would, ideally, be in Sphinx itself, but things there are pretty tightly coupled. So we wrote this. Any additional kwargs are passed unmodified into an internal `make_app` call. :param str path: A relative or absolute file path string. :returns: A two-tuple of the generated ``sphinx.application.Sphinx`` app and the doctree (a ``docutils.document`` object). .. versionchanged:: 1.6 Added support for passing kwargs to `make_app`. """ (root, filename) = os.path.split(path) (docname, _) = os.path.splitext(filename) # TODO: this only works for top level changelog files (i.e. ones where # their dirname is the project/doc root) app = make_app(srcdir=root, **kwargs) # Create & init a BuildEnvironment. Mm, tasty side effects. app._init_env(freshenv=True) env = app.env # More arity/API changes: Sphinx 1.3/1.4-ish require one to pass in the app # obj in BuildEnvironment.update(); modern Sphinx performs that inside # Application._init_env() (which we just called above) and so that kwarg is # removed from update(). EAFP. kwargs = dict(config=app.config, srcdir=root, doctreedir=app.doctreedir, app=app) try: env.update(**kwargs) # depends on [control=['try'], data=[]] except TypeError: # Assume newer Sphinx w/o an app= kwarg del kwargs['app'] env.update(**kwargs) # depends on [control=['except'], data=[]] # Code taken from sphinx.environment.read_doc; easier to manually call # it with a working Environment object, instead of doing more random crap # to trick the higher up build system into thinking our single changelog # document was "updated". env.temp_data['docname'] = docname env.app = app # NOTE: SphinxStandaloneReader API changed in 1.4 :( reader_kwargs = {'app': app, 'parsers': env.config.source_parsers} if sphinx.version_info[:2] < (1, 4): del reader_kwargs['app'] # depends on [control=['if'], data=[]] # This monkeypatches (!!!) docutils to 'inject' all registered Sphinx # domains' roles & so forth. Without this, rendering the doctree lacks # almost all Sphinx magic, including things like :ref: and :doc:! with sphinx_domains(env): try: reader = SphinxStandaloneReader(**reader_kwargs) # depends on [control=['try'], data=[]] except TypeError: # If we import from io, this happens automagically, not in API del reader_kwargs['parsers'] reader = SphinxStandaloneReader(**reader_kwargs) # depends on [control=['except'], data=[]] pub = Publisher(reader=reader, writer=SphinxDummyWriter(), destination_class=NullOutput) pub.set_components(None, 'restructuredtext', None) pub.process_programmatic_settings(None, env.settings, None) # NOTE: docname derived higher up, from our given path src_path = env.doc2path(docname) source = SphinxFileInput(app, env, source=None, source_path=src_path, encoding=env.config.source_encoding) pub.source = source pub.settings._source = src_path pub.set_destination(None, None) pub.publish() return (app, pub.document) # depends on [control=['with'], data=[]]
def save(self, *args, **kwargs): """ Overrides the save method """ self.slug = self.create_slug() super(Slugable, self).save(*args, **kwargs)
def function[save, parameter[self]]: constant[ Overrides the save method ] name[self].slug assign[=] call[name[self].create_slug, parameter[]] call[call[name[super], parameter[name[Slugable], name[self]]].save, parameter[<ast.Starred object at 0x7da1b27e0a60>]]
keyword[def] identifier[save] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[slug] = identifier[self] . identifier[create_slug] () identifier[super] ( identifier[Slugable] , identifier[self] ). identifier[save] (* identifier[args] ,** identifier[kwargs] )
def save(self, *args, **kwargs): """ Overrides the save method """ self.slug = self.create_slug() super(Slugable, self).save(*args, **kwargs)
def preprocess(*_unused, **processors):
    """
    Decorator that applies pre-processors to the arguments of a function
    before calling the function.

    Parameters
    ----------
    **processors : dict
        Map from argument name -> processor function.

        A processor function takes three arguments: (func, argname, argvalue).

        `func` is the function for which we're processing args.
        `argname` is the name of the argument we're processing.
        `argvalue` is the value of the argument we're processing.

    Examples
    --------
    >>> def _ensure_tuple(func, argname, arg):
    ...     if isinstance(arg, tuple):
    ...         return arg
    ...     try:
    ...         return tuple(arg)
    ...     except TypeError:
    ...         raise TypeError(
    ...             "%s() expected argument '%s' to"
    ...             " be iterable, but got %s instead." % (
    ...                 func.__name__, argname, arg,
    ...             )
    ...         )
    ...
    >>> @preprocess(arg=_ensure_tuple)
    ... def foo(arg):
    ...     return arg
    ...
    >>> foo([1, 2, 3])
    (1, 2, 3)
    >>> foo("a")
    ('a',)
    >>> foo(2)
    Traceback (most recent call last):
    ...
    TypeError: foo() expected argument 'arg' to be iterable, but got 2 instead.
    """
    if _unused:
        raise TypeError("preprocess() doesn't accept positional arguments")

    def _decorator(f):
        args, varargs, varkw, defaults = argspec = getargspec(f)
        if defaults is None:
            defaults = ()
        no_defaults = (NO_DEFAULT,) * (len(args) - len(defaults))
        args_defaults = list(zip(args, no_defaults + defaults))
        if varargs:
            args_defaults.append((varargs, NO_DEFAULT))
        if varkw:
            args_defaults.append((varkw, NO_DEFAULT))

        argset = set(args) | {varargs, varkw} - {None}

        # Arguments can be declared as tuples in Python 2.
        if not all(isinstance(arg, str) for arg in args):
            raise TypeError(
                "Can't validate functions using tuple unpacking: %s" %
                (argspec,)
            )

        # Ensure that all processors map to valid names.
        bad_names = viewkeys(processors) - argset
        if bad_names:
            raise TypeError(
                "Got processors for unknown arguments: %s." % bad_names
            )

        return _build_preprocessed_function(
            f, processors, args_defaults, varargs, varkw,
        )
    return _decorator
def function[preprocess, parameter[]]: constant[ Decorator that applies pre-processors to the arguments of a function before calling the function. Parameters ---------- **processors : dict Map from argument name -> processor function. A processor function takes three arguments: (func, argname, argvalue). `func` is the function for which we're processing args. `argname` is the name of the argument we're processing. `argvalue` is the value of the argument we're processing. Examples -------- >>> def _ensure_tuple(func, argname, arg): ... if isinstance(arg, tuple): ... return arg ... try: ... return tuple(arg) ... except TypeError: ... raise TypeError( ... "%s() expected argument '%s' to" ... " be iterable, but got %s instead." % ( ... func.__name__, argname, arg, ... ) ... ) ... >>> @preprocess(arg=_ensure_tuple) ... def foo(arg): ... return arg ... >>> foo([1, 2, 3]) (1, 2, 3) >>> foo("a") ('a',) >>> foo(2) Traceback (most recent call last): ... TypeError: foo() expected argument 'arg' to be iterable, but got 2 instead. ] if name[_unused] begin[:] <ast.Raise object at 0x7da1b2025180> def function[_decorator, parameter[f]]: <ast.Tuple object at 0x7da1b20648b0> assign[=] call[name[getargspec], parameter[name[f]]] if compare[name[defaults] is constant[None]] begin[:] variable[defaults] assign[=] tuple[[]] variable[no_defaults] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da1b20649a0>]] * binary_operation[call[name[len], parameter[name[args]]] - call[name[len], parameter[name[defaults]]]]] variable[args_defaults] assign[=] call[name[list], parameter[call[name[zip], parameter[name[args], binary_operation[name[no_defaults] + name[defaults]]]]]] if name[varargs] begin[:] call[name[args_defaults].append, parameter[tuple[[<ast.Name object at 0x7da1b2066320>, <ast.Name object at 0x7da1b2064940>]]]] if name[varkw] begin[:] call[name[args_defaults].append, parameter[tuple[[<ast.Name object at 0x7da1b2065e40>, <ast.Name object at 0x7da1b2066440>]]]] variable[argset] assign[=] binary_operation[call[name[set], parameter[name[args]]] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[<ast.Set object at 0x7da1b2067460> - <ast.Set object at 0x7da1b2064f40>]] if <ast.UnaryOp object at 0x7da1b20676a0> begin[:] <ast.Raise object at 0x7da1b1ea2e30> variable[bad_names] assign[=] binary_operation[call[name[viewkeys], parameter[name[processors]]] - name[argset]] if name[bad_names] begin[:] <ast.Raise object at 0x7da1b1ea12d0> return[call[name[_build_preprocessed_function], parameter[name[f], name[processors], name[args_defaults], name[varargs], name[varkw]]]] return[name[_decorator]]
keyword[def] identifier[preprocess] (* identifier[_unused] ,** identifier[processors] ): literal[string] keyword[if] identifier[_unused] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[def] identifier[_decorator] ( identifier[f] ): identifier[args] , identifier[varargs] , identifier[varkw] , identifier[defaults] = identifier[argspec] = identifier[getargspec] ( identifier[f] ) keyword[if] identifier[defaults] keyword[is] keyword[None] : identifier[defaults] =() identifier[no_defaults] =( identifier[NO_DEFAULT] ,)*( identifier[len] ( identifier[args] )- identifier[len] ( identifier[defaults] )) identifier[args_defaults] = identifier[list] ( identifier[zip] ( identifier[args] , identifier[no_defaults] + identifier[defaults] )) keyword[if] identifier[varargs] : identifier[args_defaults] . identifier[append] (( identifier[varargs] , identifier[NO_DEFAULT] )) keyword[if] identifier[varkw] : identifier[args_defaults] . identifier[append] (( identifier[varkw] , identifier[NO_DEFAULT] )) identifier[argset] = identifier[set] ( identifier[args] )|{ identifier[varargs] , identifier[varkw] }-{ keyword[None] } keyword[if] keyword[not] identifier[all] ( identifier[isinstance] ( identifier[arg] , identifier[str] ) keyword[for] identifier[arg] keyword[in] identifier[args] ): keyword[raise] identifier[TypeError] ( literal[string] % ( identifier[argspec] ,) ) identifier[bad_names] = identifier[viewkeys] ( identifier[processors] )- identifier[argset] keyword[if] identifier[bad_names] : keyword[raise] identifier[TypeError] ( literal[string] % identifier[bad_names] ) keyword[return] identifier[_build_preprocessed_function] ( identifier[f] , identifier[processors] , identifier[args_defaults] , identifier[varargs] , identifier[varkw] , ) keyword[return] identifier[_decorator]
def preprocess(*_unused, **processors):
    """
    Decorator that applies pre-processors to the arguments of a function
    before calling the function.

    Parameters
    ----------
    **processors : dict
        Map from argument name -> processor function.

        A processor function takes three arguments: (func, argname, argvalue).

        `func` is the function for which we're processing args.
        `argname` is the name of the argument we're processing.
        `argvalue` is the value of the argument we're processing.

    Examples
    --------
    >>> def _ensure_tuple(func, argname, arg):
    ...     if isinstance(arg, tuple):
    ...         return arg
    ...     try:
    ...         return tuple(arg)
    ...     except TypeError:
    ...         raise TypeError(
    ...             "%s() expected argument '%s' to"
    ...             " be iterable, but got %s instead." % (
    ...                 func.__name__, argname, arg,
    ...             )
    ...         )
    ...
    >>> @preprocess(arg=_ensure_tuple)
    ... def foo(arg):
    ...     return arg
    ...
    >>> foo([1, 2, 3])
    (1, 2, 3)
    >>> foo("a")
    ('a',)
    >>> foo(2)
    Traceback (most recent call last):
    ...
    TypeError: foo() expected argument 'arg' to be iterable, but got 2 instead.
    """
    if _unused:
        raise TypeError("preprocess() doesn't accept positional arguments") # depends on [control=['if'], data=[]]

    def _decorator(f):
        (args, varargs, varkw, defaults) = argspec = getargspec(f)
        if defaults is None:
            defaults = () # depends on [control=['if'], data=['defaults']]
        no_defaults = (NO_DEFAULT,) * (len(args) - len(defaults))
        args_defaults = list(zip(args, no_defaults + defaults))
        if varargs:
            args_defaults.append((varargs, NO_DEFAULT)) # depends on [control=['if'], data=[]]
        if varkw:
            args_defaults.append((varkw, NO_DEFAULT)) # depends on [control=['if'], data=[]]
        argset = set(args) | {varargs, varkw} - {None}
        # Arguments can be declared as tuples in Python 2.
        if not all((isinstance(arg, str) for arg in args)):
            raise TypeError("Can't validate functions using tuple unpacking: %s" % (argspec,)) # depends on [control=['if'], data=[]]
        # Ensure that all processors map to valid names.
        bad_names = viewkeys(processors) - argset
        if bad_names:
            raise TypeError('Got processors for unknown arguments: %s.' % bad_names) # depends on [control=['if'], data=[]]
        return _build_preprocessed_function(f, processors, args_defaults, varargs, varkw)
    return _decorator
def _evaluate_standard(op, op_str, a, b, **eval_kwargs): """ standard evaluation """ if _TEST_MODE: _store_test_result(False) with np.errstate(all='ignore'): return op(a, b)
def function[_evaluate_standard, parameter[op, op_str, a, b]]: constant[ standard evaluation ] if name[_TEST_MODE] begin[:] call[name[_store_test_result], parameter[constant[False]]] with call[name[np].errstate, parameter[]] begin[:] return[call[name[op], parameter[name[a], name[b]]]]
keyword[def] identifier[_evaluate_standard] ( identifier[op] , identifier[op_str] , identifier[a] , identifier[b] ,** identifier[eval_kwargs] ): literal[string] keyword[if] identifier[_TEST_MODE] : identifier[_store_test_result] ( keyword[False] ) keyword[with] identifier[np] . identifier[errstate] ( identifier[all] = literal[string] ): keyword[return] identifier[op] ( identifier[a] , identifier[b] )
def _evaluate_standard(op, op_str, a, b, **eval_kwargs): """ standard evaluation """ if _TEST_MODE: _store_test_result(False) # depends on [control=['if'], data=[]] with np.errstate(all='ignore'): return op(a, b) # depends on [control=['with'], data=[]]
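The errstate context in isolation, assuming numpy is installed; evaluate_quietly is a hypothetical wrapper, not part of the original module:

import operator

import numpy as np

def evaluate_quietly(op, a, b):
    # Silence divide-by-zero/overflow/invalid warnings for the
    # duration of the operation, as the evaluator above does.
    with np.errstate(all='ignore'):
        return op(a, b)

print(evaluate_quietly(operator.truediv,
                       np.array([1.0, 2.0]),
                       np.array([0.0, 4.0])))
# [inf 0.5], with no RuntimeWarning emitted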
def write(self, __text: str) -> None: """Write text to the debug stream. Args: __text: Text to write """ if __text == os.linesep: self.handle.write(__text) else: frame = inspect.currentframe() if frame is None: filename = 'unknown' lineno = 0 else: outer = frame.f_back filename = outer.f_code.co_filename.split(os.sep)[-1] lineno = outer.f_lineno self.handle.write('[{:>15s}:{:03d}] {}'.format(filename[-15:], lineno, __text))
def function[write, parameter[self, __text]]: constant[Write text to the debug stream. Args: __text: Text to write ] if compare[name[__text] equal[==] name[os].linesep] begin[:] call[name[self].handle.write, parameter[name[__text]]]
keyword[def] identifier[write] ( identifier[self] , identifier[__text] : identifier[str] )-> keyword[None] : literal[string] keyword[if] identifier[__text] == identifier[os] . identifier[linesep] : identifier[self] . identifier[handle] . identifier[write] ( identifier[__text] ) keyword[else] : identifier[frame] = identifier[inspect] . identifier[currentframe] () keyword[if] identifier[frame] keyword[is] keyword[None] : identifier[filename] = literal[string] identifier[lineno] = literal[int] keyword[else] : identifier[outer] = identifier[frame] . identifier[f_back] identifier[filename] = identifier[outer] . identifier[f_code] . identifier[co_filename] . identifier[split] ( identifier[os] . identifier[sep] )[- literal[int] ] identifier[lineno] = identifier[outer] . identifier[f_lineno] identifier[self] . identifier[handle] . identifier[write] ( literal[string] . identifier[format] ( identifier[filename] [- literal[int] :], identifier[lineno] , identifier[__text] ))
def write(self, __text: str) -> None: """Write text to the debug stream. Args: __text: Text to write """ if __text == os.linesep: self.handle.write(__text) # depends on [control=['if'], data=['__text']] else: frame = inspect.currentframe() if frame is None: filename = 'unknown' lineno = 0 # depends on [control=['if'], data=[]] else: outer = frame.f_back filename = outer.f_code.co_filename.split(os.sep)[-1] lineno = outer.f_lineno self.handle.write('[{:>15s}:{:03d}] {}'.format(filename[-15:], lineno, __text))
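The frame walk in isolation: caller_location is a hypothetical standard-library-only helper that resolves its caller's file name and line number the same way the method above does:

import inspect
import os

def caller_location():
    # One frame up from this helper is whoever called it, mirroring
    # the frame.f_back lookup in the write() method above.
    frame = inspect.currentframe()
    if frame is None or frame.f_back is None:
        return 'unknown', 0
    outer = frame.f_back
    filename = outer.f_code.co_filename.split(os.sep)[-1]
    return filename, outer.f_lineno

name, lineno = caller_location()
print('[{:>15s}:{:03d}] message'.format(name[-15:], lineno))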
def mean_rate(self): """ Returns the mean rate of the events since the start of the process. """ if self.counter.value == 0: return 0.0 else: elapsed = time() - self.start_time return self.counter.value / elapsed
def function[mean_rate, parameter[self]]: constant[ Returns the mean rate of the events since the start of the process. ] if compare[name[self].counter.value equal[==] constant[0]] begin[:] return[constant[0.0]]
keyword[def] identifier[mean_rate] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[counter] . identifier[value] == literal[int] : keyword[return] literal[int] keyword[else] : identifier[elapsed] = identifier[time] ()- identifier[self] . identifier[start_time] keyword[return] identifier[self] . identifier[counter] . identifier[value] / identifier[elapsed]
def mean_rate(self): """ Returns the mean rate of the events since the start of the process. """ if self.counter.value == 0: return 0.0 # depends on [control=['if'], data=[]] else: elapsed = time() - self.start_time return self.counter.value / elapsed
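A self-contained sketch of the meter pattern the property above belongs to; MeanRateMeter is hypothetical and only illustrates the count-divided-by-elapsed-time calculation:

import time

class MeanRateMeter:
    # Count events and divide by wall-clock time elapsed since
    # construction, as the property above does.
    def __init__(self):
        self.count = 0
        self.start_time = time.time()

    def mark(self, n=1):
        self.count += n

    def mean_rate(self):
        if self.count == 0:
            return 0.0
        return self.count / (time.time() - self.start_time)

meter = MeanRateMeter()
meter.mark(10)
time.sleep(0.1)
print(meter.mean_rate())  # roughly 100 events/second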
def sine(x):
    '''
    sine(x) is equivalent to sin(x) except that it also works on sparse arrays.
    '''
    if sps.issparse(x):
        x = x.copy()
        x.data = np.sin(x.data)
        return x
    else:
        return np.sin(x)
def function[sine, parameter[x]]: constant[ sine(x) is equivalent to sin(x) except that it also works on sparse arrays. ] if call[name[sps].issparse, parameter[name[x]]] begin[:] variable[x] assign[=] call[name[x].copy, parameter[]] name[x].data assign[=] call[name[np].sin, parameter[name[x].data]] return[name[x]]
keyword[def] identifier[sine] ( identifier[x] ): literal[string] keyword[if] identifier[sps] . identifier[issparse] ( identifier[x] ): identifier[x] = identifier[x] . identifier[copy] () identifier[x] . identifier[data] = identifier[np] . identifier[sin] ( identifier[x] . identifier[data] ) keyword[return] identifier[x] keyword[else] : keyword[return] identifier[np] . identifier[sin] ( identifier[x] )
def sine(x):
    """
    sine(x) is equivalent to sin(x) except that it also works on sparse arrays.
    """
    if sps.issparse(x):
        x = x.copy()
        x.data = np.sin(x.data)
        return x # depends on [control=['if'], data=[]]
    else:
        return np.sin(x)
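With the np.sin call (the original record called np.sine, which does not exist in numpy), the function can be exercised as below; a self-contained usage sketch assuming numpy and scipy are installed:

import numpy as np
import scipy.sparse as sps

def sine(x):
    # Apply sin to only the stored entries of a sparse matrix; since
    # sin(0) == 0, this preserves the sparsity pattern exactly.
    if sps.issparse(x):
        x = x.copy()
        x.data = np.sin(x.data)
        return x
    return np.sin(x)

dense = np.array([[0.0, np.pi / 2], [np.pi, 0.0]])
print(np.allclose(sine(sps.csr_matrix(dense)).toarray(), np.sin(dense)))  # True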
def GetMessages(self, formatter_mediator, event): """Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter. """ if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) event_values = event.CopyToDict() file_reference = event_values.get('file_reference', None) if file_reference: event_values['file_reference'] = '{0:d}-{1:d}'.format( file_reference & 0xffffffffffff, file_reference >> 48) parent_file_reference = event_values.get('parent_file_reference', None) if parent_file_reference: event_values['parent_file_reference'] = '{0:d}-{1:d}'.format( parent_file_reference & 0xffffffffffff, parent_file_reference >> 48) update_reason_flags = event_values.get('update_reason_flags', 0) update_reasons = [] for bitmask, description in sorted(self._USN_REASON_FLAGS.items()): if bitmask & update_reason_flags: update_reasons.append(description) event_values['update_reason'] = ', '.join(update_reasons) update_source_flags = event_values.get('update_source_flags', 0) update_sources = [] for bitmask, description in sorted(self._USN_SOURCE_FLAGS.items()): if bitmask & update_source_flags: update_sources.append(description) event_values['update_source'] = ', '.join(update_sources) return self._ConditionalFormatMessages(event_values)
def function[GetMessages, parameter[self, formatter_mediator, event]]: constant[Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter. ] if compare[name[self].DATA_TYPE not_equal[!=] name[event].data_type] begin[:] <ast.Raise object at 0x7da18c4cf310> variable[event_values] assign[=] call[name[event].CopyToDict, parameter[]] variable[file_reference] assign[=] call[name[event_values].get, parameter[constant[file_reference], constant[None]]] if name[file_reference] begin[:] call[name[event_values]][constant[file_reference]] assign[=] call[constant[{0:d}-{1:d}].format, parameter[binary_operation[name[file_reference] <ast.BitAnd object at 0x7da2590d6b60> constant[281474976710655]], binary_operation[name[file_reference] <ast.RShift object at 0x7da2590d6a40> constant[48]]]] variable[parent_file_reference] assign[=] call[name[event_values].get, parameter[constant[parent_file_reference], constant[None]]] if name[parent_file_reference] begin[:] call[name[event_values]][constant[parent_file_reference]] assign[=] call[constant[{0:d}-{1:d}].format, parameter[binary_operation[name[parent_file_reference] <ast.BitAnd object at 0x7da2590d6b60> constant[281474976710655]], binary_operation[name[parent_file_reference] <ast.RShift object at 0x7da2590d6a40> constant[48]]]] variable[update_reason_flags] assign[=] call[name[event_values].get, parameter[constant[update_reason_flags], constant[0]]] variable[update_reasons] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18c4cce50>, <ast.Name object at 0x7da18c4cc910>]]] in starred[call[name[sorted], parameter[call[name[self]._USN_REASON_FLAGS.items, parameter[]]]]] begin[:] if binary_operation[name[bitmask] <ast.BitAnd object at 0x7da2590d6b60> name[update_reason_flags]] begin[:] call[name[update_reasons].append, parameter[name[description]]] call[name[event_values]][constant[update_reason]] assign[=] call[constant[, ].join, parameter[name[update_reasons]]] variable[update_source_flags] assign[=] call[name[event_values].get, parameter[constant[update_source_flags], constant[0]]] variable[update_sources] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18c4ccbe0>, <ast.Name object at 0x7da18c4cc250>]]] in starred[call[name[sorted], parameter[call[name[self]._USN_SOURCE_FLAGS.items, parameter[]]]]] begin[:] if binary_operation[name[bitmask] <ast.BitAnd object at 0x7da2590d6b60> name[update_source_flags]] begin[:] call[name[update_sources].append, parameter[name[description]]] call[name[event_values]][constant[update_source]] assign[=] call[constant[, ].join, parameter[name[update_sources]]] return[call[name[self]._ConditionalFormatMessages, parameter[name[event_values]]]]
keyword[def] identifier[GetMessages] ( identifier[self] , identifier[formatter_mediator] , identifier[event] ): literal[string] keyword[if] identifier[self] . identifier[DATA_TYPE] != identifier[event] . identifier[data_type] : keyword[raise] identifier[errors] . identifier[WrongFormatter] ( literal[string] . identifier[format] ( identifier[event] . identifier[data_type] )) identifier[event_values] = identifier[event] . identifier[CopyToDict] () identifier[file_reference] = identifier[event_values] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[file_reference] : identifier[event_values] [ literal[string] ]= literal[string] . identifier[format] ( identifier[file_reference] & literal[int] , identifier[file_reference] >> literal[int] ) identifier[parent_file_reference] = identifier[event_values] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[parent_file_reference] : identifier[event_values] [ literal[string] ]= literal[string] . identifier[format] ( identifier[parent_file_reference] & literal[int] , identifier[parent_file_reference] >> literal[int] ) identifier[update_reason_flags] = identifier[event_values] . identifier[get] ( literal[string] , literal[int] ) identifier[update_reasons] =[] keyword[for] identifier[bitmask] , identifier[description] keyword[in] identifier[sorted] ( identifier[self] . identifier[_USN_REASON_FLAGS] . identifier[items] ()): keyword[if] identifier[bitmask] & identifier[update_reason_flags] : identifier[update_reasons] . identifier[append] ( identifier[description] ) identifier[event_values] [ literal[string] ]= literal[string] . identifier[join] ( identifier[update_reasons] ) identifier[update_source_flags] = identifier[event_values] . identifier[get] ( literal[string] , literal[int] ) identifier[update_sources] =[] keyword[for] identifier[bitmask] , identifier[description] keyword[in] identifier[sorted] ( identifier[self] . identifier[_USN_SOURCE_FLAGS] . identifier[items] ()): keyword[if] identifier[bitmask] & identifier[update_source_flags] : identifier[update_sources] . identifier[append] ( identifier[description] ) identifier[event_values] [ literal[string] ]= literal[string] . identifier[join] ( identifier[update_sources] ) keyword[return] identifier[self] . identifier[_ConditionalFormatMessages] ( identifier[event_values] )
def GetMessages(self, formatter_mediator, event): """Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter. """ if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(event.data_type)) # depends on [control=['if'], data=[]] event_values = event.CopyToDict() file_reference = event_values.get('file_reference', None) if file_reference: event_values['file_reference'] = '{0:d}-{1:d}'.format(file_reference & 281474976710655, file_reference >> 48) # depends on [control=['if'], data=[]] parent_file_reference = event_values.get('parent_file_reference', None) if parent_file_reference: event_values['parent_file_reference'] = '{0:d}-{1:d}'.format(parent_file_reference & 281474976710655, parent_file_reference >> 48) # depends on [control=['if'], data=[]] update_reason_flags = event_values.get('update_reason_flags', 0) update_reasons = [] for (bitmask, description) in sorted(self._USN_REASON_FLAGS.items()): if bitmask & update_reason_flags: update_reasons.append(description) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] event_values['update_reason'] = ', '.join(update_reasons) update_source_flags = event_values.get('update_source_flags', 0) update_sources = [] for (bitmask, description) in sorted(self._USN_SOURCE_FLAGS.items()): if bitmask & update_source_flags: update_sources.append(description) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] event_values['update_source'] = ', '.join(update_sources) return self._ConditionalFormatMessages(event_values)
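The flag-decoding loop in isolation; the map below is a small illustrative subset of USN reason flags, and decode_flags is a hypothetical helper (the formatter above keeps the full maps in _USN_REASON_FLAGS / _USN_SOURCE_FLAGS):

USN_REASON_FLAGS = {
    0x00000001: 'USN_REASON_DATA_OVERWRITE',
    0x00000002: 'USN_REASON_DATA_EXTEND',
    0x00000100: 'USN_REASON_FILE_CREATE',
}

def decode_flags(flag_map, value):
    # Collect the description of every set bit, in ascending bitmask order.
    return ', '.join(
        description
        for bitmask, description in sorted(flag_map.items())
        if bitmask & value
    )

print(decode_flags(USN_REASON_FLAGS, 0x101))
# USN_REASON_DATA_OVERWRITE, USN_REASON_FILE_CREATE

# The 64-bit NTFS file reference packs an MFT entry (low 48 bits) and a
# sequence number (high 16 bits), which the formatter splits apart:
file_reference = (7 << 48) | 12345
print('{0:d}-{1:d}'.format(file_reference & 0xffffffffffff,
                           file_reference >> 48))  # 12345-7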
def compareSNPs(before, after, outFileName):
    """Compares two sets of SNPs.

    :param before: the names of the markers in the ``before`` file.
    :param after: the names of the markers in the ``after`` file.
    :param outFileName: the name of the output file.

    :type before: set
    :type after: set
    :type outFileName: str

    Finds the difference between two sets of markers, and writes them to
    the ``outFileName`` file.

    .. note::
        A :py:class:`ProgramError` is raised if:

        1. There are more markers in the ``after`` set than in the
           ``before`` set.
        2. Some markers that are in the ``after`` set are not in the
           ``before`` set.

    """
    # First, check that "before" is larger than "after"
    if len(after) > len(before):
        msg = "there are more SNPs after than before"
        raise ProgramError(msg)

    # Checks that all the SNPs "after" are in "before"
    if not (after <= before):
        msg = "some after SNPs are not in before"
        raise ProgramError(msg)

    # Printing the SNPs
    try:
        with open(outFileName, "w") as outputFile:
            differences = before - after
            if len(differences) > 0:
                print("\n".join(differences), file=outputFile)
    except IOError:
        msg = "%(outFileName)s: can't write to file" % locals()
        raise ProgramError(msg)
def function[compareSNPs, parameter[before, after, outFileName]]: constant[Compares two sets of SNPs. :param before: the names of the markers in the ``before`` file. :param after: the names of the markers in the ``after`` file. :param outFileName: the name of the output file. :type before: set :type after: set :type outFileName: str Finds the difference between two sets of markers, and writes them to the ``outFileName`` file. .. note:: A :py:class:`ProgramError` is raised if: 1. There are more markers in the ``after`` set than in the ``before`` set. 2. Some markers that are in the ``after`` set are not in the ``before`` set. ] if compare[call[name[len], parameter[name[after]]] greater[>] call[name[len], parameter[name[before]]]] begin[:] variable[msg] assign[=] constant[there are more SNPs after than before] <ast.Raise object at 0x7da1b0ad9240> if <ast.UnaryOp object at 0x7da1b0ada6b0> begin[:] variable[msg] assign[=] constant[some after SNPs are not in before] <ast.Raise object at 0x7da1b0adb0d0> <ast.Try object at 0x7da1b0ada2c0>
keyword[def] identifier[compareSNPs] ( identifier[before] , identifier[after] , identifier[outFileName] ): literal[string] keyword[if] identifier[len] ( identifier[after] )> identifier[len] ( identifier[before] ): identifier[msg] = literal[string] keyword[raise] identifier[ProgramError] ( identifier[msg] ) keyword[if] keyword[not] ( identifier[after] <= identifier[before] ): identifier[msg] = literal[string] keyword[raise] identifier[ProgramError] ( identifier[msg] ) keyword[try] : keyword[with] identifier[open] ( identifier[outFileName] , literal[string] ) keyword[as] identifier[outputFile] : identifier[differences] = identifier[before] - identifier[after] keyword[if] identifier[len] ( identifier[differences] )> literal[int] : identifier[print] ( literal[string] . identifier[join] ( identifier[differences] ), identifier[file] = identifier[outputFile] ) keyword[except] identifier[IOError] : identifier[msg] = literal[string] % identifier[locals] () keyword[raise] identifier[ProgramError] ( identifier[msg] )
def compareSNPs(before, after, outFileName):
    """Compares two sets of SNPs.

    :param before: the names of the markers in the ``before`` file.
    :param after: the names of the markers in the ``after`` file.
    :param outFileName: the name of the output file.

    :type before: set
    :type after: set
    :type outFileName: str

    Finds the difference between two sets of markers, and writes them to
    the ``outFileName`` file.

    .. note::
        A :py:class:`ProgramError` is raised if:

        1. There are more markers in the ``after`` set than in the
           ``before`` set.
        2. Some markers that are in the ``after`` set are not in the
           ``before`` set.

    """
    # First, check that "before" is larger than "after"
    if len(after) > len(before):
        msg = 'there are more SNPs after than before'
        raise ProgramError(msg) # depends on [control=['if'], data=[]]
    # Checks that all the SNPs "after" are in "before"
    if not after <= before:
        msg = 'some after SNPs are not in before'
        raise ProgramError(msg) # depends on [control=['if'], data=[]]
    # Printing the SNPs
    try:
        with open(outFileName, 'w') as outputFile:
            differences = before - after
            if len(differences) > 0:
                print('\n'.join(differences), file=outputFile) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['outputFile']] # depends on [control=['try'], data=[]]
    except IOError:
        msg = "%(outFileName)s: can't write to file" % locals()
        raise ProgramError(msg) # depends on [control=['except'], data=[]]
def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile): """ Projection File Read from File Method """ # Set file extension property self.fileExtension = extension # Open file and parse into a data structure with io_open(path, 'r') as f: self.projection = f.read()
def function[_read, parameter[self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile]]: constant[ Projection File Read from File Method ] name[self].fileExtension assign[=] name[extension] with call[name[io_open], parameter[name[path], constant[r]]] begin[:] name[self].projection assign[=] call[name[f].read, parameter[]]
keyword[def] identifier[_read] ( identifier[self] , identifier[directory] , identifier[filename] , identifier[session] , identifier[path] , identifier[name] , identifier[extension] , identifier[spatial] , identifier[spatialReferenceID] , identifier[replaceParamFile] ): literal[string] identifier[self] . identifier[fileExtension] = identifier[extension] keyword[with] identifier[io_open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] : identifier[self] . identifier[projection] = identifier[f] . identifier[read] ()
def _read(self, directory, filename, session, path, name, extension, spatial, spatialReferenceID, replaceParamFile): """ Projection File Read from File Method """ # Set file extension property self.fileExtension = extension # Open file and parse into a data structure with io_open(path, 'r') as f: self.projection = f.read() # depends on [control=['with'], data=['f']]
def light_general_attention(key, context, hidden_size, projected_align=False):
    """ It is an implementation of the Luong et al. attention mechanism with general score. Based on the paper:
    https://arxiv.org/abs/1508.04025 "Effective Approaches to Attention-based Neural Machine Translation"
    Args:
        key: A tensorflow tensor with dimensionality [None, None, key_size]
        context: A tensorflow tensor with dimensionality [None, None, max_num_tokens, token_size]
        hidden_size: Number of units in hidden representation
        projected_align: Using dense layer for hidden representation of context.
            If true, between input and attention mechanism insert a dense layer with dimensionality [hidden_size].
            If false, a dense layer is not used.
    Returns:
        output: Tensor at the output with dimensionality [None, None, hidden_size]
    """
    batch_size = tf.shape(context)[0]
    max_num_tokens, token_size = context.get_shape().as_list()[-2:]
    r_context = tf.reshape(context, shape=[-1, max_num_tokens, token_size])
    # projected_key: [None, None, hidden_size]
    projected_key = tf.layers.dense(key, hidden_size, kernel_initializer=xav())
    r_projected_key = tf.reshape(projected_key, shape=[-1, hidden_size, 1])

    # projected context: [None, None, hidden_size]
    projected_context = \
        tf.layers.dense(r_context, hidden_size, kernel_initializer=xav())
    attn = tf.nn.softmax(tf.matmul(projected_context, r_projected_key), dim=1)

    if projected_align:
        log.info("Using projected attention alignment")
        t_context = tf.transpose(projected_context, [0, 2, 1])
        output = tf.reshape(tf.matmul(t_context, attn),
                            shape=[batch_size, -1, hidden_size])
    else:
        log.info("Not using projected attention alignment")
        t_context = tf.transpose(r_context, [0, 2, 1])
        output = tf.reshape(tf.matmul(t_context, attn),
                            shape=[batch_size, -1, token_size])
    return output
def function[light_general_attention, parameter[key, context, hidden_size, projected_align]]: constant[ It is an implementation of the Luong et al. attention mechanism with general score. Based on the paper: https://arxiv.org/abs/1508.04025 "Effective Approaches to Attention-based Neural Machine Translation" Args: key: A tensorflow tensor with dimensionality [None, None, key_size] context: A tensorflow tensor with dimensionality [None, None, max_num_tokens, token_size] hidden_size: Number of units in hidden representation projected_align: Using dense layer for hidden representation of context. If true, between input and attention mechanism insert a dense layer with dimensionality [hidden_size]. If false, a dense layer is not used. Returns: output: Tensor at the output with dimensionality [None, None, hidden_size] ] variable[batch_size] assign[=] call[call[name[tf].shape, parameter[name[context]]]][constant[0]] <ast.Tuple object at 0x7da20c7ca830> assign[=] call[call[call[name[context].get_shape, parameter[]].as_list, parameter[]]][<ast.Slice object at 0x7da20c7c8730>] variable[r_context] assign[=] call[name[tf].reshape, parameter[name[context]]] variable[projected_key] assign[=] call[name[tf].layers.dense, parameter[name[key], name[hidden_size]]] variable[r_projected_key] assign[=] call[name[tf].reshape, parameter[name[projected_key]]] variable[projected_context] assign[=] call[name[tf].layers.dense, parameter[name[r_context], name[hidden_size]]] variable[attn] assign[=] call[name[tf].nn.softmax, parameter[call[name[tf].matmul, parameter[name[projected_context], name[r_projected_key]]]]] if name[projected_align] begin[:] call[name[log].info, parameter[constant[Using projected attention alignment]]] variable[t_context] assign[=] call[name[tf].transpose, parameter[name[projected_context], list[[<ast.Constant object at 0x7da20e9631f0>, <ast.Constant object at 0x7da20e963700>, <ast.Constant object at 0x7da20e962110>]]]] variable[output] assign[=] call[name[tf].reshape, parameter[call[name[tf].matmul, parameter[name[t_context], name[attn]]]]] return[name[output]]
keyword[def] identifier[light_general_attention] ( identifier[key] , identifier[context] , identifier[hidden_size] , identifier[projected_align] = keyword[False] ): literal[string] identifier[batch_size] = identifier[tf] . identifier[shape] ( identifier[context] )[ literal[int] ] identifier[max_num_tokens] , identifier[token_size] = identifier[context] . identifier[get_shape] (). identifier[as_list] ()[- literal[int] :] identifier[r_context] = identifier[tf] . identifier[reshape] ( identifier[context] , identifier[shape] =[- literal[int] , identifier[max_num_tokens] , identifier[token_size] ]) identifier[projected_key] = identifier[tf] . identifier[layers] . identifier[dense] ( identifier[key] , identifier[hidden_size] , identifier[kernel_initializer] = identifier[xav] ()) identifier[r_projected_key] = identifier[tf] . identifier[reshape] ( identifier[projected_key] , identifier[shape] =[- literal[int] , identifier[hidden_size] , literal[int] ]) identifier[projected_context] = identifier[tf] . identifier[layers] . identifier[dense] ( identifier[r_context] , identifier[hidden_size] , identifier[kernel_initializer] = identifier[xav] ()) identifier[attn] = identifier[tf] . identifier[nn] . identifier[softmax] ( identifier[tf] . identifier[matmul] ( identifier[projected_context] , identifier[r_projected_key] ), identifier[dim] = literal[int] ) keyword[if] identifier[projected_align] : identifier[log] . identifier[info] ( literal[string] ) identifier[t_context] = identifier[tf] . identifier[transpose] ( identifier[projected_context] ,[ literal[int] , literal[int] , literal[int] ]) identifier[output] = identifier[tf] . identifier[reshape] ( identifier[tf] . identifier[matmul] ( identifier[t_context] , identifier[attn] ), identifier[shape] =[ identifier[batch_size] ,- literal[int] , identifier[hidden_size] ]) keyword[else] : identifier[log] . identifier[info] ( literal[string] ) identifier[t_context] = identifier[tf] . identifier[transpose] ( identifier[r_context] ,[ literal[int] , literal[int] , literal[int] ]) identifier[output] = identifier[tf] . identifier[reshape] ( identifier[tf] . identifier[matmul] ( identifier[t_context] , identifier[attn] ), identifier[shape] =[ identifier[batch_size] ,- literal[int] , identifier[token_size] ]) keyword[return] identifier[output]
def light_general_attention(key, context, hidden_size, projected_align=False):
    """
    It is an implementation of the Luong et al. attention mechanism with general score. Based on the paper:
    https://arxiv.org/abs/1508.04025 "Effective Approaches to Attention-based Neural Machine Translation"

    Args:
        key: A tensorflow tensor with dimensionality [None, None, key_size]
        context: A tensorflow tensor with dimensionality [None, None, max_num_tokens, token_size]
        hidden_size: Number of units in hidden representation
        projected_align: Using dense layer for hidden representation of context.
            If true, between input and attention mechanism insert a dense layer with dimensionality [hidden_size].
            If false, a dense layer is not used.

    Returns:
        output: Tensor at the output with dimensionality [None, None, hidden_size]
    """
    batch_size = tf.shape(context)[0]
    (max_num_tokens, token_size) = context.get_shape().as_list()[-2:]
    r_context = tf.reshape(context, shape=[-1, max_num_tokens, token_size])
    # projected_key: [None, None, hidden_size]
    projected_key = tf.layers.dense(key, hidden_size, kernel_initializer=xav())
    r_projected_key = tf.reshape(projected_key, shape=[-1, hidden_size, 1])
    # projected context: [None, None, hidden_size]
    projected_context = tf.layers.dense(r_context, hidden_size, kernel_initializer=xav())
    attn = tf.nn.softmax(tf.matmul(projected_context, r_projected_key), dim=1)
    if projected_align:
        log.info('Using projected attention alignment')
        t_context = tf.transpose(projected_context, [0, 2, 1])
        output = tf.reshape(tf.matmul(t_context, attn), shape=[batch_size, -1, hidden_size]) # depends on [control=['if'], data=[]]
    else:
        log.info('Not using projected attention alignment')
        t_context = tf.transpose(r_context, [0, 2, 1])
        output = tf.reshape(tf.matmul(t_context, attn), shape=[batch_size, -1, token_size])
    return output
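As a reference, a batch-free numpy sketch of the Luong general score score(h_t, h_s) = h_s^T W h_t that the function above computes in TensorFlow; this is illustrative only and omits the context projection and batching:

import numpy as np

def general_attention(key, context, W):
    # score_i = context_i . (W @ key); softmax over tokens; weighted sum.
    scores = context @ (W @ key)             # [num_tokens]
    weights = np.exp(scores - scores.max())  # numerically stable softmax
    weights /= weights.sum()
    return weights @ context                 # [token_size]

rng = np.random.default_rng(0)
key = rng.normal(size=8)              # key_size == hidden_size == 8 here
context = rng.normal(size=(5, 8))     # 5 tokens of size 8
W = rng.normal(size=(8, 8))
print(general_attention(key, context, W).shape)  # (8,)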
def ccmod_class_label_lookup(label): """Get a CCMOD class from a label string.""" clsmod = {'ism': admm_ccmod.ConvCnstrMOD_IterSM, 'cg': admm_ccmod.ConvCnstrMOD_CG, 'cns': admm_ccmod.ConvCnstrMOD_Consensus, 'fista': fista_ccmod.ConvCnstrMOD} if label in clsmod: return clsmod[label] else: raise ValueError('Unknown ConvCnstrMOD solver method %s' % label)
def function[ccmod_class_label_lookup, parameter[label]]: constant[Get a CCMOD class from a label string.] variable[clsmod] assign[=] dictionary[[<ast.Constant object at 0x7da1b0798d90>, <ast.Constant object at 0x7da1b0798160>, <ast.Constant object at 0x7da1b0798910>, <ast.Constant object at 0x7da1b0798280>], [<ast.Attribute object at 0x7da1b0799570>, <ast.Attribute object at 0x7da1b0799600>, <ast.Attribute object at 0x7da1b0799690>, <ast.Attribute object at 0x7da1b0799000>]] if compare[name[label] in name[clsmod]] begin[:] return[call[name[clsmod]][name[label]]]
keyword[def] identifier[ccmod_class_label_lookup] ( identifier[label] ): literal[string] identifier[clsmod] ={ literal[string] : identifier[admm_ccmod] . identifier[ConvCnstrMOD_IterSM] , literal[string] : identifier[admm_ccmod] . identifier[ConvCnstrMOD_CG] , literal[string] : identifier[admm_ccmod] . identifier[ConvCnstrMOD_Consensus] , literal[string] : identifier[fista_ccmod] . identifier[ConvCnstrMOD] } keyword[if] identifier[label] keyword[in] identifier[clsmod] : keyword[return] identifier[clsmod] [ identifier[label] ] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[label] )
def ccmod_class_label_lookup(label): """Get a CCMOD class from a label string.""" clsmod = {'ism': admm_ccmod.ConvCnstrMOD_IterSM, 'cg': admm_ccmod.ConvCnstrMOD_CG, 'cns': admm_ccmod.ConvCnstrMOD_Consensus, 'fista': fista_ccmod.ConvCnstrMOD} if label in clsmod: return clsmod[label] # depends on [control=['if'], data=['label', 'clsmod']] else: raise ValueError('Unknown ConvCnstrMOD solver method %s' % label)
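As an illustration of the dispatch-table pattern used by ccmod_class_label_lookup above, here is a minimal self-contained sketch; SolverA and SolverB are placeholders, since the real admm_ccmod and fista_ccmod solver classes are not importable here.

class SolverA:  # placeholder standing in for e.g. admm_ccmod.ConvCnstrMOD_IterSM
    pass

class SolverB:  # placeholder standing in for e.g. fista_ccmod.ConvCnstrMOD
    pass

def class_label_lookup(label):
    """Map a short label onto a solver class via a dispatch dict."""
    clsmod = {'ism': SolverA, 'fista': SolverB}
    if label in clsmod:
        return clsmod[label]
    raise ValueError('Unknown ConvCnstrMOD solver method %s' % label)

assert class_label_lookup('ism') is SolverA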
def used_labels(self): """ Returns a list of required labels for this instruction """ result = [] tmp = self.asm.strip(' \n\r\t') if not len(tmp) or tmp[0] in ('#', ';'): return result try: tmpLexer = asmlex.lex.lex(object=asmlex.Lexer(), lextab='zxbasmlextab') tmpLexer.input(tmp) while True: token = tmpLexer.token() if not token: break if token.type == 'ID': result += [token.value] except: pass return result
def function[used_labels, parameter[self]]: constant[ Returns a list of required labels for this instruction ] variable[result] assign[=] list[[]] variable[tmp] assign[=] call[name[self].asm.strip, parameter[constant[ ]]] if <ast.BoolOp object at 0x7da18dc048b0> begin[:] return[name[result]] <ast.Try object at 0x7da18dc05d20> return[name[result]]
keyword[def] identifier[used_labels] ( identifier[self] ): literal[string] identifier[result] =[] identifier[tmp] = identifier[self] . identifier[asm] . identifier[strip] ( literal[string] ) keyword[if] keyword[not] identifier[len] ( identifier[tmp] ) keyword[or] identifier[tmp] [ literal[int] ] keyword[in] ( literal[string] , literal[string] ): keyword[return] identifier[result] keyword[try] : identifier[tmpLexer] = identifier[asmlex] . identifier[lex] . identifier[lex] ( identifier[object] = identifier[asmlex] . identifier[Lexer] (), identifier[lextab] = literal[string] ) identifier[tmpLexer] . identifier[input] ( identifier[tmp] ) keyword[while] keyword[True] : identifier[token] = identifier[tmpLexer] . identifier[token] () keyword[if] keyword[not] identifier[token] : keyword[break] keyword[if] identifier[token] . identifier[type] == literal[string] : identifier[result] +=[ identifier[token] . identifier[value] ] keyword[except] : keyword[pass] keyword[return] identifier[result]
def used_labels(self): """ Returns a list of required labels for this instruction """ result = [] tmp = self.asm.strip(' \n\r\t') if not len(tmp) or tmp[0] in ('#', ';'): return result # depends on [control=['if'], data=[]] try: tmpLexer = asmlex.lex.lex(object=asmlex.Lexer(), lextab='zxbasmlextab') tmpLexer.input(tmp) while True: token = tmpLexer.token() if not token: break # depends on [control=['if'], data=[]] if token.type == 'ID': result += [token.value] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] return result
def set_items_shuffled(self, shuffle):
    """Sets the shuffle flag.

    The shuffle flag may be overridden by other assessment sequencing
    rules.

    arg: shuffle (boolean): ``true`` if the items are shuffled,
            ``false`` if the items appear in the designated order
    raise: InvalidArgument - ``shuffle`` is invalid
    raise: NoAccess - ``Metadata.isReadOnly()`` is ``true``
    *compliance: mandatory -- This method must be implemented.*

    """
    # Implemented from template for osid.resource.ResourceForm.set_group_template
    if self.get_items_shuffled_metadata().is_read_only():
        raise errors.NoAccess()
    if not self._is_valid_boolean(shuffle):
        raise errors.InvalidArgument()
    self._my_map['itemsShuffled'] = shuffle
def function[set_items_shuffled, parameter[self, shuffle]]: constant[Sets the shuffle flag. The shuffle flag may be overridden by other assessment sequencing rules. arg: shuffle (boolean): ``true`` if the items are shuffled, ``false`` if the items appear in the designated order raise: InvalidArgument - ``shuffle`` is invalid raise: NoAccess - ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.* ] if call[call[name[self].get_items_shuffled_metadata, parameter[]].is_read_only, parameter[]] begin[:] <ast.Raise object at 0x7da20c7cad40> if <ast.UnaryOp object at 0x7da20c7cab00> begin[:] <ast.Raise object at 0x7da20c7c9f60> call[name[self]._my_map][constant[itemsShuffled]] assign[=] name[shuffle]
keyword[def] identifier[set_items_shuffled] ( identifier[self] , identifier[shuffle] ): literal[string] keyword[if] identifier[self] . identifier[get_items_shuffled_metadata] (). identifier[is_read_only] (): keyword[raise] identifier[errors] . identifier[NoAccess] () keyword[if] keyword[not] identifier[self] . identifier[_is_valid_boolean] ( identifier[shuffle] ): keyword[raise] identifier[errors] . identifier[InvalidArgument] () identifier[self] . identifier[_my_map] [ literal[string] ]= identifier[shuffle]
def set_items_shuffled(self, shuffle): """Sets the shuffle flag. The shuffle flag may be overridden by other assessment sequencing rules. arg: shuffle (boolean): ``true`` if the items are shuffled, ``false`` if the items appear in the designated order raise: InvalidArgument - ``shuffle`` is invalid raise: NoAccess - ``Metadata.isReadOnly()`` is ``true`` *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for osid.resource.ResourceForm.set_group_template if self.get_items_shuffled_metadata().is_read_only(): raise errors.NoAccess() # depends on [control=['if'], data=[]] if not self._is_valid_boolean(shuffle): raise errors.InvalidArgument() # depends on [control=['if'], data=[]] self._my_map['itemsShuffled'] = shuffle
def addMargin(self, margin, index=None): """Adds a new margin. index: index in the list of margins. Default: to the end of the list """ if index is None: self._margins.append(margin) else: self._margins.insert(index, margin) if margin.isVisible(): self.updateViewport()
def function[addMargin, parameter[self, margin, index]]: constant[Adds a new margin. index: index in the list of margins. Default: to the end of the list ] if compare[name[index] is constant[None]] begin[:] call[name[self]._margins.append, parameter[name[margin]]] if call[name[margin].isVisible, parameter[]] begin[:] call[name[self].updateViewport, parameter[]]
keyword[def] identifier[addMargin] ( identifier[self] , identifier[margin] , identifier[index] = keyword[None] ): literal[string] keyword[if] identifier[index] keyword[is] keyword[None] : identifier[self] . identifier[_margins] . identifier[append] ( identifier[margin] ) keyword[else] : identifier[self] . identifier[_margins] . identifier[insert] ( identifier[index] , identifier[margin] ) keyword[if] identifier[margin] . identifier[isVisible] (): identifier[self] . identifier[updateViewport] ()
def addMargin(self, margin, index=None): """Adds a new margin. index: index in the list of margins. Default: to the end of the list """ if index is None: self._margins.append(margin) # depends on [control=['if'], data=[]] else: self._margins.insert(index, margin) if margin.isVisible(): self.updateViewport() # depends on [control=['if'], data=[]]
def _apply_decorators(func, decorators): """Apply a list of decorators to a given function. ``decorators`` may contain items that are ``None`` or ``False`` which will be ignored. """ decorators = filter(_is_not_none_or_false, reversed(decorators)) for decorator in decorators: func = decorator(func) return func
def function[_apply_decorators, parameter[func, decorators]]: constant[Apply a list of decorators to a given function. ``decorators`` may contain items that are ``None`` or ``False`` which will be ignored. ] variable[decorators] assign[=] call[name[filter], parameter[name[_is_not_none_or_false], call[name[reversed], parameter[name[decorators]]]]] for taget[name[decorator]] in starred[name[decorators]] begin[:] variable[func] assign[=] call[name[decorator], parameter[name[func]]] return[name[func]]
keyword[def] identifier[_apply_decorators] ( identifier[func] , identifier[decorators] ): literal[string] identifier[decorators] = identifier[filter] ( identifier[_is_not_none_or_false] , identifier[reversed] ( identifier[decorators] )) keyword[for] identifier[decorator] keyword[in] identifier[decorators] : identifier[func] = identifier[decorator] ( identifier[func] ) keyword[return] identifier[func]
def _apply_decorators(func, decorators): """Apply a list of decorators to a given function. ``decorators`` may contain items that are ``None`` or ``False`` which will be ignored. """ decorators = filter(_is_not_none_or_false, reversed(decorators)) for decorator in decorators: func = decorator(func) # depends on [control=['for'], data=['decorator']] return func
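A small runnable check of the behaviour described above: because the list is reversed before application, _apply_decorators(f, [d1, d2]) is equivalent to stacking @d1 over @d2, and None/False entries are skipped. The _is_not_none_or_false helper is assumed here, since it is not shown in this excerpt.

def _is_not_none_or_false(obj):  # assumed helper, not shown in the source
    return obj is not None and obj is not False

def _apply_decorators(func, decorators):
    for decorator in filter(_is_not_none_or_false, reversed(decorators)):
        func = decorator(func)
    return func

def exclaim(func):
    return lambda: func() + '!'

def shout(func):
    return lambda: func().upper()

def greet():
    return 'hi'

# [shout, None, exclaim] behaves like @shout above @exclaim; the None is dropped.
assert _apply_decorators(greet, [shout, None, exclaim])() == 'HI!'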
def findRecordItem(self, record, parent=None): """ Looks through the tree hierarchy for the given record. :param record | <orb.Record> parent | <QTreeWidgetItem> || None :return <XOrbRecordItem> || None """ try: item = self._recordMapping[record]() except KeyError: return None if item is None: self._recordMapping.pop(record) return item
def function[findRecordItem, parameter[self, record, parent]]: constant[ Looks through the tree hierarchy for the given record. :param record | <orb.Record> parent | <QTreeWidgetItem> || None :return <XOrbRecordItem> || None ] <ast.Try object at 0x7da1b246ac80> if compare[name[item] is constant[None]] begin[:] call[name[self]._recordMapping.pop, parameter[name[record]]] return[name[item]]
keyword[def] identifier[findRecordItem] ( identifier[self] , identifier[record] , identifier[parent] = keyword[None] ): literal[string] keyword[try] : identifier[item] = identifier[self] . identifier[_recordMapping] [ identifier[record] ]() keyword[except] identifier[KeyError] : keyword[return] keyword[None] keyword[if] identifier[item] keyword[is] keyword[None] : identifier[self] . identifier[_recordMapping] . identifier[pop] ( identifier[record] ) keyword[return] identifier[item]
def findRecordItem(self, record, parent=None): """ Looks through the tree hierarchy for the given record. :param record | <orb.Record> parent | <QTreeWidgetItem> || None :return <XOrbRecordItem> || None """ try: item = self._recordMapping[record]() # depends on [control=['try'], data=[]] except KeyError: return None # depends on [control=['except'], data=[]] if item is None: self._recordMapping.pop(record) # depends on [control=['if'], data=[]] return item
def combine_columns(columns): """Combine ``columns`` into a single string. Example: >>> combine_columns(['eape', 'xml']) 'example' Args: columns (iterable): ordered columns to combine Returns: String of combined columns """ columns_zipped = itertools.zip_longest(*columns) return ''.join(x for zipped in columns_zipped for x in zipped if x)
def function[combine_columns, parameter[columns]]: constant[Combine ``columns`` into a single string. Example: >>> combine_columns(['eape', 'xml']) 'example' Args: columns (iterable): ordered columns to combine Returns: String of combined columns ] variable[columns_zipped] assign[=] call[name[itertools].zip_longest, parameter[<ast.Starred object at 0x7da1b2500880>]] return[call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b2503c40>]]]
keyword[def] identifier[combine_columns] ( identifier[columns] ): literal[string] identifier[columns_zipped] = identifier[itertools] . identifier[zip_longest] (* identifier[columns] ) keyword[return] literal[string] . identifier[join] ( identifier[x] keyword[for] identifier[zipped] keyword[in] identifier[columns_zipped] keyword[for] identifier[x] keyword[in] identifier[zipped] keyword[if] identifier[x] )
def combine_columns(columns): """Combine ``columns`` into a single string. Example: >>> combine_columns(['eape', 'xml']) 'example' Args: columns (iterable): ordered columns to combine Returns: String of combined columns """ columns_zipped = itertools.zip_longest(*columns) return ''.join((x for zipped in columns_zipped for x in zipped if x))
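To make the padding behaviour concrete: itertools.zip_longest pads the shorter columns with None, and the `if x` filter drops those pads, so uneven columns interleave cleanly. A short usage sketch:

import itertools

def combine_columns(columns):
    columns_zipped = itertools.zip_longest(*columns)
    return ''.join(x for zipped in columns_zipped for x in zipped if x)

assert combine_columns(['eape', 'xml']) == 'example'
# Shorter columns simply run out; the None pads are filtered away.
assert combine_columns(['ace', 'b', 'd']) == 'abdce'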
def _constraints_are_whitelisted(self, constraint_tuple): """ Detect whether a tuple of compatibility constraints matches constraints imposed by the merged list of the global constraints from PythonSetup and a user-supplied whitelist. """ if self._acceptable_interpreter_constraints == []: # The user wants to lint everything. return True return all(version.parse(constraint) in self._acceptable_interpreter_constraints for constraint in constraint_tuple)
def function[_constraints_are_whitelisted, parameter[self, constraint_tuple]]: constant[ Detect whether a tuple of compatibility constraints matches constraints imposed by the merged list of the global constraints from PythonSetup and a user-supplied whitelist. ] if compare[name[self]._acceptable_interpreter_constraints equal[==] list[[]]] begin[:] return[constant[True]] return[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b22f8340>]]]
keyword[def] identifier[_constraints_are_whitelisted] ( identifier[self] , identifier[constraint_tuple] ): literal[string] keyword[if] identifier[self] . identifier[_acceptable_interpreter_constraints] ==[]: keyword[return] keyword[True] keyword[return] identifier[all] ( identifier[version] . identifier[parse] ( identifier[constraint] ) keyword[in] identifier[self] . identifier[_acceptable_interpreter_constraints] keyword[for] identifier[constraint] keyword[in] identifier[constraint_tuple] )
def _constraints_are_whitelisted(self, constraint_tuple): """ Detect whether a tuple of compatibility constraints matches constraints imposed by the merged list of the global constraints from PythonSetup and a user-supplied whitelist. """ if self._acceptable_interpreter_constraints == []: # The user wants to lint everything. return True # depends on [control=['if'], data=[]] return all((version.parse(constraint) in self._acceptable_interpreter_constraints for constraint in constraint_tuple))
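The membership test relies on packaging's Version equality. A minimal sketch of the same check, with a made-up whitelist in place of the merged global constraints:

from packaging import version

# Hypothetical whitelist of acceptable interpreter constraints.
acceptable = [version.parse('2.7'), version.parse('3.6')]

def constraints_are_whitelisted(constraint_tuple):
    if acceptable == []:
        return True  # an empty whitelist means lint everything
    return all(version.parse(c) in acceptable for c in constraint_tuple)

assert constraints_are_whitelisted(('2.7', '3.6'))
assert not constraints_are_whitelisted(('3.8',))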
def save(self, filename):
    """
    Save the current buffer to `filename`

    Existing files with the same name will be overwritten.

    :param str filename: the name of the file to save to
    """
    with open(filename, "wb") as fd:
        fd.write(self.__buff)
def function[save, parameter[self, filename]]: constant[ Save the current buffer to `filename` Existing files with the same name will be overwritten. :param str filename: the name of the file to save to ] with call[name[open], parameter[name[filename], constant[wb]]] begin[:] call[name[fd].write, parameter[name[self].__buff]]
keyword[def] identifier[save] ( identifier[self] , identifier[filename] ): literal[string] keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fd] : identifier[fd] . identifier[write] ( identifier[self] . identifier[__buff] )
def save(self, filename):
    """
    Save the current buffer to `filename`

    Existing files with the same name will be overwritten.

    :param str filename: the name of the file to save to
    """
    with open(filename, 'wb') as fd:
        fd.write(self.__buff) # depends on [control=['with'], data=['fd']]
def length(self, error=ERROR, min_depth=MIN_DEPTH): """Calculate the length of the path up to a certain position""" start_point = self.point(0) end_point = self.point(1) return segment_length(self, 0, 1, start_point, end_point, error, min_depth, 0)
def function[length, parameter[self, error, min_depth]]: constant[Calculate the length of the path up to a certain position] variable[start_point] assign[=] call[name[self].point, parameter[constant[0]]] variable[end_point] assign[=] call[name[self].point, parameter[constant[1]]] return[call[name[segment_length], parameter[name[self], constant[0], constant[1], name[start_point], name[end_point], name[error], name[min_depth], constant[0]]]]
keyword[def] identifier[length] ( identifier[self] , identifier[error] = identifier[ERROR] , identifier[min_depth] = identifier[MIN_DEPTH] ): literal[string] identifier[start_point] = identifier[self] . identifier[point] ( literal[int] ) identifier[end_point] = identifier[self] . identifier[point] ( literal[int] ) keyword[return] identifier[segment_length] ( identifier[self] , literal[int] , literal[int] , identifier[start_point] , identifier[end_point] , identifier[error] , identifier[min_depth] , literal[int] )
def length(self, error=ERROR, min_depth=MIN_DEPTH): """Calculate the length of the path up to a certain position""" start_point = self.point(0) end_point = self.point(1) return segment_length(self, 0, 1, start_point, end_point, error, min_depth, 0)
def api_request( self, method, path, query_params=None, data=None, content_type=None, headers=None, api_base_url=None, api_version=None, expect_json=True, _target_object=None, ): """Make a request over the HTTP transport to the API. You shouldn't need to use this method, but if you plan to interact with the API using these primitives, this is the correct one to use. :type method: str :param method: The HTTP method name (ie, ``GET``, ``POST``, etc). Required. :type path: str :param path: The path to the resource (ie, ``'/b/bucket-name'``). Required. :type query_params: dict or list :param query_params: A dictionary of keys and values (or list of key-value pairs) to insert into the query string of the URL. :type data: str :param data: The data to send as the body of the request. Default is the empty string. :type content_type: str :param content_type: The proper MIME type of the data provided. Default is None. :type headers: dict :param headers: extra HTTP headers to be sent with the request. :type api_base_url: str :param api_base_url: The base URL for the API endpoint. Typically you won't have to provide this. Default is the standard API base URL. :type api_version: str :param api_version: The version of the API to call. Typically you shouldn't provide this and instead use the default for the library. Default is the latest API version supported by google-cloud-python. :type expect_json: bool :param expect_json: If True, this method will try to parse the response as JSON and raise an exception if that cannot be done. Default is True. :type _target_object: :class:`object` :param _target_object: (Optional) Protected argument to be used by library callers. This can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. :raises ~google.cloud.exceptions.GoogleCloudError: if the response code is not 200 OK. :raises ValueError: if the response content type is not JSON. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. """ url = self.build_api_url( path=path, query_params=query_params, api_base_url=api_base_url, api_version=api_version, ) # Making the executive decision that any dictionary # data will be sent properly as JSON. if data and isinstance(data, dict): data = json.dumps(data) content_type = "application/json" response = self._make_request( method=method, url=url, data=data, content_type=content_type, headers=headers, target_object=_target_object, ) if not 200 <= response.status_code < 300: raise exceptions.from_http_response(response) if expect_json and response.content: return response.json() else: return response.content
def function[api_request, parameter[self, method, path, query_params, data, content_type, headers, api_base_url, api_version, expect_json, _target_object]]: constant[Make a request over the HTTP transport to the API. You shouldn't need to use this method, but if you plan to interact with the API using these primitives, this is the correct one to use. :type method: str :param method: The HTTP method name (ie, ``GET``, ``POST``, etc). Required. :type path: str :param path: The path to the resource (ie, ``'/b/bucket-name'``). Required. :type query_params: dict or list :param query_params: A dictionary of keys and values (or list of key-value pairs) to insert into the query string of the URL. :type data: str :param data: The data to send as the body of the request. Default is the empty string. :type content_type: str :param content_type: The proper MIME type of the data provided. Default is None. :type headers: dict :param headers: extra HTTP headers to be sent with the request. :type api_base_url: str :param api_base_url: The base URL for the API endpoint. Typically you won't have to provide this. Default is the standard API base URL. :type api_version: str :param api_version: The version of the API to call. Typically you shouldn't provide this and instead use the default for the library. Default is the latest API version supported by google-cloud-python. :type expect_json: bool :param expect_json: If True, this method will try to parse the response as JSON and raise an exception if that cannot be done. Default is True. :type _target_object: :class:`object` :param _target_object: (Optional) Protected argument to be used by library callers. This can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. :raises ~google.cloud.exceptions.GoogleCloudError: if the response code is not 200 OK. :raises ValueError: if the response content type is not JSON. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. ] variable[url] assign[=] call[name[self].build_api_url, parameter[]] if <ast.BoolOp object at 0x7da204565b40> begin[:] variable[data] assign[=] call[name[json].dumps, parameter[name[data]]] variable[content_type] assign[=] constant[application/json] variable[response] assign[=] call[name[self]._make_request, parameter[]] if <ast.UnaryOp object at 0x7da204565390> begin[:] <ast.Raise object at 0x7da204565f60> if <ast.BoolOp object at 0x7da204564940> begin[:] return[call[name[response].json, parameter[]]]
keyword[def] identifier[api_request] ( identifier[self] , identifier[method] , identifier[path] , identifier[query_params] = keyword[None] , identifier[data] = keyword[None] , identifier[content_type] = keyword[None] , identifier[headers] = keyword[None] , identifier[api_base_url] = keyword[None] , identifier[api_version] = keyword[None] , identifier[expect_json] = keyword[True] , identifier[_target_object] = keyword[None] , ): literal[string] identifier[url] = identifier[self] . identifier[build_api_url] ( identifier[path] = identifier[path] , identifier[query_params] = identifier[query_params] , identifier[api_base_url] = identifier[api_base_url] , identifier[api_version] = identifier[api_version] , ) keyword[if] identifier[data] keyword[and] identifier[isinstance] ( identifier[data] , identifier[dict] ): identifier[data] = identifier[json] . identifier[dumps] ( identifier[data] ) identifier[content_type] = literal[string] identifier[response] = identifier[self] . identifier[_make_request] ( identifier[method] = identifier[method] , identifier[url] = identifier[url] , identifier[data] = identifier[data] , identifier[content_type] = identifier[content_type] , identifier[headers] = identifier[headers] , identifier[target_object] = identifier[_target_object] , ) keyword[if] keyword[not] literal[int] <= identifier[response] . identifier[status_code] < literal[int] : keyword[raise] identifier[exceptions] . identifier[from_http_response] ( identifier[response] ) keyword[if] identifier[expect_json] keyword[and] identifier[response] . identifier[content] : keyword[return] identifier[response] . identifier[json] () keyword[else] : keyword[return] identifier[response] . identifier[content]
def api_request(self, method, path, query_params=None, data=None, content_type=None, headers=None, api_base_url=None, api_version=None, expect_json=True, _target_object=None): """Make a request over the HTTP transport to the API. You shouldn't need to use this method, but if you plan to interact with the API using these primitives, this is the correct one to use. :type method: str :param method: The HTTP method name (ie, ``GET``, ``POST``, etc). Required. :type path: str :param path: The path to the resource (ie, ``'/b/bucket-name'``). Required. :type query_params: dict or list :param query_params: A dictionary of keys and values (or list of key-value pairs) to insert into the query string of the URL. :type data: str :param data: The data to send as the body of the request. Default is the empty string. :type content_type: str :param content_type: The proper MIME type of the data provided. Default is None. :type headers: dict :param headers: extra HTTP headers to be sent with the request. :type api_base_url: str :param api_base_url: The base URL for the API endpoint. Typically you won't have to provide this. Default is the standard API base URL. :type api_version: str :param api_version: The version of the API to call. Typically you shouldn't provide this and instead use the default for the library. Default is the latest API version supported by google-cloud-python. :type expect_json: bool :param expect_json: If True, this method will try to parse the response as JSON and raise an exception if that cannot be done. Default is True. :type _target_object: :class:`object` :param _target_object: (Optional) Protected argument to be used by library callers. This can allow custom behavior, for example, to defer an HTTP request and complete initialization of the object at a later time. :raises ~google.cloud.exceptions.GoogleCloudError: if the response code is not 200 OK. :raises ValueError: if the response content type is not JSON. :rtype: dict or str :returns: The API response payload, either as a raw string or a dictionary if the response is valid JSON. """ url = self.build_api_url(path=path, query_params=query_params, api_base_url=api_base_url, api_version=api_version) # Making the executive decision that any dictionary # data will be sent properly as JSON. if data and isinstance(data, dict): data = json.dumps(data) content_type = 'application/json' # depends on [control=['if'], data=[]] response = self._make_request(method=method, url=url, data=data, content_type=content_type, headers=headers, target_object=_target_object) if not 200 <= response.status_code < 300: raise exceptions.from_http_response(response) # depends on [control=['if'], data=[]] if expect_json and response.content: return response.json() # depends on [control=['if'], data=[]] else: return response.content
def option2tuple(opt): """Return a tuple of option, taking possible presence of level into account""" if isinstance(opt[0], int): tup = opt[1], opt[2:] else: tup = opt[0], opt[1:] return tup
def function[option2tuple, parameter[opt]]: constant[Return a tuple of option, taking possible presence of level into account] if call[name[isinstance], parameter[call[name[opt]][constant[0]], name[int]]] begin[:] variable[tup] assign[=] tuple[[<ast.Subscript object at 0x7da2054a7580>, <ast.Subscript object at 0x7da2054a6bf0>]] return[name[tup]]
keyword[def] identifier[option2tuple] ( identifier[opt] ): literal[string] keyword[if] identifier[isinstance] ( identifier[opt] [ literal[int] ], identifier[int] ): identifier[tup] = identifier[opt] [ literal[int] ], identifier[opt] [ literal[int] :] keyword[else] : identifier[tup] = identifier[opt] [ literal[int] ], identifier[opt] [ literal[int] :] keyword[return] identifier[tup]
def option2tuple(opt): """Return a tuple of option, taking possible presence of level into account""" if isinstance(opt[0], int): tup = (opt[1], opt[2:]) # depends on [control=['if'], data=[]] else: tup = (opt[0], opt[1:]) return tup
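A quick demonstration of both branches (the function above copied verbatim so the snippet runs standalone): a leading int is treated as a level and skipped.

def option2tuple(opt):
    if isinstance(opt[0], int):
        return opt[1], opt[2:]
    return opt[0], opt[1:]

# With a level prefix, the option name sits at index 1.
assert option2tuple((2, '--verbose', 'extra')) == ('--verbose', ('extra',))
# Without one, it sits at index 0.
assert option2tuple(('--quiet', 'a', 'b')) == ('--quiet', ('a', 'b'))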
def get_item_ids(self): """This is out of spec, but required for adaptive assessment parts?""" item_ids = [] if self.has_items(): for idstr in self._my_map['itemIds']: item_ids.append(idstr) return IdList(item_ids)
def function[get_item_ids, parameter[self]]: constant[This is out of spec, but required for adaptive assessment parts?] variable[item_ids] assign[=] list[[]] if call[name[self].has_items, parameter[]] begin[:] for taget[name[idstr]] in starred[call[name[self]._my_map][constant[itemIds]]] begin[:] call[name[item_ids].append, parameter[name[idstr]]] return[call[name[IdList], parameter[name[item_ids]]]]
keyword[def] identifier[get_item_ids] ( identifier[self] ): literal[string] identifier[item_ids] =[] keyword[if] identifier[self] . identifier[has_items] (): keyword[for] identifier[idstr] keyword[in] identifier[self] . identifier[_my_map] [ literal[string] ]: identifier[item_ids] . identifier[append] ( identifier[idstr] ) keyword[return] identifier[IdList] ( identifier[item_ids] )
def get_item_ids(self): """This is out of spec, but required for adaptive assessment parts?""" item_ids = [] if self.has_items(): for idstr in self._my_map['itemIds']: item_ids.append(idstr) # depends on [control=['for'], data=['idstr']] # depends on [control=['if'], data=[]] return IdList(item_ids)
def align_with(self, other): """ Align the dataframe's index with another. """ return self.__class__(self.data.reindex_like(other), **self._kwargs)
def function[align_with, parameter[self, other]]: constant[ Align the dataframe's index with another. ] return[call[name[self].__class__, parameter[call[name[self].data.reindex_like, parameter[name[other]]]]]]
keyword[def] identifier[align_with] ( identifier[self] , identifier[other] ): literal[string] keyword[return] identifier[self] . identifier[__class__] ( identifier[self] . identifier[data] . identifier[reindex_like] ( identifier[other] ),** identifier[self] . identifier[_kwargs] )
def align_with(self, other): """ Align the dataframe's index with another. """ return self.__class__(self.data.reindex_like(other), **self._kwargs)
def cut_region(self, x, y, radius, data): """Return a cut region (radius) pixels away from (x, y) in (data). """ n = radius ht, wd = data.shape x0, x1 = max(0, x - n), min(wd - 1, x + n) y0, y1 = max(0, y - n), min(ht - 1, y + n) arr = data[y0:y1 + 1, x0:x1 + 1] return (x0, y0, arr)
def function[cut_region, parameter[self, x, y, radius, data]]: constant[Return a cut region (radius) pixels away from (x, y) in (data). ] variable[n] assign[=] name[radius] <ast.Tuple object at 0x7da20e957490> assign[=] name[data].shape <ast.Tuple object at 0x7da20e956f80> assign[=] tuple[[<ast.Call object at 0x7da204962a40>, <ast.Call object at 0x7da1b0c259f0>]] <ast.Tuple object at 0x7da1b0c26dd0> assign[=] tuple[[<ast.Call object at 0x7da1b0c25ba0>, <ast.Call object at 0x7da1b0c25cc0>]] variable[arr] assign[=] call[name[data]][tuple[[<ast.Slice object at 0x7da1b0c24700>, <ast.Slice object at 0x7da1b0c25000>]]] return[tuple[[<ast.Name object at 0x7da1b0c252a0>, <ast.Name object at 0x7da1b0c25cf0>, <ast.Name object at 0x7da1b0c25d20>]]]
keyword[def] identifier[cut_region] ( identifier[self] , identifier[x] , identifier[y] , identifier[radius] , identifier[data] ): literal[string] identifier[n] = identifier[radius] identifier[ht] , identifier[wd] = identifier[data] . identifier[shape] identifier[x0] , identifier[x1] = identifier[max] ( literal[int] , identifier[x] - identifier[n] ), identifier[min] ( identifier[wd] - literal[int] , identifier[x] + identifier[n] ) identifier[y0] , identifier[y1] = identifier[max] ( literal[int] , identifier[y] - identifier[n] ), identifier[min] ( identifier[ht] - literal[int] , identifier[y] + identifier[n] ) identifier[arr] = identifier[data] [ identifier[y0] : identifier[y1] + literal[int] , identifier[x0] : identifier[x1] + literal[int] ] keyword[return] ( identifier[x0] , identifier[y0] , identifier[arr] )
def cut_region(self, x, y, radius, data): """Return a cut region (radius) pixels away from (x, y) in (data). """ n = radius (ht, wd) = data.shape (x0, x1) = (max(0, x - n), min(wd - 1, x + n)) (y0, y1) = (max(0, y - n), min(ht - 1, y + n)) arr = data[y0:y1 + 1, x0:x1 + 1] return (x0, y0, arr)
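A short numpy check of the edge clipping: requesting a radius-3 window around (1, 1) in a 10x10 array clips at the top-left corner, yielding a 5x5 cut whose origin is (0, 0). The wrapper class is a stand-in for the real host class.

import numpy as np

class _Cutter:  # stand-in for the class that owns cut_region
    def cut_region(self, x, y, radius, data):
        n = radius
        ht, wd = data.shape
        x0, x1 = max(0, x - n), min(wd - 1, x + n)
        y0, y1 = max(0, y - n), min(ht - 1, y + n)
        return (x0, y0, data[y0:y1 + 1, x0:x1 + 1])

data = np.arange(100).reshape(10, 10)
x0, y0, arr = _Cutter().cut_region(1, 1, 3, data)
assert (x0, y0) == (0, 0) and arr.shape == (5, 5)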
def save_prov_to_files(self, showattributes=False): """ Write-out provn serialisation to nidm.provn. """ self.doc.add_bundle(self.bundle) # provn_file = os.path.join(self.export_dir, 'nidm.provn') # provn_fid = open(provn_file, 'w') # # FIXME None # # provn_fid.write(self.doc.get_provn(4).replace("None", "-")) # provn_fid.close() ttl_file = os.path.join(self.export_dir, 'nidm.ttl') ttl_txt = self.doc.serialize(format='rdf', rdf_format='turtle') ttl_txt, json_context = self.use_prefixes(ttl_txt) # Add namespaces to json-ld context for namespace in self.doc._namespaces.get_registered_namespaces(): json_context[namespace._prefix] = namespace._uri for namespace in \ list(self.doc._namespaces._default_namespaces.values()): json_context[namespace._prefix] = namespace._uri json_context["xsd"] = "http://www.w3.org/2000/01/rdf-schema#" # Work-around to issue with INF value in rdflib (reported in # https://github.com/RDFLib/rdflib/pull/655) ttl_txt = ttl_txt.replace(' inf ', ' "INF"^^xsd:float ') with open(ttl_file, 'w') as ttl_fid: ttl_fid.write(ttl_txt) # print(json_context) jsonld_file = os.path.join(self.export_dir, 'nidm.json') jsonld_txt = self.doc.serialize(format='rdf', rdf_format='json-ld', context=json_context) with open(jsonld_file, 'w') as jsonld_fid: jsonld_fid.write(jsonld_txt) # provjsonld_file = os.path.join(self.export_dir, 'nidm.provjsonld') # provjsonld_txt = self.doc.serialize(format='jsonld') # with open(provjsonld_file, 'w') as provjsonld_fid: # provjsonld_fid.write(provjsonld_txt) # provn_file = os.path.join(self.export_dir, 'nidm.provn') # provn_txt = self.doc.serialize(format='provn') # with open(provn_file, 'w') as provn_fid: # provn_fid.write(provn_txt) # Post-processing if not self.zipped: # Just rename temp directory to output_path os.rename(self.export_dir, self.out_dir) else: # Create a zip file that contains the content of the temp directory os.chdir(self.export_dir) zf = zipfile.ZipFile(os.path.join("..", self.out_dir), mode='w') try: for root, dirnames, filenames in os.walk("."): for filename in filenames: zf.write(os.path.join(filename)) finally: zf.close() # Need to move up before deleting the folder os.chdir("..") shutil.rmtree(os.path.join("..", self.export_dir))
def function[save_prov_to_files, parameter[self, showattributes]]: constant[ Write-out provn serialisation to nidm.provn. ] call[name[self].doc.add_bundle, parameter[name[self].bundle]] variable[ttl_file] assign[=] call[name[os].path.join, parameter[name[self].export_dir, constant[nidm.ttl]]] variable[ttl_txt] assign[=] call[name[self].doc.serialize, parameter[]] <ast.Tuple object at 0x7da1b0b5f0d0> assign[=] call[name[self].use_prefixes, parameter[name[ttl_txt]]] for taget[name[namespace]] in starred[call[name[self].doc._namespaces.get_registered_namespaces, parameter[]]] begin[:] call[name[json_context]][name[namespace]._prefix] assign[=] name[namespace]._uri for taget[name[namespace]] in starred[call[name[list], parameter[call[name[self].doc._namespaces._default_namespaces.values, parameter[]]]]] begin[:] call[name[json_context]][name[namespace]._prefix] assign[=] name[namespace]._uri call[name[json_context]][constant[xsd]] assign[=] constant[http://www.w3.org/2000/01/rdf-schema#] variable[ttl_txt] assign[=] call[name[ttl_txt].replace, parameter[constant[ inf ], constant[ "INF"^^xsd:float ]]] with call[name[open], parameter[name[ttl_file], constant[w]]] begin[:] call[name[ttl_fid].write, parameter[name[ttl_txt]]] variable[jsonld_file] assign[=] call[name[os].path.join, parameter[name[self].export_dir, constant[nidm.json]]] variable[jsonld_txt] assign[=] call[name[self].doc.serialize, parameter[]] with call[name[open], parameter[name[jsonld_file], constant[w]]] begin[:] call[name[jsonld_fid].write, parameter[name[jsonld_txt]]] if <ast.UnaryOp object at 0x7da1b0b5c340> begin[:] call[name[os].rename, parameter[name[self].export_dir, name[self].out_dir]]
keyword[def] identifier[save_prov_to_files] ( identifier[self] , identifier[showattributes] = keyword[False] ): literal[string] identifier[self] . identifier[doc] . identifier[add_bundle] ( identifier[self] . identifier[bundle] ) identifier[ttl_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[export_dir] , literal[string] ) identifier[ttl_txt] = identifier[self] . identifier[doc] . identifier[serialize] ( identifier[format] = literal[string] , identifier[rdf_format] = literal[string] ) identifier[ttl_txt] , identifier[json_context] = identifier[self] . identifier[use_prefixes] ( identifier[ttl_txt] ) keyword[for] identifier[namespace] keyword[in] identifier[self] . identifier[doc] . identifier[_namespaces] . identifier[get_registered_namespaces] (): identifier[json_context] [ identifier[namespace] . identifier[_prefix] ]= identifier[namespace] . identifier[_uri] keyword[for] identifier[namespace] keyword[in] identifier[list] ( identifier[self] . identifier[doc] . identifier[_namespaces] . identifier[_default_namespaces] . identifier[values] ()): identifier[json_context] [ identifier[namespace] . identifier[_prefix] ]= identifier[namespace] . identifier[_uri] identifier[json_context] [ literal[string] ]= literal[string] identifier[ttl_txt] = identifier[ttl_txt] . identifier[replace] ( literal[string] , literal[string] ) keyword[with] identifier[open] ( identifier[ttl_file] , literal[string] ) keyword[as] identifier[ttl_fid] : identifier[ttl_fid] . identifier[write] ( identifier[ttl_txt] ) identifier[jsonld_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[export_dir] , literal[string] ) identifier[jsonld_txt] = identifier[self] . identifier[doc] . identifier[serialize] ( identifier[format] = literal[string] , identifier[rdf_format] = literal[string] , identifier[context] = identifier[json_context] ) keyword[with] identifier[open] ( identifier[jsonld_file] , literal[string] ) keyword[as] identifier[jsonld_fid] : identifier[jsonld_fid] . identifier[write] ( identifier[jsonld_txt] ) keyword[if] keyword[not] identifier[self] . identifier[zipped] : identifier[os] . identifier[rename] ( identifier[self] . identifier[export_dir] , identifier[self] . identifier[out_dir] ) keyword[else] : identifier[os] . identifier[chdir] ( identifier[self] . identifier[export_dir] ) identifier[zf] = identifier[zipfile] . identifier[ZipFile] ( identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[self] . identifier[out_dir] ), identifier[mode] = literal[string] ) keyword[try] : keyword[for] identifier[root] , identifier[dirnames] , identifier[filenames] keyword[in] identifier[os] . identifier[walk] ( literal[string] ): keyword[for] identifier[filename] keyword[in] identifier[filenames] : identifier[zf] . identifier[write] ( identifier[os] . identifier[path] . identifier[join] ( identifier[filename] )) keyword[finally] : identifier[zf] . identifier[close] () identifier[os] . identifier[chdir] ( literal[string] ) identifier[shutil] . identifier[rmtree] ( identifier[os] . identifier[path] . identifier[join] ( literal[string] , identifier[self] . identifier[export_dir] ))
def save_prov_to_files(self, showattributes=False): """ Write-out provn serialisation to nidm.provn. """ self.doc.add_bundle(self.bundle) # provn_file = os.path.join(self.export_dir, 'nidm.provn') # provn_fid = open(provn_file, 'w') # # FIXME None # # provn_fid.write(self.doc.get_provn(4).replace("None", "-")) # provn_fid.close() ttl_file = os.path.join(self.export_dir, 'nidm.ttl') ttl_txt = self.doc.serialize(format='rdf', rdf_format='turtle') (ttl_txt, json_context) = self.use_prefixes(ttl_txt) # Add namespaces to json-ld context for namespace in self.doc._namespaces.get_registered_namespaces(): json_context[namespace._prefix] = namespace._uri # depends on [control=['for'], data=['namespace']] for namespace in list(self.doc._namespaces._default_namespaces.values()): json_context[namespace._prefix] = namespace._uri # depends on [control=['for'], data=['namespace']] json_context['xsd'] = 'http://www.w3.org/2000/01/rdf-schema#' # Work-around to issue with INF value in rdflib (reported in # https://github.com/RDFLib/rdflib/pull/655) ttl_txt = ttl_txt.replace(' inf ', ' "INF"^^xsd:float ') with open(ttl_file, 'w') as ttl_fid: ttl_fid.write(ttl_txt) # depends on [control=['with'], data=['ttl_fid']] # print(json_context) jsonld_file = os.path.join(self.export_dir, 'nidm.json') jsonld_txt = self.doc.serialize(format='rdf', rdf_format='json-ld', context=json_context) with open(jsonld_file, 'w') as jsonld_fid: jsonld_fid.write(jsonld_txt) # depends on [control=['with'], data=['jsonld_fid']] # provjsonld_file = os.path.join(self.export_dir, 'nidm.provjsonld') # provjsonld_txt = self.doc.serialize(format='jsonld') # with open(provjsonld_file, 'w') as provjsonld_fid: # provjsonld_fid.write(provjsonld_txt) # provn_file = os.path.join(self.export_dir, 'nidm.provn') # provn_txt = self.doc.serialize(format='provn') # with open(provn_file, 'w') as provn_fid: # provn_fid.write(provn_txt) # Post-processing if not self.zipped: # Just rename temp directory to output_path os.rename(self.export_dir, self.out_dir) # depends on [control=['if'], data=[]] else: # Create a zip file that contains the content of the temp directory os.chdir(self.export_dir) zf = zipfile.ZipFile(os.path.join('..', self.out_dir), mode='w') try: for (root, dirnames, filenames) in os.walk('.'): for filename in filenames: zf.write(os.path.join(filename)) # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=[]] # depends on [control=['try'], data=[]] finally: zf.close() # Need to move up before deleting the folder os.chdir('..') shutil.rmtree(os.path.join('..', self.export_dir))
def possible_version_evaluation(self): """Evaluate the possible range of versions for each target, yielding the output analysis.""" only_broken = self.get_options().only_broken ranges = self._ranges yield 'Allowable JVM platform ranges (* = anything):' for target in sorted(filter(self._is_relevant, self.jvm_targets)): min_version = ranges.min_allowed_version.get(target) max_version = ranges.max_allowed_version.get(target) current_valid = True if min_version and self.jvm_version(target) < min_version: current_valid = False if max_version and self.jvm_version(target) > max_version: current_valid = False current_text = str(self.jvm_version(target)) if not current_valid: current_text = self._format_error(current_text) elif only_broken: continue if min_version and max_version: range_text = '{} to {}'.format(min_version, max_version) if min_version > max_version: range_text = self._format_error(range_text) elif min_version: range_text = '{}+'.format(min_version) elif max_version: range_text = '<={}'.format(max_version) else: range_text = '*' yield '{address}: {range} (is {current})'.format(address=target.address.spec, range=range_text, current=current_text,) if self.get_options().detailed or not current_valid: if min_version: min_because = [t for t in ranges.target_dependencies[target] if self.jvm_version(t) == min_version] yield ' min={} because of dependencies:'.format(min_version) for dep in sorted(min_because): yield ' {}'.format(dep.address.spec) if max_version: max_because = [t for t in ranges.target_dependees[target] if self.jvm_version(t) == max_version] yield ' max={} because of dependees:'.format(max_version) for dep in sorted(max_because): yield ' {}'.format(dep.address.spec) yield ''
def function[possible_version_evaluation, parameter[self]]: constant[Evaluate the possible range of versions for each target, yielding the output analysis.] variable[only_broken] assign[=] call[name[self].get_options, parameter[]].only_broken variable[ranges] assign[=] name[self]._ranges <ast.Yield object at 0x7da1b1eec7c0> for taget[name[target]] in starred[call[name[sorted], parameter[call[name[filter], parameter[name[self]._is_relevant, name[self].jvm_targets]]]]] begin[:] variable[min_version] assign[=] call[name[ranges].min_allowed_version.get, parameter[name[target]]] variable[max_version] assign[=] call[name[ranges].max_allowed_version.get, parameter[name[target]]] variable[current_valid] assign[=] constant[True] if <ast.BoolOp object at 0x7da1b1eee350> begin[:] variable[current_valid] assign[=] constant[False] if <ast.BoolOp object at 0x7da1b1eed2d0> begin[:] variable[current_valid] assign[=] constant[False] variable[current_text] assign[=] call[name[str], parameter[call[name[self].jvm_version, parameter[name[target]]]]] if <ast.UnaryOp object at 0x7da1b1eec7f0> begin[:] variable[current_text] assign[=] call[name[self]._format_error, parameter[name[current_text]]] if <ast.BoolOp object at 0x7da1b1eed690> begin[:] variable[range_text] assign[=] call[constant[{} to {}].format, parameter[name[min_version], name[max_version]]] if compare[name[min_version] greater[>] name[max_version]] begin[:] variable[range_text] assign[=] call[name[self]._format_error, parameter[name[range_text]]] <ast.Yield object at 0x7da1b1eeca90> if <ast.BoolOp object at 0x7da1b1eed960> begin[:] if name[min_version] begin[:] variable[min_because] assign[=] <ast.ListComp object at 0x7da1b1eeff70> <ast.Yield object at 0x7da1b1eef3d0> for taget[name[dep]] in starred[call[name[sorted], parameter[name[min_because]]]] begin[:] <ast.Yield object at 0x7da1b1eeee90> if name[max_version] begin[:] variable[max_because] assign[=] <ast.ListComp object at 0x7da1b1eed3c0> <ast.Yield object at 0x7da1b1eee9b0> for taget[name[dep]] in starred[call[name[sorted], parameter[name[max_because]]]] begin[:] <ast.Yield object at 0x7da1b1eef580> <ast.Yield object at 0x7da1b1eefc10>
keyword[def] identifier[possible_version_evaluation] ( identifier[self] ): literal[string] identifier[only_broken] = identifier[self] . identifier[get_options] (). identifier[only_broken] identifier[ranges] = identifier[self] . identifier[_ranges] keyword[yield] literal[string] keyword[for] identifier[target] keyword[in] identifier[sorted] ( identifier[filter] ( identifier[self] . identifier[_is_relevant] , identifier[self] . identifier[jvm_targets] )): identifier[min_version] = identifier[ranges] . identifier[min_allowed_version] . identifier[get] ( identifier[target] ) identifier[max_version] = identifier[ranges] . identifier[max_allowed_version] . identifier[get] ( identifier[target] ) identifier[current_valid] = keyword[True] keyword[if] identifier[min_version] keyword[and] identifier[self] . identifier[jvm_version] ( identifier[target] )< identifier[min_version] : identifier[current_valid] = keyword[False] keyword[if] identifier[max_version] keyword[and] identifier[self] . identifier[jvm_version] ( identifier[target] )> identifier[max_version] : identifier[current_valid] = keyword[False] identifier[current_text] = identifier[str] ( identifier[self] . identifier[jvm_version] ( identifier[target] )) keyword[if] keyword[not] identifier[current_valid] : identifier[current_text] = identifier[self] . identifier[_format_error] ( identifier[current_text] ) keyword[elif] identifier[only_broken] : keyword[continue] keyword[if] identifier[min_version] keyword[and] identifier[max_version] : identifier[range_text] = literal[string] . identifier[format] ( identifier[min_version] , identifier[max_version] ) keyword[if] identifier[min_version] > identifier[max_version] : identifier[range_text] = identifier[self] . identifier[_format_error] ( identifier[range_text] ) keyword[elif] identifier[min_version] : identifier[range_text] = literal[string] . identifier[format] ( identifier[min_version] ) keyword[elif] identifier[max_version] : identifier[range_text] = literal[string] . identifier[format] ( identifier[max_version] ) keyword[else] : identifier[range_text] = literal[string] keyword[yield] literal[string] . identifier[format] ( identifier[address] = identifier[target] . identifier[address] . identifier[spec] , identifier[range] = identifier[range_text] , identifier[current] = identifier[current_text] ,) keyword[if] identifier[self] . identifier[get_options] (). identifier[detailed] keyword[or] keyword[not] identifier[current_valid] : keyword[if] identifier[min_version] : identifier[min_because] =[ identifier[t] keyword[for] identifier[t] keyword[in] identifier[ranges] . identifier[target_dependencies] [ identifier[target] ] keyword[if] identifier[self] . identifier[jvm_version] ( identifier[t] )== identifier[min_version] ] keyword[yield] literal[string] . identifier[format] ( identifier[min_version] ) keyword[for] identifier[dep] keyword[in] identifier[sorted] ( identifier[min_because] ): keyword[yield] literal[string] . identifier[format] ( identifier[dep] . identifier[address] . identifier[spec] ) keyword[if] identifier[max_version] : identifier[max_because] =[ identifier[t] keyword[for] identifier[t] keyword[in] identifier[ranges] . identifier[target_dependees] [ identifier[target] ] keyword[if] identifier[self] . identifier[jvm_version] ( identifier[t] )== identifier[max_version] ] keyword[yield] literal[string] . identifier[format] ( identifier[max_version] ) keyword[for] identifier[dep] keyword[in] identifier[sorted] ( identifier[max_because] ): keyword[yield] literal[string] . identifier[format] ( identifier[dep] . identifier[address] . identifier[spec] ) keyword[yield] literal[string]
def possible_version_evaluation(self): """Evaluate the possible range of versions for each target, yielding the output analysis.""" only_broken = self.get_options().only_broken ranges = self._ranges yield 'Allowable JVM platform ranges (* = anything):' for target in sorted(filter(self._is_relevant, self.jvm_targets)): min_version = ranges.min_allowed_version.get(target) max_version = ranges.max_allowed_version.get(target) current_valid = True if min_version and self.jvm_version(target) < min_version: current_valid = False # depends on [control=['if'], data=[]] if max_version and self.jvm_version(target) > max_version: current_valid = False # depends on [control=['if'], data=[]] current_text = str(self.jvm_version(target)) if not current_valid: current_text = self._format_error(current_text) # depends on [control=['if'], data=[]] elif only_broken: continue # depends on [control=['if'], data=[]] if min_version and max_version: range_text = '{} to {}'.format(min_version, max_version) if min_version > max_version: range_text = self._format_error(range_text) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif min_version: range_text = '{}+'.format(min_version) # depends on [control=['if'], data=[]] elif max_version: range_text = '<={}'.format(max_version) # depends on [control=['if'], data=[]] else: range_text = '*' yield '{address}: {range} (is {current})'.format(address=target.address.spec, range=range_text, current=current_text) if self.get_options().detailed or not current_valid: if min_version: min_because = [t for t in ranges.target_dependencies[target] if self.jvm_version(t) == min_version] yield ' min={} because of dependencies:'.format(min_version) for dep in sorted(min_because): yield ' {}'.format(dep.address.spec) # depends on [control=['for'], data=['dep']] # depends on [control=['if'], data=[]] if max_version: max_because = [t for t in ranges.target_dependees[target] if self.jvm_version(t) == max_version] yield ' max={} because of dependees:'.format(max_version) for dep in sorted(max_because): yield ' {}'.format(dep.address.spec) # depends on [control=['for'], data=['dep']] # depends on [control=['if'], data=[]] yield '' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['target']]
def tcp_traceflow(packet, *, count=NotImplemented): """Trace packet flow for TCP.""" if 'TCP' in packet: ip = packet['IP'] if 'IP' in packet else packet['IPv6'] tcp = packet['TCP'] data = dict( protocol=LINKTYPE.get(packet.name.upper()), # data link type from global header index=count, # frame number frame=packet2dict(packet), # extracted packet syn=bool(tcp.flags.S), # TCP synchronise (SYN) flag fin=bool(tcp.flags.F), # TCP finish (FIN) flag src=ipaddress.ip_address(ip.src), # source IP dst=ipaddress.ip_address(ip.dst), # destination IP srcport=tcp.sport, # TCP source port dstport=tcp.dport, # TCP destination port timestamp=time.time(), # timestamp ) return True, data return False, None
def function[tcp_traceflow, parameter[packet]]: constant[Trace packet flow for TCP.] if compare[constant[TCP] in name[packet]] begin[:] variable[ip] assign[=] <ast.IfExp object at 0x7da1b060b190> variable[tcp] assign[=] call[name[packet]][constant[TCP]] variable[data] assign[=] call[name[dict], parameter[]] return[tuple[[<ast.Constant object at 0x7da1b06bd3f0>, <ast.Name object at 0x7da1b06be9b0>]]] return[tuple[[<ast.Constant object at 0x7da1b06bc6a0>, <ast.Constant object at 0x7da1b06bd270>]]]
keyword[def] identifier[tcp_traceflow] ( identifier[packet] ,*, identifier[count] = identifier[NotImplemented] ): literal[string] keyword[if] literal[string] keyword[in] identifier[packet] : identifier[ip] = identifier[packet] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[packet] keyword[else] identifier[packet] [ literal[string] ] identifier[tcp] = identifier[packet] [ literal[string] ] identifier[data] = identifier[dict] ( identifier[protocol] = identifier[LINKTYPE] . identifier[get] ( identifier[packet] . identifier[name] . identifier[upper] ()), identifier[index] = identifier[count] , identifier[frame] = identifier[packet2dict] ( identifier[packet] ), identifier[syn] = identifier[bool] ( identifier[tcp] . identifier[flags] . identifier[S] ), identifier[fin] = identifier[bool] ( identifier[tcp] . identifier[flags] . identifier[F] ), identifier[src] = identifier[ipaddress] . identifier[ip_address] ( identifier[ip] . identifier[src] ), identifier[dst] = identifier[ipaddress] . identifier[ip_address] ( identifier[ip] . identifier[dst] ), identifier[srcport] = identifier[tcp] . identifier[sport] , identifier[dstport] = identifier[tcp] . identifier[dport] , identifier[timestamp] = identifier[time] . identifier[time] (), ) keyword[return] keyword[True] , identifier[data] keyword[return] keyword[False] , keyword[None]
def tcp_traceflow(packet, *, count=NotImplemented): """Trace packet flow for TCP.""" if 'TCP' in packet: ip = packet['IP'] if 'IP' in packet else packet['IPv6'] tcp = packet['TCP'] # data link type from global header # frame number # extracted packet # TCP synchronise (SYN) flag # TCP finish (FIN) flag # source IP # destination IP # TCP source port # TCP destination port # timestamp data = dict(protocol=LINKTYPE.get(packet.name.upper()), index=count, frame=packet2dict(packet), syn=bool(tcp.flags.S), fin=bool(tcp.flags.F), src=ipaddress.ip_address(ip.src), dst=ipaddress.ip_address(ip.dst), srcport=tcp.sport, dstport=tcp.dport, timestamp=time.time()) return (True, data) # depends on [control=['if'], data=['packet']] return (False, None)
def get_curie(self, uri): '''Get a CURIE from a URI ''' prefix = self.get_curie_prefix(uri) if prefix is not None: key = self.curie_map[prefix] return '%s:%s' % (prefix, uri[len(key):len(uri)]) return None
def function[get_curie, parameter[self, uri]]: constant[Get a CURIE from a URI ] variable[prefix] assign[=] call[name[self].get_curie_prefix, parameter[name[uri]]] if compare[name[prefix] is_not constant[None]] begin[:] variable[key] assign[=] call[name[self].curie_map][name[prefix]] return[binary_operation[constant[%s:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb56950>, <ast.Subscript object at 0x7da18eb575e0>]]]] return[constant[None]]
keyword[def] identifier[get_curie] ( identifier[self] , identifier[uri] ): literal[string] identifier[prefix] = identifier[self] . identifier[get_curie_prefix] ( identifier[uri] ) keyword[if] identifier[prefix] keyword[is] keyword[not] keyword[None] : identifier[key] = identifier[self] . identifier[curie_map] [ identifier[prefix] ] keyword[return] literal[string] %( identifier[prefix] , identifier[uri] [ identifier[len] ( identifier[key] ): identifier[len] ( identifier[uri] )]) keyword[return] keyword[None]
def get_curie(self, uri): """Get a CURIE from a URI """ prefix = self.get_curie_prefix(uri) if prefix is not None: key = self.curie_map[prefix] return '%s:%s' % (prefix, uri[len(key):len(uri)]) # depends on [control=['if'], data=['prefix']] return None
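A self-contained sketch of the CURIE lookup above. get_curie_prefix is defined elsewhere in the original class; here it is assumed to return the first prefix in curie_map whose IRI expansion is a prefix of the URI.

class CurieUtil:
    """Minimal host class for the get_curie method above (illustration only)."""
    def __init__(self, curie_map):
        self.curie_map = curie_map

    def get_curie_prefix(self, uri):
        # assumed behaviour of the helper that is not shown in this record
        for prefix, iri in self.curie_map.items():
            if uri.startswith(iri):
                return prefix
        return None

    get_curie = get_curie  # reuse the function above as a method

cu = CurieUtil({'foaf': 'http://xmlns.com/foaf/0.1/'})
print(cu.get_curie('http://xmlns.com/foaf/0.1/name'))  # foaf:name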
def longest_interval(self) -> Optional[Interval]: """ Returns the longest interval, or ``None`` if none. """ longest_duration = self.longest_duration() for i in self.intervals: if i.duration() == longest_duration: return i return None
def function[longest_interval, parameter[self]]: constant[ Returns the longest interval, or ``None`` if none. ] variable[longest_duration] assign[=] call[name[self].longest_duration, parameter[]] for taget[name[i]] in starred[name[self].intervals] begin[:] if compare[call[name[i].duration, parameter[]] equal[==] name[longest_duration]] begin[:] return[name[i]] return[constant[None]]
keyword[def] identifier[longest_interval] ( identifier[self] )-> identifier[Optional] [ identifier[Interval] ]: literal[string] identifier[longest_duration] = identifier[self] . identifier[longest_duration] () keyword[for] identifier[i] keyword[in] identifier[self] . identifier[intervals] : keyword[if] identifier[i] . identifier[duration] ()== identifier[longest_duration] : keyword[return] identifier[i] keyword[return] keyword[None]
def longest_interval(self) -> Optional[Interval]: """ Returns the longest interval, or ``None`` if none. """ longest_duration = self.longest_duration() for i in self.intervals: if i.duration() == longest_duration: return i # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] return None
def describe_jobflow(self, jobflow_id): """ Describes a single Elastic MapReduce job flow :type jobflow_id: str :param jobflow_id: The job flow id of interest """ jobflows = self.describe_jobflows(jobflow_ids=[jobflow_id]) if jobflows: return jobflows[0]
def function[describe_jobflow, parameter[self, jobflow_id]]: constant[ Describes a single Elastic MapReduce job flow :type jobflow_id: str :param jobflow_id: The job flow id of interest ] variable[jobflows] assign[=] call[name[self].describe_jobflows, parameter[]] if name[jobflows] begin[:] return[call[name[jobflows]][constant[0]]]
keyword[def] identifier[describe_jobflow] ( identifier[self] , identifier[jobflow_id] ): literal[string] identifier[jobflows] = identifier[self] . identifier[describe_jobflows] ( identifier[jobflow_ids] =[ identifier[jobflow_id] ]) keyword[if] identifier[jobflows] : keyword[return] identifier[jobflows] [ literal[int] ]
def describe_jobflow(self, jobflow_id): """ Describes a single Elastic MapReduce job flow :type jobflow_id: str :param jobflow_id: The job flow id of interest """ jobflows = self.describe_jobflows(jobflow_ids=[jobflow_id]) if jobflows: return jobflows[0] # depends on [control=['if'], data=[]]
def install_integration(self, id, **kwargs): # noqa: E501 """Installs a Wavefront integration # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.install_integration(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: (required) :return: ResponseContainerIntegrationStatus If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.install_integration_with_http_info(id, **kwargs) # noqa: E501 else: (data) = self.install_integration_with_http_info(id, **kwargs) # noqa: E501 return data
def function[install_integration, parameter[self, id]]: constant[Installs a Wavefront integration # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.install_integration(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: (required) :return: ResponseContainerIntegrationStatus If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].install_integration_with_http_info, parameter[name[id]]]]
keyword[def] identifier[install_integration] ( identifier[self] , identifier[id] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[install_integration_with_http_info] ( identifier[id] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[install_integration_with_http_info] ( identifier[id] ,** identifier[kwargs] ) keyword[return] identifier[data]
def install_integration(self, id, **kwargs): # noqa: E501 'Installs a Wavefront integration # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.install_integration(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: (required)\n :return: ResponseContainerIntegrationStatus\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.install_integration_with_http_info(id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.install_integration_with_http_info(id, **kwargs) # noqa: E501 return data
def copy_current_app_context(func: Callable) -> Callable:
    """Share the current app context with the function decorated.

    The app context is local per task and hence will not be available in
    any other task. This decorator can be used to make the context
    available,

    .. code-block:: python

        @copy_current_app_context
        async def within_context() -> None:
            name = current_app.name
            ...
    """
    if not has_app_context():
        raise RuntimeError('Attempt to copy app context outside of an app context')
    app_context = _app_ctx_stack.top.copy()

    @wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        async with app_context:
            return await func(*args, **kwargs)
    return wrapper
def function[copy_current_app_context, parameter[func]]: constant[Share the current app context with the function decorated. The app context is local per task and hence will not be available in any other task. This decorator can be used to make the context available, .. code-block:: python @copy_current_app_context async def within_context() -> None: name = current_app.name ... ] if <ast.UnaryOp object at 0x7da20e9b0490> begin[:] <ast.Raise object at 0x7da18f58d7e0> variable[app_context] assign[=] call[name[_app_ctx_stack].top.copy, parameter[]] <ast.AsyncFunctionDef object at 0x7da18f58c820> return[name[wrapper]]
keyword[def] identifier[copy_current_app_context] ( identifier[func] : identifier[Callable] )-> identifier[Callable] : literal[string] keyword[if] keyword[not] identifier[has_app_context] (): keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[app_context] = identifier[_app_ctx_stack] . identifier[top] . identifier[copy] () @ identifier[wraps] ( identifier[func] ) keyword[async] keyword[def] identifier[wrapper] (* identifier[args] : identifier[Any] ,** identifier[kwargs] : identifier[Any] )-> identifier[Any] : keyword[async] keyword[with] identifier[app_context] : keyword[return] keyword[await] identifier[func] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[wrapper]
def copy_current_app_context(func: Callable) -> Callable:
    """Share the current app context with the function decorated.

    The app context is local per task and hence will not be available in
    any other task. This decorator can be used to make the context
    available,

    .. code-block:: python

        @copy_current_app_context
        async def within_context() -> None:
            name = current_app.name
            ...
    """
    if not has_app_context():
        raise RuntimeError('Attempt to copy app context outside of an app context') # depends on [control=['if'], data=[]]
    app_context = _app_ctx_stack.top.copy()

    @wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        async with app_context:
            return await func(*args, **kwargs)
    return wrapper
def make_table_map(table, headers): """Create a function to map from rows with the structure of the headers to the structure of the table.""" header_parts = {} for i, h in enumerate(headers): header_parts[h] = 'row[{}]'.format(i) body_code = 'lambda row: [{}]'.format(','.join(header_parts.get(c.name, 'None') for c in table.columns)) header_code = 'lambda row: [{}]'.format( ','.join(header_parts.get(c.name, "'{}'".format(c.name)) for c in table.columns)) return eval(header_code), eval(body_code)
def function[make_table_map, parameter[table, headers]]: constant[Create a function to map from rows with the structure of the headers to the structure of the table.] variable[header_parts] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da204347190>, <ast.Name object at 0x7da2043441f0>]]] in starred[call[name[enumerate], parameter[name[headers]]]] begin[:] call[name[header_parts]][name[h]] assign[=] call[constant[row[{}]].format, parameter[name[i]]] variable[body_code] assign[=] call[constant[lambda row: [{}]].format, parameter[call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da1b0ebcbe0>]]]] variable[header_code] assign[=] call[constant[lambda row: [{}]].format, parameter[call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da2041dbf70>]]]] return[tuple[[<ast.Call object at 0x7da2041d98d0>, <ast.Call object at 0x7da2041d9270>]]]
keyword[def] identifier[make_table_map] ( identifier[table] , identifier[headers] ): literal[string] identifier[header_parts] ={} keyword[for] identifier[i] , identifier[h] keyword[in] identifier[enumerate] ( identifier[headers] ): identifier[header_parts] [ identifier[h] ]= literal[string] . identifier[format] ( identifier[i] ) identifier[body_code] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[header_parts] . identifier[get] ( identifier[c] . identifier[name] , literal[string] ) keyword[for] identifier[c] keyword[in] identifier[table] . identifier[columns] )) identifier[header_code] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[header_parts] . identifier[get] ( identifier[c] . identifier[name] , literal[string] . identifier[format] ( identifier[c] . identifier[name] )) keyword[for] identifier[c] keyword[in] identifier[table] . identifier[columns] )) keyword[return] identifier[eval] ( identifier[header_code] ), identifier[eval] ( identifier[body_code] )
def make_table_map(table, headers): """Create a function to map from rows with the structure of the headers to the structure of the table.""" header_parts = {} for (i, h) in enumerate(headers): header_parts[h] = 'row[{}]'.format(i) # depends on [control=['for'], data=[]] body_code = 'lambda row: [{}]'.format(','.join((header_parts.get(c.name, 'None') for c in table.columns))) header_code = 'lambda row: [{}]'.format(','.join((header_parts.get(c.name, "'{}'".format(c.name)) for c in table.columns))) return (eval(header_code), eval(body_code))
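A usage sketch for make_table_map. The real table is an ORM-style object whose columns expose .name; a namedtuple stand-in is assumed here.

from collections import namedtuple

Column = namedtuple('Column', 'name')
Table = namedtuple('Table', 'columns')

table = Table(columns=[Column('id'), Column('name'), Column('age')])
headers = ['name', 'id']  # incoming rows carry only these two fields

header_fn, body_fn = make_table_map(table, headers)
print(header_fn(headers))      # ['id', 'name', 'age'] -- a missing column keeps its own name
print(body_fn(['alice', 7]))   # [7, 'alice', None]    -- a missing column is padded with None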
def decompose_covariance(c):
    """
    This decomposes a covariance matrix into an error vector and a correlation matrix
    """

    # make a copy of the original so the input is not modified
    c = _n.array(c)

    # first get the error vector
    e = []
    for n in range(0, len(c[0])):
        e.append(_n.sqrt(c[n][n]))

    # now cycle through the matrix, dividing by e[n]*e[m]
    for n in range(0, len(c[0])):
        for m in range(0, len(c[0])):
            c[n][m] = c[n][m] / (e[n]*e[m])

    return [_n.array(e), _n.array(c)]
def function[decompose_covariance, parameter[c]]: constant[ This decomposes a covariance matrix into an error vector and a correlation matrix ] variable[c] assign[=] call[name[_n].array, parameter[name[c]]] variable[e] assign[=] list[[]] for taget[name[n]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[call[name[c]][constant[0]]]]]]] begin[:] call[name[e].append, parameter[call[name[_n].sqrt, parameter[call[call[name[c]][name[n]]][name[n]]]]]] for taget[name[n]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[call[name[c]][constant[0]]]]]]] begin[:] for taget[name[m]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[call[name[c]][constant[0]]]]]]] begin[:] call[call[name[c]][name[n]]][name[m]] assign[=] binary_operation[call[call[name[c]][name[n]]][name[m]] / binary_operation[call[name[e]][name[n]] * call[name[e]][name[m]]]] return[list[[<ast.Call object at 0x7da2047e97b0>, <ast.Call object at 0x7da2047eb700>]]]
keyword[def] identifier[decompose_covariance] ( identifier[c] ): literal[string] identifier[c] = identifier[_n] . identifier[array] ( identifier[c] ) identifier[e] =[] keyword[for] identifier[n] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[c] [ literal[int] ])): identifier[e] . identifier[append] ( identifier[_n] . identifier[sqrt] ( identifier[c] [ identifier[n] ][ identifier[n] ])) keyword[for] identifier[n] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[c] [ literal[int] ])): keyword[for] identifier[m] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[c] [ literal[int] ])): identifier[c] [ identifier[n] ][ identifier[m] ]= identifier[c] [ identifier[n] ][ identifier[m] ]/( identifier[e] [ identifier[n] ]* identifier[e] [ identifier[m] ]) keyword[return] [ identifier[_n] . identifier[array] ( identifier[e] ), identifier[_n] . identifier[array] ( identifier[c] )]
def decompose_covariance(c):
    """
    This decomposes a covariance matrix into an error vector and a correlation matrix
    """
    # make a copy of the original so the input is not modified
    c = _n.array(c)
    # first get the error vector
    e = []
    for n in range(0, len(c[0])):
        e.append(_n.sqrt(c[n][n])) # depends on [control=['for'], data=['n']]
    # now cycle through the matrix, dividing by e[n]*e[m]
    for n in range(0, len(c[0])):
        for m in range(0, len(c[0])):
            c[n][m] = c[n][m] / (e[n] * e[m]) # depends on [control=['for'], data=['m']] # depends on [control=['for'], data=['n']]
    return [_n.array(e), _n.array(c)]
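A worked check of the decomposition above: for a covariance matrix C with standard deviations e and correlation matrix R, C = diag(e) @ R @ diag(e). _n is assumed to be the numpy module, as in the surrounding code.

import numpy as _n

C = _n.array([[4.0, 1.0],
              [1.0, 9.0]])
e, R = decompose_covariance(C)
print(e)   # [2. 3.] -- the standard deviations
print(R)   # off-diagonal entries ~0.1667 = 1 / (2 * 3)
print(_n.allclose(_n.diag(e) @ R @ _n.diag(e), C))  # True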
def getSymmetricallyEncryptedVal(val, secretKey: Union[str, bytes] = None) -> \ Tuple[str, str]: """ Encrypt the provided value with symmetric encryption :param val: the value to encrypt :param secretKey: Optional key, if provided should be either in hex or bytes :return: Tuple of the encrypted value and secret key encoded in hex """ if isinstance(val, str): val = val.encode("utf-8") if secretKey: if isHex(secretKey): secretKey = bytes(bytearray.fromhex(secretKey)) elif not isinstance(secretKey, bytes): error("Secret key must be either in hex or bytes") box = libnacl.secret.SecretBox(secretKey) else: box = libnacl.secret.SecretBox() return box.encrypt(val).hex(), box.sk.hex()
def function[getSymmetricallyEncryptedVal, parameter[val, secretKey]]: constant[ Encrypt the provided value with symmetric encryption :param val: the value to encrypt :param secretKey: Optional key, if provided should be either in hex or bytes :return: Tuple of the encrypted value and secret key encoded in hex ] if call[name[isinstance], parameter[name[val], name[str]]] begin[:] variable[val] assign[=] call[name[val].encode, parameter[constant[utf-8]]] if name[secretKey] begin[:] if call[name[isHex], parameter[name[secretKey]]] begin[:] variable[secretKey] assign[=] call[name[bytes], parameter[call[name[bytearray].fromhex, parameter[name[secretKey]]]]] variable[box] assign[=] call[name[libnacl].secret.SecretBox, parameter[name[secretKey]]] return[tuple[[<ast.Call object at 0x7da18f09d9f0>, <ast.Call object at 0x7da18f09d3f0>]]]
keyword[def] identifier[getSymmetricallyEncryptedVal] ( identifier[val] , identifier[secretKey] : identifier[Union] [ identifier[str] , identifier[bytes] ]= keyword[None] )-> identifier[Tuple] [ identifier[str] , identifier[str] ]: literal[string] keyword[if] identifier[isinstance] ( identifier[val] , identifier[str] ): identifier[val] = identifier[val] . identifier[encode] ( literal[string] ) keyword[if] identifier[secretKey] : keyword[if] identifier[isHex] ( identifier[secretKey] ): identifier[secretKey] = identifier[bytes] ( identifier[bytearray] . identifier[fromhex] ( identifier[secretKey] )) keyword[elif] keyword[not] identifier[isinstance] ( identifier[secretKey] , identifier[bytes] ): identifier[error] ( literal[string] ) identifier[box] = identifier[libnacl] . identifier[secret] . identifier[SecretBox] ( identifier[secretKey] ) keyword[else] : identifier[box] = identifier[libnacl] . identifier[secret] . identifier[SecretBox] () keyword[return] identifier[box] . identifier[encrypt] ( identifier[val] ). identifier[hex] (), identifier[box] . identifier[sk] . identifier[hex] ()
def getSymmetricallyEncryptedVal(val, secretKey: Union[str, bytes]=None) -> Tuple[str, str]: """ Encrypt the provided value with symmetric encryption :param val: the value to encrypt :param secretKey: Optional key, if provided should be either in hex or bytes :return: Tuple of the encrypted value and secret key encoded in hex """ if isinstance(val, str): val = val.encode('utf-8') # depends on [control=['if'], data=[]] if secretKey: if isHex(secretKey): secretKey = bytes(bytearray.fromhex(secretKey)) # depends on [control=['if'], data=[]] elif not isinstance(secretKey, bytes): error('Secret key must be either in hex or bytes') # depends on [control=['if'], data=[]] box = libnacl.secret.SecretBox(secretKey) # depends on [control=['if'], data=[]] else: box = libnacl.secret.SecretBox() return (box.encrypt(val).hex(), box.sk.hex())
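A hedged round-trip sketch, assuming the libnacl package is installed. The decrypt side is not part of the helper above; it uses libnacl.secret.SecretBox directly on the hex-decoded key and ciphertext.

import libnacl.secret

ct_hex, key_hex = getSymmetricallyEncryptedVal('attack at dawn')
box = libnacl.secret.SecretBox(bytes.fromhex(key_hex))
print(box.decrypt(bytes.fromhex(ct_hex)).decode('utf-8'))  # attack at dawn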
def set_connection_params(self, address, local_tsap, remote_tsap): """ Sets internally (IP, LocalTSAP, RemoteTSAP) Coordinates. This function must be called just before Cli_Connect(). :param address: PLC/Equipment IPV4 Address, for example "192.168.1.12" :param local_tsap: Local TSAP (PC TSAP) :param remote_tsap: Remote TSAP (PLC TSAP) """ assert re.match(ipv4, address), '%s is invalid ipv4' % address result = self.library.Cli_SetConnectionParams(self.pointer, address, c_uint16(local_tsap), c_uint16(remote_tsap)) if result != 0: raise Snap7Exception("The parameter was invalid")
def function[set_connection_params, parameter[self, address, local_tsap, remote_tsap]]: constant[ Sets internally (IP, LocalTSAP, RemoteTSAP) Coordinates. This function must be called just before Cli_Connect(). :param address: PLC/Equipment IPV4 Address, for example "192.168.1.12" :param local_tsap: Local TSAP (PC TSAP) :param remote_tsap: Remote TSAP (PLC TSAP) ] assert[call[name[re].match, parameter[name[ipv4], name[address]]]] variable[result] assign[=] call[name[self].library.Cli_SetConnectionParams, parameter[name[self].pointer, name[address], call[name[c_uint16], parameter[name[local_tsap]]], call[name[c_uint16], parameter[name[remote_tsap]]]]] if compare[name[result] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da2047e8a30>
keyword[def] identifier[set_connection_params] ( identifier[self] , identifier[address] , identifier[local_tsap] , identifier[remote_tsap] ): literal[string] keyword[assert] identifier[re] . identifier[match] ( identifier[ipv4] , identifier[address] ), literal[string] % identifier[address] identifier[result] = identifier[self] . identifier[library] . identifier[Cli_SetConnectionParams] ( identifier[self] . identifier[pointer] , identifier[address] , identifier[c_uint16] ( identifier[local_tsap] ), identifier[c_uint16] ( identifier[remote_tsap] )) keyword[if] identifier[result] != literal[int] : keyword[raise] identifier[Snap7Exception] ( literal[string] )
def set_connection_params(self, address, local_tsap, remote_tsap): """ Sets internally (IP, LocalTSAP, RemoteTSAP) Coordinates. This function must be called just before Cli_Connect(). :param address: PLC/Equipment IPV4 Address, for example "192.168.1.12" :param local_tsap: Local TSAP (PC TSAP) :param remote_tsap: Remote TSAP (PLC TSAP) """ assert re.match(ipv4, address), '%s is invalid ipv4' % address result = self.library.Cli_SetConnectionParams(self.pointer, address, c_uint16(local_tsap), c_uint16(remote_tsap)) if result != 0: raise Snap7Exception('The parameter was invalid') # depends on [control=['if'], data=[]]
def movav(y, Dx, dx):
    """
    Moving average rectangular window filter:
    calculate the average of signal y with a sliding rectangular window of
    size Dx, for a signal sampled with bin size dx

    Parameters
    ----------
    y : numpy.ndarray
        Signal
    Dx : float
        Window length of filter.
    dx : float
        Bin size of signal sampling.

    Returns
    -------
    numpy.ndarray
        Filtered signal.

    """
    if Dx <= dx:
        return y
    else:
        ly = len(y)
        r = np.zeros(ly)
        n = np.int(np.round((Dx / dx)))
        r[0:np.int(n / 2.)] = 1.0 / n
        r[-np.int(n / 2.)::] = 1.0 / n
        R = np.fft.fft(r)
        Y = np.fft.fft(y)
        yf = np.fft.ifft(Y * R)
        return yf
def function[movav, parameter[y, Dx, dx]]:
    constant[
    Moving average rectangular window filter:
    calculate the average of signal y with a sliding rectangular window of
    size Dx, for a signal sampled with bin size dx

    Parameters
    ----------
    y : numpy.ndarray
        Signal
    Dx : float
        Window length of filter.
    dx : float
        Bin size of signal sampling.

    Returns
    -------
    numpy.ndarray
        Filtered signal.

    ]
    if compare[name[Dx] less_or_equal[<=] name[dx]] begin[:]
        return[name[y]]
keyword[def] identifier[movav] ( identifier[y] , identifier[Dx] , identifier[dx] ): literal[string] keyword[if] identifier[Dx] <= identifier[dx] : keyword[return] identifier[y] keyword[else] : identifier[ly] = identifier[len] ( identifier[y] ) identifier[r] = identifier[np] . identifier[zeros] ( identifier[ly] ) identifier[n] = identifier[np] . identifier[int] ( identifier[np] . identifier[round] (( identifier[Dx] / identifier[dx] ))) identifier[r] [ literal[int] : identifier[np] . identifier[int] ( identifier[n] / literal[int] )]= literal[int] / identifier[n] identifier[r] [- identifier[np] . identifier[int] ( identifier[n] / literal[int] )::]= literal[int] / identifier[n] identifier[R] = identifier[np] . identifier[fft] . identifier[fft] ( identifier[r] ) identifier[Y] = identifier[np] . identifier[fft] . identifier[fft] ( identifier[y] ) identifier[yf] = identifier[np] . identifier[fft] . identifier[ifft] ( identifier[Y] * identifier[R] ) keyword[return] identifier[yf]
def movav(y, Dx, dx):
    """
    Moving average rectangular window filter:
    calculate the average of signal y with a sliding rectangular window of
    size Dx, for a signal sampled with bin size dx

    Parameters
    ----------
    y : numpy.ndarray
        Signal
    Dx : float
        Window length of filter.
    dx : float
        Bin size of signal sampling.

    Returns
    -------
    numpy.ndarray
        Filtered signal.

    """
    if Dx <= dx:
        return y # depends on [control=['if'], data=[]]
    else:
        ly = len(y)
        r = np.zeros(ly)
        n = np.int(np.round(Dx / dx))
        r[0:np.int(n / 2.0)] = 1.0 / n
        r[-np.int(n / 2.0):] = 1.0 / n
        R = np.fft.fft(r)
        Y = np.fft.fft(y)
        yf = np.fft.ifft(Y * R)
        return yf
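A smoothing sketch for movav. The function returns the raw np.fft.ifft output, which is complex, so take .real before comparing; note also that np.int inside movav requires an older numpy (the alias was removed in numpy 1.24).

import numpy as np

dx = 0.01
x = np.arange(0, 10, dx)
y = np.sin(x) + 0.3 * np.random.randn(len(x))  # noisy signal
yf = movav(y, Dx=0.5, dx=dx).real              # 0.5-wide rectangular window
print(np.abs(y - np.sin(x)).mean())            # error before smoothing
print(np.abs(yf - np.sin(x)).mean())           # smaller after smoothing
# the FFT-based convolution is circular, so values near both edges wrap around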
def Add(self, value, *optional): """ Overload ROOT's basic TList::Add to support supplying TListItemWithOption """ if isinstance(value, TListItemWithOption): if optional: raise RuntimeError( "option specified along with " "TListItemWithOption. Specify one or the " "other but not both.") return super(List, self).Add(value.item, value.option) return super(List, self).Add(value, *optional)
def function[Add, parameter[self, value]]: constant[ Overload ROOT's basic TList::Add to support supplying TListItemWithOption ] if call[name[isinstance], parameter[name[value], name[TListItemWithOption]]] begin[:] if name[optional] begin[:] <ast.Raise object at 0x7da1b1192350> return[call[call[name[super], parameter[name[List], name[self]]].Add, parameter[name[value].item, name[value].option]]] return[call[call[name[super], parameter[name[List], name[self]]].Add, parameter[name[value], <ast.Starred object at 0x7da1b1192ad0>]]]
keyword[def] identifier[Add] ( identifier[self] , identifier[value] ,* identifier[optional] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[TListItemWithOption] ): keyword[if] identifier[optional] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] literal[string] ) keyword[return] identifier[super] ( identifier[List] , identifier[self] ). identifier[Add] ( identifier[value] . identifier[item] , identifier[value] . identifier[option] ) keyword[return] identifier[super] ( identifier[List] , identifier[self] ). identifier[Add] ( identifier[value] ,* identifier[optional] )
def Add(self, value, *optional): """ Overload ROOT's basic TList::Add to support supplying TListItemWithOption """ if isinstance(value, TListItemWithOption): if optional: raise RuntimeError('option specified along with TListItemWithOption. Specify one or the other but not both.') # depends on [control=['if'], data=[]] return super(List, self).Add(value.item, value.option) # depends on [control=['if'], data=[]] return super(List, self).Add(value, *optional)
def set_classifier_mask(self, v, base_mask=True): """Computes the mask used to create the training and validation set""" base = self._base v = tonparray(v) a = np.unique(v) if a[0] != -1 or a[1] != 1: raise RuntimeError("The labels must be -1 and 1 (%s)" % a) mask = np.zeros_like(v) cnt = min([(v == x).sum() for x in a]) * base._tr_fraction cnt = int(round(cnt)) for i in a: index = np.where((v == i) & base_mask)[0] np.random.shuffle(index) mask[index[:cnt]] = True base._mask = SparseArray.fromlist(mask) return SparseArray.fromlist(v)
def function[set_classifier_mask, parameter[self, v, base_mask]]: constant[Computes the mask used to create the training and validation set] variable[base] assign[=] name[self]._base variable[v] assign[=] call[name[tonparray], parameter[name[v]]] variable[a] assign[=] call[name[np].unique, parameter[name[v]]] if <ast.BoolOp object at 0x7da204347130> begin[:] <ast.Raise object at 0x7da204344910> variable[mask] assign[=] call[name[np].zeros_like, parameter[name[v]]] variable[cnt] assign[=] binary_operation[call[name[min], parameter[<ast.ListComp object at 0x7da204346860>]] * name[base]._tr_fraction] variable[cnt] assign[=] call[name[int], parameter[call[name[round], parameter[name[cnt]]]]] for taget[name[i]] in starred[name[a]] begin[:] variable[index] assign[=] call[call[name[np].where, parameter[binary_operation[compare[name[v] equal[==] name[i]] <ast.BitAnd object at 0x7da2590d6b60> name[base_mask]]]]][constant[0]] call[name[np].random.shuffle, parameter[name[index]]] call[name[mask]][call[name[index]][<ast.Slice object at 0x7da204566b30>]] assign[=] constant[True] name[base]._mask assign[=] call[name[SparseArray].fromlist, parameter[name[mask]]] return[call[name[SparseArray].fromlist, parameter[name[v]]]]
keyword[def] identifier[set_classifier_mask] ( identifier[self] , identifier[v] , identifier[base_mask] = keyword[True] ): literal[string] identifier[base] = identifier[self] . identifier[_base] identifier[v] = identifier[tonparray] ( identifier[v] ) identifier[a] = identifier[np] . identifier[unique] ( identifier[v] ) keyword[if] identifier[a] [ literal[int] ]!=- literal[int] keyword[or] identifier[a] [ literal[int] ]!= literal[int] : keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[a] ) identifier[mask] = identifier[np] . identifier[zeros_like] ( identifier[v] ) identifier[cnt] = identifier[min] ([( identifier[v] == identifier[x] ). identifier[sum] () keyword[for] identifier[x] keyword[in] identifier[a] ])* identifier[base] . identifier[_tr_fraction] identifier[cnt] = identifier[int] ( identifier[round] ( identifier[cnt] )) keyword[for] identifier[i] keyword[in] identifier[a] : identifier[index] = identifier[np] . identifier[where] (( identifier[v] == identifier[i] )& identifier[base_mask] )[ literal[int] ] identifier[np] . identifier[random] . identifier[shuffle] ( identifier[index] ) identifier[mask] [ identifier[index] [: identifier[cnt] ]]= keyword[True] identifier[base] . identifier[_mask] = identifier[SparseArray] . identifier[fromlist] ( identifier[mask] ) keyword[return] identifier[SparseArray] . identifier[fromlist] ( identifier[v] )
def set_classifier_mask(self, v, base_mask=True): """Computes the mask used to create the training and validation set""" base = self._base v = tonparray(v) a = np.unique(v) if a[0] != -1 or a[1] != 1: raise RuntimeError('The labels must be -1 and 1 (%s)' % a) # depends on [control=['if'], data=[]] mask = np.zeros_like(v) cnt = min([(v == x).sum() for x in a]) * base._tr_fraction cnt = int(round(cnt)) for i in a: index = np.where((v == i) & base_mask)[0] np.random.shuffle(index) mask[index[:cnt]] = True # depends on [control=['for'], data=['i']] base._mask = SparseArray.fromlist(mask) return SparseArray.fromlist(v)
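The balanced-subsampling idea in set_classifier_mask stands on its own; here is a self-contained numpy sketch of the same technique, without the internals (SparseArray, tonparray, _base) that the method above depends on.

import numpy as np

def balanced_mask(y, tr_fraction=0.8, seed=0):
    # keep the same number of training rows per class, chosen at random
    rng = np.random.default_rng(seed)
    y = np.asarray(y)
    cnt = int(round(min((y == l).sum() for l in np.unique(y)) * tr_fraction))
    mask = np.zeros(len(y), dtype=bool)
    for l in np.unique(y):
        index = np.where(y == l)[0]
        rng.shuffle(index)
        mask[index[:cnt]] = True
    return mask

y = np.array([-1, -1, -1, 1, 1, 1, 1, 1])
m = balanced_mask(y)
print(m.sum(), (y[m] == -1).sum(), (y[m] == 1).sum())  # 4 2 2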
def hide_defaults(self): """Removes fields' values that are the same as default values.""" # use list(): self.fields is modified in the loop for k, v in list(six.iteritems(self.fields)): v = self.fields[k] if k in self.default_fields: if self.default_fields[k] == v: del self.fields[k] self.payload.hide_defaults()
def function[hide_defaults, parameter[self]]: constant[Removes fields' values that are the same as default values.] for taget[tuple[[<ast.Name object at 0x7da1b2195e40>, <ast.Name object at 0x7da1b2194490>]]] in starred[call[name[list], parameter[call[name[six].iteritems, parameter[name[self].fields]]]]] begin[:] variable[v] assign[=] call[name[self].fields][name[k]] if compare[name[k] in name[self].default_fields] begin[:] if compare[call[name[self].default_fields][name[k]] equal[==] name[v]] begin[:] <ast.Delete object at 0x7da1b21a2560> call[name[self].payload.hide_defaults, parameter[]]
keyword[def] identifier[hide_defaults] ( identifier[self] ): literal[string] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[list] ( identifier[six] . identifier[iteritems] ( identifier[self] . identifier[fields] )): identifier[v] = identifier[self] . identifier[fields] [ identifier[k] ] keyword[if] identifier[k] keyword[in] identifier[self] . identifier[default_fields] : keyword[if] identifier[self] . identifier[default_fields] [ identifier[k] ]== identifier[v] : keyword[del] identifier[self] . identifier[fields] [ identifier[k] ] identifier[self] . identifier[payload] . identifier[hide_defaults] ()
def hide_defaults(self): """Removes fields' values that are the same as default values.""" # use list(): self.fields is modified in the loop for (k, v) in list(six.iteritems(self.fields)): v = self.fields[k] if k in self.default_fields: if self.default_fields[k] == v: del self.fields[k] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=[]] self.payload.hide_defaults()
def _ParseRecurseKeys(self, parser_mediator, root_key): """Parses the Registry keys recursively. Args: parser_mediator (ParserMediator): parser mediator. root_key (dfwinreg.WinRegistryKey): root Windows Registry key. """ for registry_key in root_key.RecurseKeys(): if parser_mediator.abort: break self._ParseKey(parser_mediator, registry_key)
def function[_ParseRecurseKeys, parameter[self, parser_mediator, root_key]]: constant[Parses the Registry keys recursively. Args: parser_mediator (ParserMediator): parser mediator. root_key (dfwinreg.WinRegistryKey): root Windows Registry key. ] for taget[name[registry_key]] in starred[call[name[root_key].RecurseKeys, parameter[]]] begin[:] if name[parser_mediator].abort begin[:] break call[name[self]._ParseKey, parameter[name[parser_mediator], name[registry_key]]]
keyword[def] identifier[_ParseRecurseKeys] ( identifier[self] , identifier[parser_mediator] , identifier[root_key] ): literal[string] keyword[for] identifier[registry_key] keyword[in] identifier[root_key] . identifier[RecurseKeys] (): keyword[if] identifier[parser_mediator] . identifier[abort] : keyword[break] identifier[self] . identifier[_ParseKey] ( identifier[parser_mediator] , identifier[registry_key] )
def _ParseRecurseKeys(self, parser_mediator, root_key): """Parses the Registry keys recursively. Args: parser_mediator (ParserMediator): parser mediator. root_key (dfwinreg.WinRegistryKey): root Windows Registry key. """ for registry_key in root_key.RecurseKeys(): if parser_mediator.abort: break # depends on [control=['if'], data=[]] self._ParseKey(parser_mediator, registry_key) # depends on [control=['for'], data=['registry_key']]
def GetHostMemMappedMB(self): '''Undocumented.''' counter = c_uint() ret = vmGuestLib.VMGuestLib_GetHostMemMappedMB(self.handle.value, byref(counter)) if ret != VMGUESTLIB_ERROR_SUCCESS: raise VMGuestLibException(ret) return counter.value
def function[GetHostMemMappedMB, parameter[self]]: constant[Undocumented.] variable[counter] assign[=] call[name[c_uint], parameter[]] variable[ret] assign[=] call[name[vmGuestLib].VMGuestLib_GetHostMemMappedMB, parameter[name[self].handle.value, call[name[byref], parameter[name[counter]]]]] if compare[name[ret] not_equal[!=] name[VMGUESTLIB_ERROR_SUCCESS]] begin[:] <ast.Raise object at 0x7da18f723a00> return[name[counter].value]
keyword[def] identifier[GetHostMemMappedMB] ( identifier[self] ): literal[string] identifier[counter] = identifier[c_uint] () identifier[ret] = identifier[vmGuestLib] . identifier[VMGuestLib_GetHostMemMappedMB] ( identifier[self] . identifier[handle] . identifier[value] , identifier[byref] ( identifier[counter] )) keyword[if] identifier[ret] != identifier[VMGUESTLIB_ERROR_SUCCESS] : keyword[raise] identifier[VMGuestLibException] ( identifier[ret] ) keyword[return] identifier[counter] . identifier[value]
def GetHostMemMappedMB(self): """Undocumented.""" counter = c_uint() ret = vmGuestLib.VMGuestLib_GetHostMemMappedMB(self.handle.value, byref(counter)) if ret != VMGUESTLIB_ERROR_SUCCESS: raise VMGuestLibException(ret) # depends on [control=['if'], data=['ret']] return counter.value
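GetHostMemMappedMB uses the common ctypes out-parameter pattern: allocate a C integer, pass it by reference, check the return code, then read .value. A standalone illustration with libc's sscanf follows (assumes a Linux libc; the vSphere guest library itself is not required).

import ctypes
from ctypes import byref, c_int

libc = ctypes.CDLL('libc.so.6')
n = c_int()
ret = libc.sscanf(b'1536', b'%d', byref(n))  # writes into n through the pointer
if ret != 1:                                 # mirror the wrapper's error check
    raise RuntimeError('sscanf failed')
print(n.value)                               # 1536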
def kl(self):
    r'''Thermal conductivity of the mixture in the liquid phase at its
    current temperature, pressure, and composition in units of [W/m/K].

    For calculation of this property at other temperatures and pressures,
    or specifying manually the method used to calculate it, and more - see
    the object oriented interface
    :obj:`thermo.thermal_conductivity.ThermalConductivityLiquidMixture`;
    each Mixture instance creates one to actually perform the calculations.

    Examples
    --------
    >>> Mixture(['water'], ws=[1], T=320).kl
    0.6369957248212118
    '''
    return self.ThermalConductivityLiquidMixture(self.T, self.P, self.zs, self.ws)
def function[kl, parameter[self]]:
    constant[Thermal conductivity of the mixture in the liquid phase at its
    current temperature, pressure, and composition in units of [W/m/K].

    For calculation of this property at other temperatures and pressures,
    or specifying manually the method used to calculate it, and more - see
    the object oriented interface
    :obj:`thermo.thermal_conductivity.ThermalConductivityLiquidMixture`;
    each Mixture instance creates one to actually perform the calculations.

    Examples
    --------
    >>> Mixture(['water'], ws=[1], T=320).kl
    0.6369957248212118
    ]
    return[call[name[self].ThermalConductivityLiquidMixture, parameter[name[self].T, name[self].P, name[self].zs, name[self].ws]]]
keyword[def] identifier[kl] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[ThermalConductivityLiquidMixture] ( identifier[self] . identifier[T] , identifier[self] . identifier[P] , identifier[self] . identifier[zs] , identifier[self] . identifier[ws] )
def kl(self):
    """Thermal conductivity of the mixture in the liquid phase at its
    current temperature, pressure, and composition in units of [W/m/K].

    For calculation of this property at other temperatures and pressures,
    or specifying manually the method used to calculate it, and more - see
    the object oriented interface
    :obj:`thermo.thermal_conductivity.ThermalConductivityLiquidMixture`;
    each Mixture instance creates one to actually perform the calculations.

    Examples
    --------
    >>> Mixture(['water'], ws=[1], T=320).kl
    0.6369957248212118
    """
    return self.ThermalConductivityLiquidMixture(self.T, self.P, self.zs, self.ws)
def _interval_sum(interval, start=None, end=None, context=None):
    """ Return sum of intervals between "R"esume and "P"aused events
        in C{interval}, optionally limited by a time window defined
        by C{start} and C{end}. Return ``None`` if there's no sensible
        information.

        C{interval} is a series of event types and timestamps, e.g.
        "R1283008245P1283008268".
    """
    end = float(end) if end else time.time()
    events = _interval_split(interval, context=context)
    result = []

    while events:
        event, resumed = events.pop()
        if event != "R":
            # Ignore other events
            continue
        resumed = max(resumed, start or resumed)

        if events: # Further events?
            if not events[-1][0] == "P":
                continue # If not followed by "P", it's not a valid interval
            _, paused = events.pop()
            paused = min(paused, end)
        else:
            # Currently active, ends at time window
            paused = end

        if resumed >= paused:
            # Ignore empty intervals
            continue

        result.append(paused - resumed)

    return sum(result) if result else None
def function[_interval_sum, parameter[interval, start, end, context]]: constant[ Return sum of intervals between "R"esume and "P"aused events in C{interval}, optionally limited by a time window defined by C{start} and C{end}. Return ``None`` if there's no sensible information. C{interval} is a series of event types and timestamps, e.g. "R1283008245P1283008268". ] variable[end] assign[=] <ast.IfExp object at 0x7da2044c0460> variable[events] assign[=] call[name[_interval_split], parameter[name[interval]]] variable[result] assign[=] list[[]] while name[events] begin[:] <ast.Tuple object at 0x7da2044c0340> assign[=] call[name[events].pop, parameter[]] if compare[name[event] not_equal[!=] constant[R]] begin[:] continue variable[resumed] assign[=] call[name[max], parameter[name[resumed], <ast.BoolOp object at 0x7da1b13e8fd0>]] if name[events] begin[:] if <ast.UnaryOp object at 0x7da1b13e8fa0> begin[:] continue <ast.Tuple object at 0x7da1b13e9690> assign[=] call[name[events].pop, parameter[]] variable[paused] assign[=] call[name[min], parameter[name[paused], name[end]]] if compare[name[resumed] greater_or_equal[>=] name[paused]] begin[:] continue call[name[result].append, parameter[binary_operation[name[paused] - name[resumed]]]] return[<ast.IfExp object at 0x7da1b1390430>]
keyword[def] identifier[_interval_sum] ( identifier[interval] , identifier[start] = keyword[None] , identifier[end] = keyword[None] , identifier[context] = keyword[None] ): literal[string] identifier[end] = identifier[float] ( identifier[end] ) keyword[if] identifier[end] keyword[else] identifier[time] . identifier[time] () identifier[events] = identifier[_interval_split] ( identifier[interval] , identifier[context] = identifier[context] ) identifier[result] =[] keyword[while] identifier[events] : identifier[event] , identifier[resumed] = identifier[events] . identifier[pop] () keyword[if] identifier[event] != literal[string] : keyword[continue] identifier[resumed] = identifier[max] ( identifier[resumed] , identifier[start] keyword[or] identifier[resumed] ) keyword[if] identifier[events] : keyword[if] keyword[not] identifier[events] [- literal[int] ][ literal[int] ]== literal[string] : keyword[continue] identifier[_] , identifier[paused] = identifier[events] . identifier[pop] () identifier[paused] = identifier[min] ( identifier[paused] , identifier[end] ) keyword[else] : identifier[paused] = identifier[end] keyword[if] identifier[resumed] >= identifier[paused] : keyword[continue] identifier[result] . identifier[append] ( identifier[paused] - identifier[resumed] ) keyword[return] identifier[sum] ( identifier[result] ) keyword[if] identifier[result] keyword[else] keyword[None]
def _interval_sum(interval, start=None, end=None, context=None):
    """ Return sum of intervals between "R"esume and "P"aused events
        in C{interval}, optionally limited by a time window defined
        by C{start} and C{end}. Return ``None`` if there's no sensible
        information.

        C{interval} is a series of event types and timestamps, e.g.
        "R1283008245P1283008268".
    """
    end = float(end) if end else time.time()
    events = _interval_split(interval, context=context)
    result = []
    while events:
        (event, resumed) = events.pop()
        if event != 'R':
            # Ignore other events
            continue # depends on [control=['if'], data=[]]
        resumed = max(resumed, start or resumed)
        if events:
            # Further events?
            if not events[-1][0] == 'P':
                continue # If not followed by "P", it's not a valid interval # depends on [control=['if'], data=[]]
            (_, paused) = events.pop()
            paused = min(paused, end) # depends on [control=['if'], data=[]]
        else:
            # Currently active, ends at time window
            paused = end
        if resumed >= paused:
            # Ignore empty intervals
            continue # depends on [control=['if'], data=[]]
        result.append(paused - resumed) # depends on [control=['while'], data=[]]
    return sum(result) if result else None
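A sketch of the event-string handling that _interval_sum relies on. _interval_split is not shown in this record; because events are consumed with pop(), it is assumed here to return (event, timestamp) pairs newest-first, so that pop() yields them in chronological order.

import re
import time

def _interval_split(interval, context=None):
    # assumed reconstruction for illustration: parse "R<ts>P<ts>..." strings
    pairs = [(m.group(1), float(m.group(2)))
             for m in re.finditer(r'([A-Z])(\d+(?:\.\d+)?)', interval)]
    return list(reversed(pairs))

# resumed at t=100, paused at t=160, resumed at t=200, paused at t=230
print(_interval_sum('R100P160R200P230', end=300))             # 90.0
print(_interval_sum('R100P160R200P230', start=150, end=300))  # 40.0 -- clipped at start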
def transfer_pos_tags(self):
    """Returns a list of tuples of the form (word, POS tag), using the transfer POS tagger"""
    tagged_words = []
    for word, t in self.transfer_pos_tagger.annotate(self.words):
        word.pos_tag = t
        tagged_words.append((word, t))
    return tagged_words
def function[transfer_pos_tags, parameter[self]]:
    constant[Returns a list of tuples of the form (word, POS tag), using the transfer POS tagger]
    variable[tagged_words] assign[=] list[[]]
    for taget[tuple[[<ast.Name object at 0x7da20cabeb30>, <ast.Name object at 0x7da20cabdb10>]]] in starred[call[name[self].transfer_pos_tagger.annotate, parameter[name[self].words]]] begin[:]
        name[word].pos_tag assign[=] name[t]
        call[name[tagged_words].append, parameter[tuple[[<ast.Name object at 0x7da18f09df90>, <ast.Name object at 0x7da18f09cca0>]]]]
    return[name[tagged_words]]
keyword[def] identifier[transfer_pos_tags] ( identifier[self] ): literal[string] identifier[tagged_words] =[] keyword[for] identifier[word] , identifier[t] keyword[in] identifier[self] . identifier[transfer_pos_tagger] . identifier[annotate] ( identifier[self] . identifier[words] ): identifier[word] . identifier[pos_tag] = identifier[t] identifier[tagged_words] . identifier[append] (( identifier[word] , identifier[t] )) keyword[return] identifier[tagged_words]
def transfer_pos_tags(self):
    """Returns a list of tuples of the form (word, POS tag), using the transfer POS tagger"""
    tagged_words = []
    for (word, t) in self.transfer_pos_tagger.annotate(self.words):
        word.pos_tag = t
        tagged_words.append((word, t)) # depends on [control=['for'], data=[]]
    return tagged_words
def available(name): ''' Return True if the named service is available. CLI Example: .. code-block:: bash salt '*' service.available sshd ''' cmd = '{0} get {1}'.format(_cmd(), name) if __salt__['cmd.retcode'](cmd) == 2: return False return True
def function[available, parameter[name]]: constant[ Return True if the named service is available. CLI Example: .. code-block:: bash salt '*' service.available sshd ] variable[cmd] assign[=] call[constant[{0} get {1}].format, parameter[call[name[_cmd], parameter[]], name[name]]] if compare[call[call[name[__salt__]][constant[cmd.retcode]], parameter[name[cmd]]] equal[==] constant[2]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[available] ( identifier[name] ): literal[string] identifier[cmd] = literal[string] . identifier[format] ( identifier[_cmd] (), identifier[name] ) keyword[if] identifier[__salt__] [ literal[string] ]( identifier[cmd] )== literal[int] : keyword[return] keyword[False] keyword[return] keyword[True]
def available(name): """ Return True if the named service is available. CLI Example: .. code-block:: bash salt '*' service.available sshd """ cmd = '{0} get {1}'.format(_cmd(), name) if __salt__['cmd.retcode'](cmd) == 2: return False # depends on [control=['if'], data=[]] return True
def apply_one_hot_encoding(self, one_hot_encoding):
    """Apply one hot encoding to generate a specific config.

    Arguments:
        one_hot_encoding (list): A list of one hot encodings,
            1 for each parameter. The shape of each encoding
            should match that of the corresponding ``ParameterSpace``

    Returns:
        A dict config with specific <name, value> pairs
    """
    config = {}
    for ps, one_hot in zip(self.param_list, one_hot_encoding):
        index = np.argmax(one_hot)
        config[ps.name] = ps.choices[index]
    return config
def function[apply_one_hot_encoding, parameter[self, one_hot_encoding]]:
    constant[Apply one hot encoding to generate a specific config.

    Arguments:
        one_hot_encoding (list): A list of one hot encodings,
            1 for each parameter. The shape of each encoding
            should match that of the corresponding ``ParameterSpace``

    Returns:
        A dict config with specific <name, value> pairs
    ]
    variable[config] assign[=] dictionary[[], []]
    for taget[tuple[[<ast.Name object at 0x7da18f09ff40>, <ast.Name object at 0x7da18f09e860>]]] in starred[call[name[zip], parameter[name[self].param_list, name[one_hot_encoding]]]] begin[:]
        variable[index] assign[=] call[name[np].argmax, parameter[name[one_hot]]]
        call[name[config]][name[ps].name] assign[=] call[name[ps].choices][name[index]]
    return[name[config]]
keyword[def] identifier[apply_one_hot_encoding] ( identifier[self] , identifier[one_hot_encoding] ): literal[string] identifier[config] ={} keyword[for] identifier[ps] , identifier[one_hot] keyword[in] identifier[zip] ( identifier[self] . identifier[param_list] , identifier[one_hot_encoding] ): identifier[index] = identifier[np] . identifier[argmax] ( identifier[one_hot] ) identifier[config] [ identifier[ps] . identifier[name] ]= identifier[ps] . identifier[choices] [ identifier[index] ] keyword[return] identifier[config]
def apply_one_hot_encoding(self, one_hot_encoding):
    """Apply one hot encoding to generate a specific config.

    Arguments:
        one_hot_encoding (list): A list of one hot encodings,
            1 for each parameter. The shape of each encoding
            should match that of the corresponding ``ParameterSpace``

    Returns:
        A dict config with specific <name, value> pairs
    """
    config = {}
    for (ps, one_hot) in zip(self.param_list, one_hot_encoding):
        index = np.argmax(one_hot)
        config[ps.name] = ps.choices[index] # depends on [control=['for'], data=[]]
    return config
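A decoding sketch for apply_one_hot_encoding, with a minimal stand-in for the parameter objects; in the original, param_list comes from the surrounding search-space class.

import numpy as np
from collections import namedtuple

Param = namedtuple('Param', 'name choices')

class Space:
    param_list = [Param('lr', [0.1, 0.01, 0.001]),
                  Param('optimizer', ['sgd', 'adam'])]
    apply_one_hot_encoding = apply_one_hot_encoding  # reuse the method above

encoding = [np.array([0, 1, 0]),  # argmax 1 -> lr = 0.01
            np.array([0, 1])]     # argmax 1 -> optimizer = 'adam'
print(Space().apply_one_hot_encoding(encoding))  # {'lr': 0.01, 'optimizer': 'adam'}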
def addHandler(name, basepath=None, baseurl=None, allowDownscale=False): """Add an event handler with given name.""" if basepath is None: basepath = '.' _handlers.append(_handler_classes[name](basepath, baseurl, allowDownscale))
def function[addHandler, parameter[name, basepath, baseurl, allowDownscale]]: constant[Add an event handler with given name.] if compare[name[basepath] is constant[None]] begin[:] variable[basepath] assign[=] constant[.] call[name[_handlers].append, parameter[call[call[name[_handler_classes]][name[name]], parameter[name[basepath], name[baseurl], name[allowDownscale]]]]]
keyword[def] identifier[addHandler] ( identifier[name] , identifier[basepath] = keyword[None] , identifier[baseurl] = keyword[None] , identifier[allowDownscale] = keyword[False] ): literal[string] keyword[if] identifier[basepath] keyword[is] keyword[None] : identifier[basepath] = literal[string] identifier[_handlers] . identifier[append] ( identifier[_handler_classes] [ identifier[name] ]( identifier[basepath] , identifier[baseurl] , identifier[allowDownscale] ))
def addHandler(name, basepath=None, baseurl=None, allowDownscale=False): """Add an event handler with given name.""" if basepath is None: basepath = '.' # depends on [control=['if'], data=['basepath']] _handlers.append(_handler_classes[name](basepath, baseurl, allowDownscale))
def delete_model(self, key, ignoreMissingKey=True, timeoutSecs=60, **kwargs): ''' Delete a model on the h2o cluster, given its key. ''' assert key is not None, '"key" parameter is null' result = self.do_json_request('/3/Models.json/' + key, cmd='delete', timeout=timeoutSecs) # TODO: look for what? if not ignoreMissingKey and 'f00b4r' in result: raise ValueError('Model key not found: ' + key) verboseprint("delete_model result:", dump_json(result)) return result
def function[delete_model, parameter[self, key, ignoreMissingKey, timeoutSecs]]: constant[ Delete a model on the h2o cluster, given its key. ] assert[compare[name[key] is_not constant[None]]] variable[result] assign[=] call[name[self].do_json_request, parameter[binary_operation[constant[/3/Models.json/] + name[key]]]] if <ast.BoolOp object at 0x7da2054a6c50> begin[:] <ast.Raise object at 0x7da2054a68c0> call[name[verboseprint], parameter[constant[delete_model result:], call[name[dump_json], parameter[name[result]]]]] return[name[result]]
keyword[def] identifier[delete_model] ( identifier[self] , identifier[key] , identifier[ignoreMissingKey] = keyword[True] , identifier[timeoutSecs] = literal[int] ,** identifier[kwargs] ): literal[string] keyword[assert] identifier[key] keyword[is] keyword[not] keyword[None] , literal[string] identifier[result] = identifier[self] . identifier[do_json_request] ( literal[string] + identifier[key] , identifier[cmd] = literal[string] , identifier[timeout] = identifier[timeoutSecs] ) keyword[if] keyword[not] identifier[ignoreMissingKey] keyword[and] literal[string] keyword[in] identifier[result] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[key] ) identifier[verboseprint] ( literal[string] , identifier[dump_json] ( identifier[result] )) keyword[return] identifier[result]
def delete_model(self, key, ignoreMissingKey=True, timeoutSecs=60, **kwargs): """ Delete a model on the h2o cluster, given its key. """ assert key is not None, '"key" parameter is null' result = self.do_json_request('/3/Models.json/' + key, cmd='delete', timeout=timeoutSecs) # TODO: look for what? if not ignoreMissingKey and 'f00b4r' in result: raise ValueError('Model key not found: ' + key) # depends on [control=['if'], data=[]] verboseprint('delete_model result:', dump_json(result)) return result
def summarize_sv(items): """CWL target: summarize structural variants for multiple samples. XXX Need to support non-VCF output as tabix indexed output """ items = [utils.to_single_data(x) for x in vcvalidate.summarize_grading(items, "svvalidate")] out = {"sv": {"calls": [], "supplemental": [], "prioritize": {"tsv": [], "raw": []}}, "svvalidate": vcvalidate.combine_validations(items, "svvalidate")} added = set([]) # Standard callers for data in items: if data.get("sv"): if data["sv"].get("vrn_file"): ext = utils.splitext_plus(data["sv"]["vrn_file"])[-1] cur_name = _useful_basename(data) if cur_name not in added and ext.startswith(".vcf"): added.add(cur_name) out_file = os.path.join(utils.safe_makedir(os.path.join(dd.get_work_dir(data), "sv", "calls")), "%s%s" % (cur_name, ext)) utils.copy_plus(data["sv"]["vrn_file"], out_file) out_file = vcfutils.bgzip_and_index(out_file, data["config"]) out["sv"]["calls"].append(out_file) if data["sv"].get("supplemental"): out["sv"]["supplemental"].extend([x for x in data["sv"]["supplemental"] if x]) # prioritization for pdata in _group_by_sample(items): prioritysv = [x for x in prioritize.run([utils.deepish_copy(pdata)])[0].get("sv", []) if x["variantcaller"] == "sv-prioritize"] if prioritysv: out["sv"]["prioritize"]["tsv"].append(prioritysv[0]["vrn_file"]) out["sv"]["prioritize"]["raw"].extend(prioritysv[0]["raw_files"].values()) return [out]
def function[summarize_sv, parameter[items]]: constant[CWL target: summarize structural variants for multiple samples. XXX Need to support non-VCF output as tabix indexed output ] variable[items] assign[=] <ast.ListComp object at 0x7da1b17a5750> variable[out] assign[=] dictionary[[<ast.Constant object at 0x7da1b17a6b00>, <ast.Constant object at 0x7da1b17a5a80>], [<ast.Dict object at 0x7da1b17a6f50>, <ast.Call object at 0x7da1b17a5bd0>]] variable[added] assign[=] call[name[set], parameter[list[[]]]] for taget[name[data]] in starred[name[items]] begin[:] if call[name[data].get, parameter[constant[sv]]] begin[:] if call[call[name[data]][constant[sv]].get, parameter[constant[vrn_file]]] begin[:] variable[ext] assign[=] call[call[name[utils].splitext_plus, parameter[call[call[name[data]][constant[sv]]][constant[vrn_file]]]]][<ast.UnaryOp object at 0x7da1b17a6ad0>] variable[cur_name] assign[=] call[name[_useful_basename], parameter[name[data]]] if <ast.BoolOp object at 0x7da1b17a7e50> begin[:] call[name[added].add, parameter[name[cur_name]]] variable[out_file] assign[=] call[name[os].path.join, parameter[call[name[utils].safe_makedir, parameter[call[name[os].path.join, parameter[call[name[dd].get_work_dir, parameter[name[data]]], constant[sv], constant[calls]]]]], binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b17a4310>, <ast.Name object at 0x7da1b17a7f70>]]]]] call[name[utils].copy_plus, parameter[call[call[name[data]][constant[sv]]][constant[vrn_file]], name[out_file]]] variable[out_file] assign[=] call[name[vcfutils].bgzip_and_index, parameter[name[out_file], call[name[data]][constant[config]]]] call[call[call[name[out]][constant[sv]]][constant[calls]].append, parameter[name[out_file]]] if call[call[name[data]][constant[sv]].get, parameter[constant[supplemental]]] begin[:] call[call[call[name[out]][constant[sv]]][constant[supplemental]].extend, parameter[<ast.ListComp object at 0x7da18f00c3a0>]] for taget[name[pdata]] in starred[call[name[_group_by_sample], parameter[name[items]]]] begin[:] variable[prioritysv] assign[=] <ast.ListComp object at 0x7da18f00d8a0> if name[prioritysv] begin[:] call[call[call[call[name[out]][constant[sv]]][constant[prioritize]]][constant[tsv]].append, parameter[call[call[name[prioritysv]][constant[0]]][constant[vrn_file]]]] call[call[call[call[name[out]][constant[sv]]][constant[prioritize]]][constant[raw]].extend, parameter[call[call[call[name[prioritysv]][constant[0]]][constant[raw_files]].values, parameter[]]]] return[list[[<ast.Name object at 0x7da1b2347fa0>]]]
keyword[def] identifier[summarize_sv] ( identifier[items] ): literal[string] identifier[items] =[ identifier[utils] . identifier[to_single_data] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[vcvalidate] . identifier[summarize_grading] ( identifier[items] , literal[string] )] identifier[out] ={ literal[string] :{ literal[string] :[], literal[string] :[], literal[string] :{ literal[string] :[], literal[string] :[]}}, literal[string] : identifier[vcvalidate] . identifier[combine_validations] ( identifier[items] , literal[string] )} identifier[added] = identifier[set] ([]) keyword[for] identifier[data] keyword[in] identifier[items] : keyword[if] identifier[data] . identifier[get] ( literal[string] ): keyword[if] identifier[data] [ literal[string] ]. identifier[get] ( literal[string] ): identifier[ext] = identifier[utils] . identifier[splitext_plus] ( identifier[data] [ literal[string] ][ literal[string] ])[- literal[int] ] identifier[cur_name] = identifier[_useful_basename] ( identifier[data] ) keyword[if] identifier[cur_name] keyword[not] keyword[in] identifier[added] keyword[and] identifier[ext] . identifier[startswith] ( literal[string] ): identifier[added] . identifier[add] ( identifier[cur_name] ) identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dd] . identifier[get_work_dir] ( identifier[data] ), literal[string] , literal[string] )), literal[string] %( identifier[cur_name] , identifier[ext] )) identifier[utils] . identifier[copy_plus] ( identifier[data] [ literal[string] ][ literal[string] ], identifier[out_file] ) identifier[out_file] = identifier[vcfutils] . identifier[bgzip_and_index] ( identifier[out_file] , identifier[data] [ literal[string] ]) identifier[out] [ literal[string] ][ literal[string] ]. identifier[append] ( identifier[out_file] ) keyword[if] identifier[data] [ literal[string] ]. identifier[get] ( literal[string] ): identifier[out] [ literal[string] ][ literal[string] ]. identifier[extend] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[data] [ literal[string] ][ literal[string] ] keyword[if] identifier[x] ]) keyword[for] identifier[pdata] keyword[in] identifier[_group_by_sample] ( identifier[items] ): identifier[prioritysv] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[prioritize] . identifier[run] ([ identifier[utils] . identifier[deepish_copy] ( identifier[pdata] )])[ literal[int] ]. identifier[get] ( literal[string] ,[]) keyword[if] identifier[x] [ literal[string] ]== literal[string] ] keyword[if] identifier[prioritysv] : identifier[out] [ literal[string] ][ literal[string] ][ literal[string] ]. identifier[append] ( identifier[prioritysv] [ literal[int] ][ literal[string] ]) identifier[out] [ literal[string] ][ literal[string] ][ literal[string] ]. identifier[extend] ( identifier[prioritysv] [ literal[int] ][ literal[string] ]. identifier[values] ()) keyword[return] [ identifier[out] ]
def summarize_sv(items): """CWL target: summarize structural variants for multiple samples. XXX Need to support non-VCF output as tabix indexed output """ items = [utils.to_single_data(x) for x in vcvalidate.summarize_grading(items, 'svvalidate')] out = {'sv': {'calls': [], 'supplemental': [], 'prioritize': {'tsv': [], 'raw': []}}, 'svvalidate': vcvalidate.combine_validations(items, 'svvalidate')} added = set([]) # Standard callers for data in items: if data.get('sv'): if data['sv'].get('vrn_file'): ext = utils.splitext_plus(data['sv']['vrn_file'])[-1] cur_name = _useful_basename(data) if cur_name not in added and ext.startswith('.vcf'): added.add(cur_name) out_file = os.path.join(utils.safe_makedir(os.path.join(dd.get_work_dir(data), 'sv', 'calls')), '%s%s' % (cur_name, ext)) utils.copy_plus(data['sv']['vrn_file'], out_file) out_file = vcfutils.bgzip_and_index(out_file, data['config']) out['sv']['calls'].append(out_file) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if data['sv'].get('supplemental'): out['sv']['supplemental'].extend([x for x in data['sv']['supplemental'] if x]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['data']] # prioritization for pdata in _group_by_sample(items): prioritysv = [x for x in prioritize.run([utils.deepish_copy(pdata)])[0].get('sv', []) if x['variantcaller'] == 'sv-prioritize'] if prioritysv: out['sv']['prioritize']['tsv'].append(prioritysv[0]['vrn_file']) out['sv']['prioritize']['raw'].extend(prioritysv[0]['raw_files'].values()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pdata']] return [out]
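The four fields above are parallel views of one function: source code, an AST-style sememe dump, a keyword/identifier/literal token stream, and a control-flow-annotated rewrite. A minimal sketch of how such views can be derived with Python's standard library follows; this is an assumption for illustration, not the dataset's actual pipeline, and the sample source string is hypothetical.

import ast
import io
import keyword
import tokenize

source = "def greet(name):\n    return 'hi ' + name\n"

# AST view, comparable in spirit to the code_sememe field above.
print(ast.dump(ast.parse(source)))

# Keyword/identifier/literal view, comparable to the token_type field above.
for tok in tokenize.generate_tokens(io.StringIO(source).readline):
    if tok.type == tokenize.NAME:
        # keyword.iskeyword distinguishes reserved words from identifiers
        print("keyword" if keyword.iskeyword(tok.string) else "identifier", tok.string)
    elif tok.type in (tokenize.NUMBER, tokenize.STRING):
        print("literal", tok.string)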
def rethrow(self, msg, _type=InvalidResourceException):
        """
        Raises an exception with custom type and modified error message.

        The raised exception is based on the current exc_info() and carries its traceback

        @type msg: str
        @param msg: New error message
        @type _type: type
        @param _type: Reraised exception type

        @raises: Exception
        """
        exc_type, exc_value, exc_traceback = sys.exc_info()
        msg = msg + \
            "\nOriginal message: {0} {1}".format(exc_type.__name__, exc_value)

        raise _type(msg)
def function[rethrow, parameter[self, msg, _type]]:
    constant[
        Raises an exception with custom type and modified error message.

        The raised exception is based on the current exc_info() and carries its traceback

        @type msg: str
        @param msg: New error message
        @type _type: type
        @param _type: Reraised exception type

        @raises: Exception
        ]
    <ast.Tuple object at 0x7da1b209e920> assign[=] call[name[sys].exc_info, parameter[]]
    variable[msg] assign[=] binary_operation[name[msg] + call[constant[
Original message: {0} {1}].format, parameter[name[exc_type].__name__, name[exc_value]]]]
    <ast.Raise object at 0x7da1b209e230>
keyword[def] identifier[rethrow] ( identifier[self] , identifier[msg] , identifier[_type] = identifier[InvalidResourceException] ): literal[string] identifier[exc_type] , identifier[exc_value] , identifier[exc_traceback] = identifier[sys] . identifier[exc_info] () identifier[msg] = identifier[msg] + literal[string] . identifier[format] ( identifier[exc_type] . identifier[__name__] , identifier[exc_value] ) keyword[raise] identifier[_type] ( identifier[msg] )
def rethrow(self, msg, _type=InvalidResourceException):
    """
        Raises an exception with custom type and modified error message.

        The raised exception is based on the current exc_info() and carries its traceback

        @type msg: str
        @param msg: New error message
        @type _type: type
        @param _type: Reraised exception type

        @raises: Exception
        """
    (exc_type, exc_value, exc_traceback) = sys.exc_info()
    msg = msg + '\nOriginal message: {0} {1}'.format(exc_type.__name__, exc_value)
    raise _type(msg)
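A hedged usage sketch for the rethrow record above: sys.exc_info() is only populated while an exception is being handled, so the method is meant to be called from inside an except block. The Resource class and InvalidResourceException below are illustrative stand-ins, not part of the source record.

import sys

class InvalidResourceException(Exception):
    pass

class Resource:
    # Same body as the record above, reproduced so the sketch is self-contained.
    def rethrow(self, msg, _type=InvalidResourceException):
        exc_type, exc_value, exc_traceback = sys.exc_info()
        msg = msg + "\nOriginal message: {0} {1}".format(exc_type.__name__, exc_value)
        raise _type(msg)

try:
    int("not a number")  # any failing operation
except ValueError:
    try:
        Resource().rethrow("Failed to parse resource id")
    except InvalidResourceException as exc:
        print(exc)  # message now carries the original ValueError text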