Dataset columns (name, type, string-length range):

    code             string, 75 to 104k characters
    code_sememe      string, 47 to 309k characters
    token_type       string, 215 to 214k characters
    code_dependency  string, 75 to 155k characters
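A minimal sketch of loading a dataset with this schema through the Hugging Face
datasets library and peeking at one row; the repository id below is a
placeholder (assumption), not the dataset's actual name.

import datasets

# Load the corpus; "user/code-sememe-corpus" is a hypothetical repository id.
ds = datasets.load_dataset("user/code-sememe-corpus", split="train")
row = ds[0]
for column in ("code", "code_sememe", "token_type", "code_dependency"):
    # Every field is a flat string; print a short prefix of each representation.
    print(column, "->", row[column][:120].replace("\n", " "))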
def assert_key_type_value(self, context_item, caller, extra_error_text=''):
    """Assert that keys exist of right type and has a value.

    Args:
        context_item: ContextItemInfo tuple
        caller: string. calling function name - this used to construct
                error messages
        extra_error_text: append to end of error message.

    Raises:
        AssertionError: if context_item None.
        KeyNotInContextError: Key doesn't exist
        KeyInContextHasNoValueError: context[key] is None or the wrong type.
    """
    assert context_item, ("context_item parameter must be specified.")
    if extra_error_text is None or extra_error_text == '':
        append_error_text = ''
    else:
        append_error_text = f' {extra_error_text}'

    if not context_item.key_in_context:
        raise KeyNotInContextError(f'{caller} couldn\'t find '
                                   f'{context_item.key} in context.'
                                   f'{append_error_text}')

    if not context_item.has_value:
        raise KeyInContextHasNoValueError(
            f'{caller} found {context_item.key} in '
            f'context but it doesn\'t have a value.'
            f'{append_error_text}')

    if not context_item.is_expected_type:
        raise KeyInContextHasNoValueError(
            f'{caller} found {context_item.key} in context, but it\'s '
            f'not a {context_item.expected_type}.'
            f'{append_error_text}')
def function[assert_key_type_value, parameter[self, context_item, caller, extra_error_text]]: constant[Assert that keys exist of right type and has a value. Args: context_item: ContextItemInfo tuple caller: string. calling function name - this used to construct error messages extra_error_text: append to end of error message. Raises: AssertionError: if context_item None. KeyNotInContextError: Key doesn't exist KeyInContextHasNoValueError: context[key] is None or the wrong type. ] assert[name[context_item]] if <ast.BoolOp object at 0x7da204566290> begin[:] variable[append_error_text] assign[=] constant[] if <ast.UnaryOp object at 0x7da20c6aa2f0> begin[:] <ast.Raise object at 0x7da20c6aad70> if <ast.UnaryOp object at 0x7da20c6a9ed0> begin[:] <ast.Raise object at 0x7da20c6ab880> if <ast.UnaryOp object at 0x7da20c6a93f0> begin[:] <ast.Raise object at 0x7da20c6a82e0>
keyword[def] identifier[assert_key_type_value] ( identifier[self] , identifier[context_item] , identifier[caller] , identifier[extra_error_text] = literal[string] ): literal[string] keyword[assert] identifier[context_item] ,( literal[string] ) keyword[if] identifier[extra_error_text] keyword[is] keyword[None] keyword[or] identifier[extra_error_text] == literal[string] : identifier[append_error_text] = literal[string] keyword[else] : identifier[append_error_text] = literal[string] keyword[if] keyword[not] identifier[context_item] . identifier[key_in_context] : keyword[raise] identifier[KeyNotInContextError] ( literal[string] literal[string] literal[string] ) keyword[if] keyword[not] identifier[context_item] . identifier[has_value] : keyword[raise] identifier[KeyInContextHasNoValueError] ( literal[string] literal[string] literal[string] ) keyword[if] keyword[not] identifier[context_item] . identifier[is_expected_type] : keyword[raise] identifier[KeyInContextHasNoValueError] ( literal[string] literal[string] literal[string] )
def assert_key_type_value(self, context_item, caller, extra_error_text=''):
    """Assert that keys exist of right type and has a value.

    Args:
        context_item: ContextItemInfo tuple
        caller: string. calling function name - this used to construct
                error messages
        extra_error_text: append to end of error message.

    Raises:
        AssertionError: if context_item None.
        KeyNotInContextError: Key doesn't exist
        KeyInContextHasNoValueError: context[key] is None or the wrong type.
    """
    assert context_item, 'context_item parameter must be specified.'
    if extra_error_text is None or extra_error_text == '':
        append_error_text = ''  # depends on [control=['if'], data=[]]
    else:
        append_error_text = f' {extra_error_text}'
    if not context_item.key_in_context:
        raise KeyNotInContextError(f"{caller} couldn't find {context_item.key} in context.{append_error_text}")  # depends on [control=['if'], data=[]]
    if not context_item.has_value:
        raise KeyInContextHasNoValueError(f"{caller} found {context_item.key} in context but it doesn't have a value.{append_error_text}")  # depends on [control=['if'], data=[]]
    if not context_item.is_expected_type:
        raise KeyInContextHasNoValueError(f"{caller} found {context_item.key} in context, but it's not a {context_item.expected_type}.{append_error_text}")  # depends on [control=['if'], data=[]]
def show_ahrs_calibration(clb_upi, version='3'):
    """Show AHRS calibration data for given `clb_upi`."""
    db = DBManager()
    ahrs_upi = clbupi2ahrsupi(clb_upi)
    print("AHRS UPI: {}".format(ahrs_upi))
    content = db._get_content("show_product_test.htm?upi={0}&"
                              "testtype=AHRS-CALIBRATION-v{1}&n=1&out=xml"
                              .format(ahrs_upi, version)) \
                .replace('\n', '')

    import xml.etree.ElementTree as ET
    try:
        root = ET.parse(io.StringIO(content)).getroot()
    except ET.ParseError:
        print("No calibration data found")
    else:
        for child in root:
            print("{}: {}".format(child.tag, child.text))
        names = [c.text for c in root.findall(".//Name")]
        values = [[i.text for i in c] for c in root.findall(".//Values")]
        for name, value in zip(names, values):
            print("{}: {}".format(name, value))
def function[show_ahrs_calibration, parameter[clb_upi, version]]: constant[Show AHRS calibration data for given `clb_upi`.] variable[db] assign[=] call[name[DBManager], parameter[]] variable[ahrs_upi] assign[=] call[name[clbupi2ahrsupi], parameter[name[clb_upi]]] call[name[print], parameter[call[constant[AHRS UPI: {}].format, parameter[name[ahrs_upi]]]]] variable[content] assign[=] call[call[name[db]._get_content, parameter[call[constant[show_product_test.htm?upi={0}&testtype=AHRS-CALIBRATION-v{1}&n=1&out=xml].format, parameter[name[ahrs_upi], name[version]]]]].replace, parameter[constant[ ], constant[]]] import module[xml.etree.ElementTree] as alias[ET] <ast.Try object at 0x7da20c6c4970>
keyword[def] identifier[show_ahrs_calibration] ( identifier[clb_upi] , identifier[version] = literal[string] ): literal[string] identifier[db] = identifier[DBManager] () identifier[ahrs_upi] = identifier[clbupi2ahrsupi] ( identifier[clb_upi] ) identifier[print] ( literal[string] . identifier[format] ( identifier[ahrs_upi] )) identifier[content] = identifier[db] . identifier[_get_content] ( literal[string] literal[string] . identifier[format] ( identifier[ahrs_upi] , identifier[version] )). identifier[replace] ( literal[string] , literal[string] ) keyword[import] identifier[xml] . identifier[etree] . identifier[ElementTree] keyword[as] identifier[ET] keyword[try] : identifier[root] = identifier[ET] . identifier[parse] ( identifier[io] . identifier[StringIO] ( identifier[content] )). identifier[getroot] () keyword[except] identifier[ET] . identifier[ParseError] : identifier[print] ( literal[string] ) keyword[else] : keyword[for] identifier[child] keyword[in] identifier[root] : identifier[print] ( literal[string] . identifier[format] ( identifier[child] . identifier[tag] , identifier[child] . identifier[text] )) identifier[names] =[ identifier[c] . identifier[text] keyword[for] identifier[c] keyword[in] identifier[root] . identifier[findall] ( literal[string] )] identifier[values] =[[ identifier[i] . identifier[text] keyword[for] identifier[i] keyword[in] identifier[c] ] keyword[for] identifier[c] keyword[in] identifier[root] . identifier[findall] ( literal[string] )] keyword[for] identifier[name] , identifier[value] keyword[in] identifier[zip] ( identifier[names] , identifier[values] ): identifier[print] ( literal[string] . identifier[format] ( identifier[name] , identifier[value] ))
def show_ahrs_calibration(clb_upi, version='3'):
    """Show AHRS calibration data for given `clb_upi`."""
    db = DBManager()
    ahrs_upi = clbupi2ahrsupi(clb_upi)
    print('AHRS UPI: {}'.format(ahrs_upi))
    content = db._get_content('show_product_test.htm?upi={0}&testtype=AHRS-CALIBRATION-v{1}&n=1&out=xml'.format(ahrs_upi, version)).replace('\n', '')
    import xml.etree.ElementTree as ET
    try:
        root = ET.parse(io.StringIO(content)).getroot()  # depends on [control=['try'], data=[]]
    except ET.ParseError:
        print('No calibration data found')  # depends on [control=['except'], data=[]]
    else:
        for child in root:
            print('{}: {}'.format(child.tag, child.text))  # depends on [control=['for'], data=['child']]
        names = [c.text for c in root.findall('.//Name')]
        values = [[i.text for i in c] for c in root.findall('.//Values')]
        for (name, value) in zip(names, values):
            print('{}: {}'.format(name, value))  # depends on [control=['for'], data=[]]
def language(s):
    """ Returns a (language, confidence)-tuple for the given string.
    """
    s = decode_utf8(s)
    s = set(w.strip(PUNCTUATION) for w in s.replace("'", "' ").split())
    n = float(len(s) or 1)
    p = {}
    for xx in LANGUAGES:
        lexicon = _module(xx).__dict__["lexicon"]
        p[xx] = sum(1 for w in s if w in lexicon) / n
    return max(p.items(), key=lambda kv: (kv[1], int(kv[0] == "en")))
def function[language, parameter[s]]: constant[ Returns a (language, confidence)-tuple for the given string. ] variable[s] assign[=] call[name[decode_utf8], parameter[name[s]]] variable[s] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da207f9a6b0>]] variable[n] assign[=] call[name[float], parameter[<ast.BoolOp object at 0x7da207f98040>]] variable[p] assign[=] dictionary[[], []] for taget[name[xx]] in starred[name[LANGUAGES]] begin[:] variable[lexicon] assign[=] call[call[name[_module], parameter[name[xx]]].__dict__][constant[lexicon]] call[name[p]][name[xx]] assign[=] binary_operation[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da207f99510>]] / name[n]] return[call[name[max], parameter[call[name[p].items, parameter[]]]]]
keyword[def] identifier[language] ( identifier[s] ): literal[string] identifier[s] = identifier[decode_utf8] ( identifier[s] ) identifier[s] = identifier[set] ( identifier[w] . identifier[strip] ( identifier[PUNCTUATION] ) keyword[for] identifier[w] keyword[in] identifier[s] . identifier[replace] ( literal[string] , literal[string] ). identifier[split] ()) identifier[n] = identifier[float] ( identifier[len] ( identifier[s] ) keyword[or] literal[int] ) identifier[p] ={} keyword[for] identifier[xx] keyword[in] identifier[LANGUAGES] : identifier[lexicon] = identifier[_module] ( identifier[xx] ). identifier[__dict__] [ literal[string] ] identifier[p] [ identifier[xx] ]= identifier[sum] ( literal[int] keyword[for] identifier[w] keyword[in] identifier[s] keyword[if] identifier[w] keyword[in] identifier[lexicon] )/ identifier[n] keyword[return] identifier[max] ( identifier[p] . identifier[items] (), identifier[key] = keyword[lambda] identifier[kv] :( identifier[kv] [ literal[int] ], identifier[int] ( identifier[kv] [ literal[int] ]== literal[string] )))
def language(s):
    """ Returns a (language, confidence)-tuple for the given string.
    """
    s = decode_utf8(s)
    s = set((w.strip(PUNCTUATION) for w in s.replace("'", "' ").split()))
    n = float(len(s) or 1)
    p = {}
    for xx in LANGUAGES:
        lexicon = _module(xx).__dict__['lexicon']
        p[xx] = sum((1 for w in s if w in lexicon)) / n  # depends on [control=['for'], data=['xx']]
    return max(p.items(), key=lambda kv: (kv[1], int(kv[0] == 'en')))
def full_prepare(self, obj):
    """
    Make django_ct equal to the type of get_model, to make polymorphic
    children show up in results.
    """
    prepared_data = super(AbstractLayoutIndex, self).full_prepare(obj)
    prepared_data['django_ct'] = get_model_ct(self.get_model())
    return prepared_data
def function[full_prepare, parameter[self, obj]]: constant[ Make django_ct equal to the type of get_model, to make polymorphic children show up in results. ] variable[prepared_data] assign[=] call[call[name[super], parameter[name[AbstractLayoutIndex], name[self]]].full_prepare, parameter[name[obj]]] call[name[prepared_data]][constant[django_ct]] assign[=] call[name[get_model_ct], parameter[call[name[self].get_model, parameter[]]]] return[name[prepared_data]]
keyword[def] identifier[full_prepare] ( identifier[self] , identifier[obj] ): literal[string] identifier[prepared_data] = identifier[super] ( identifier[AbstractLayoutIndex] , identifier[self] ). identifier[full_prepare] ( identifier[obj] ) identifier[prepared_data] [ literal[string] ]= identifier[get_model_ct] ( identifier[self] . identifier[get_model] ()) keyword[return] identifier[prepared_data]
def full_prepare(self, obj):
    """
    Make django_ct equal to the type of get_model, to make polymorphic
    children show up in results.
    """
    prepared_data = super(AbstractLayoutIndex, self).full_prepare(obj)
    prepared_data['django_ct'] = get_model_ct(self.get_model())
    return prepared_data
def perr(self, *args, **kwargs):
    """ Console to STERR """
    kwargs['file'] = self.err
    self.print(*args, **kwargs)
    sys.stderr.flush()
def function[perr, parameter[self]]: constant[ Console to STERR ] call[name[kwargs]][constant[file]] assign[=] name[self].err call[name[self].print, parameter[<ast.Starred object at 0x7da1b1409bd0>]] call[name[sys].stderr.flush, parameter[]]
keyword[def] identifier[perr] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[err] identifier[self] . identifier[print] (* identifier[args] ,** identifier[kwargs] ) identifier[sys] . identifier[stderr] . identifier[flush] ()
def perr(self, *args, **kwargs):
    """ Console to STERR """
    kwargs['file'] = self.err
    self.print(*args, **kwargs)
    sys.stderr.flush()
def schema_create(dbname, name, owner=None,
                  user=None,
                  db_user=None, db_password=None,
                  db_host=None, db_port=None):
    '''
    Creates a Postgres schema.

    CLI Example:

    .. code-block:: bash

        salt '*' postgres.schema_create dbname name owner='owner' \\
                user='user' \\
                db_user='user' db_password='password'
                db_host='hostname' db_port='port'
    '''
    # check if schema exists
    if schema_exists(dbname, name, user=user,
                     db_user=db_user, db_password=db_password,
                     db_host=db_host, db_port=db_port):
        log.info('\'%s\' already exists in \'%s\'', name, dbname)
        return False

    sub_cmd = 'CREATE SCHEMA "{0}"'.format(name)
    if owner is not None:
        sub_cmd = '{0} AUTHORIZATION "{1}"'.format(sub_cmd, owner)

    ret = _psql_prepare_and_run(['-c', sub_cmd],
                                user=db_user, password=db_password,
                                port=db_port, host=db_host,
                                maintenance_db=dbname, runas=user)

    return ret['retcode'] == 0
def function[schema_create, parameter[dbname, name, owner, user, db_user, db_password, db_host, db_port]]: constant[ Creates a Postgres schema. CLI Example: .. code-block:: bash salt '*' postgres.schema_create dbname name owner='owner' \ user='user' \ db_user='user' db_password='password' db_host='hostname' db_port='port' ] if call[name[schema_exists], parameter[name[dbname], name[name]]] begin[:] call[name[log].info, parameter[constant['%s' already exists in '%s'], name[name], name[dbname]]] return[constant[False]] variable[sub_cmd] assign[=] call[constant[CREATE SCHEMA "{0}"].format, parameter[name[name]]] if compare[name[owner] is_not constant[None]] begin[:] variable[sub_cmd] assign[=] call[constant[{0} AUTHORIZATION "{1}"].format, parameter[name[sub_cmd], name[owner]]] variable[ret] assign[=] call[name[_psql_prepare_and_run], parameter[list[[<ast.Constant object at 0x7da20e957280>, <ast.Name object at 0x7da20e954fa0>]]]] return[compare[call[name[ret]][constant[retcode]] equal[==] constant[0]]]
keyword[def] identifier[schema_create] ( identifier[dbname] , identifier[name] , identifier[owner] = keyword[None] , identifier[user] = keyword[None] , identifier[db_user] = keyword[None] , identifier[db_password] = keyword[None] , identifier[db_host] = keyword[None] , identifier[db_port] = keyword[None] ): literal[string] keyword[if] identifier[schema_exists] ( identifier[dbname] , identifier[name] , identifier[user] = identifier[user] , identifier[db_user] = identifier[db_user] , identifier[db_password] = identifier[db_password] , identifier[db_host] = identifier[db_host] , identifier[db_port] = identifier[db_port] ): identifier[log] . identifier[info] ( literal[string] , identifier[name] , identifier[dbname] ) keyword[return] keyword[False] identifier[sub_cmd] = literal[string] . identifier[format] ( identifier[name] ) keyword[if] identifier[owner] keyword[is] keyword[not] keyword[None] : identifier[sub_cmd] = literal[string] . identifier[format] ( identifier[sub_cmd] , identifier[owner] ) identifier[ret] = identifier[_psql_prepare_and_run] ([ literal[string] , identifier[sub_cmd] ], identifier[user] = identifier[db_user] , identifier[password] = identifier[db_password] , identifier[port] = identifier[db_port] , identifier[host] = identifier[db_host] , identifier[maintenance_db] = identifier[dbname] , identifier[runas] = identifier[user] ) keyword[return] identifier[ret] [ literal[string] ]== literal[int]
def schema_create(dbname, name, owner=None, user=None, db_user=None, db_password=None, db_host=None, db_port=None):
    """
    Creates a Postgres schema.

    CLI Example:

    .. code-block:: bash

        salt '*' postgres.schema_create dbname name owner='owner' \\
                user='user' \\
                db_user='user' db_password='password'
                db_host='hostname' db_port='port'
    """
    # check if schema exists
    if schema_exists(dbname, name, user=user, db_user=db_user, db_password=db_password, db_host=db_host, db_port=db_port):
        log.info("'%s' already exists in '%s'", name, dbname)
        return False  # depends on [control=['if'], data=[]]
    sub_cmd = 'CREATE SCHEMA "{0}"'.format(name)
    if owner is not None:
        sub_cmd = '{0} AUTHORIZATION "{1}"'.format(sub_cmd, owner)  # depends on [control=['if'], data=['owner']]
    ret = _psql_prepare_and_run(['-c', sub_cmd], user=db_user, password=db_password, port=db_port, host=db_host, maintenance_db=dbname, runas=user)
    return ret['retcode'] == 0
def _set_esi_timeout(self, v, load=False):
    """
    Setter method for esi_timeout, mapped from YANG variable /isns/isns_vrf/esi_timeout (esi-time)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_esi_timeout is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_esi_timeout() directly.

    YANG Description: This specifies VRF instance esi timeout.
    """
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(v, base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'120..3600']}), is_leaf=True, yang_name="esi-timeout", rest_name="esi-timeout", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'iSNS VRF forwarding esi timeout.Default esi time is 300.', u'hidden': u'esi-timeout'}}, namespace='urn:brocade.com:mgmt:brocade-isns', defining_module='brocade-isns', yang_type='esi-time', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """esi_timeout must be of a type compatible with esi-time""",
            'defined-type': "brocade-isns:esi-time",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'120..3600']}), is_leaf=True, yang_name="esi-timeout", rest_name="esi-timeout", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'iSNS VRF forwarding esi timeout.Default esi time is 300.', u'hidden': u'esi-timeout'}}, namespace='urn:brocade.com:mgmt:brocade-isns', defining_module='brocade-isns', yang_type='esi-time', is_config=True)""",
        })

    self.__esi_timeout = t
    if hasattr(self, '_set'):
        self._set()
def function[_set_esi_timeout, parameter[self, v, load]]: constant[ Setter method for esi_timeout, mapped from YANG variable /isns/isns_vrf/esi_timeout (esi-time) If this variable is read-only (config: false) in the source YANG file, then _set_esi_timeout is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_esi_timeout() directly. YANG Description: This specifies VRF instance esi timeout. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da18f813af0> name[self].__esi_timeout assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_esi_timeout] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[long] , identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}, identifier[int_size] = literal[int] ), identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__esi_timeout] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_esi_timeout(self, v, load=False):
    """
    Setter method for esi_timeout, mapped from YANG variable /isns/isns_vrf/esi_timeout (esi-time)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_esi_timeout is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_esi_timeout() directly.

    YANG Description: This specifies VRF instance esi timeout.
    """
    if hasattr(v, '_utype'):
        v = v._utype(v)  # depends on [control=['if'], data=[]]
    try:
        t = YANGDynClass(v, base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'120..3600']}), is_leaf=True, yang_name='esi-timeout', rest_name='esi-timeout', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'iSNS VRF forwarding esi timeout.Default esi time is 300.', u'hidden': u'esi-timeout'}}, namespace='urn:brocade.com:mgmt:brocade-isns', defining_module='brocade-isns', yang_type='esi-time', is_config=True)  # depends on [control=['try'], data=[]]
    except (TypeError, ValueError):
        raise ValueError({'error-string': 'esi_timeout must be of a type compatible with esi-time', 'defined-type': 'brocade-isns:esi-time', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={\'range\': [\'0..4294967295\']}, int_size=32), restriction_dict={\'range\': [u\'120..3600\']}), is_leaf=True, yang_name="esi-timeout", rest_name="esi-timeout", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'iSNS VRF forwarding esi timeout.Default esi time is 300.\', u\'hidden\': u\'esi-timeout\'}}, namespace=\'urn:brocade.com:mgmt:brocade-isns\', defining_module=\'brocade-isns\', yang_type=\'esi-time\', is_config=True)'})  # depends on [control=['except'], data=[]]
    self.__esi_timeout = t
    if hasattr(self, '_set'):
        self._set()  # depends on [control=['if'], data=[]]
def shutdown(self, how=socket.SHUT_RDWR):
    """
    Send a shutdown signal for both reading and writing, or whatever
    socket.SHUT_* constant you like.

    Shutdown differs from closing in that it explicitly changes the state of
    the socket resource to closed, whereas closing will only decrement the
    number of peers on this end of the socket, since sockets can be a
    resource shared by multiple peers on a single OS. When the number of
    peers reaches zero, the socket is closed, but not deallocated, so you
    still need to call close. (except that this is python and close is
    automatically called on the deletion of the socket)

    http://stackoverflow.com/questions/409783/socket-shutdown-vs-socket-close
    """
    if self._sock_send is not None:
        self._sock_send.shutdown(how)
    return self.sock.shutdown(how)
def function[shutdown, parameter[self, how]]: constant[ Send a shutdown signal for both reading and writing, or whatever socket.SHUT_* constant you like. Shutdown differs from closing in that it explicitly changes the state of the socket resource to closed, whereas closing will only decrement the number of peers on this end of the socket, since sockets can be a resource shared by multiple peers on a single OS. When the number of peers reaches zero, the socket is closed, but not deallocated, so you still need to call close. (except that this is python and close is automatically called on the deletion of the socket) http://stackoverflow.com/questions/409783/socket-shutdown-vs-socket-close ] if compare[name[self]._sock_send is_not constant[None]] begin[:] call[name[self]._sock_send.shutdown, parameter[name[how]]] return[call[name[self].sock.shutdown, parameter[name[how]]]]
keyword[def] identifier[shutdown] ( identifier[self] , identifier[how] = identifier[socket] . identifier[SHUT_RDWR] ): literal[string] keyword[if] identifier[self] . identifier[_sock_send] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_sock_send] . identifier[shutdown] ( identifier[how] ) keyword[return] identifier[self] . identifier[sock] . identifier[shutdown] ( identifier[how] )
def shutdown(self, how=socket.SHUT_RDWR):
    """
    Send a shutdown signal for both reading and writing, or whatever
    socket.SHUT_* constant you like.

    Shutdown differs from closing in that it explicitly changes the state of
    the socket resource to closed, whereas closing will only decrement the
    number of peers on this end of the socket, since sockets can be a
    resource shared by multiple peers on a single OS. When the number of
    peers reaches zero, the socket is closed, but not deallocated, so you
    still need to call close. (except that this is python and close is
    automatically called on the deletion of the socket)

    http://stackoverflow.com/questions/409783/socket-shutdown-vs-socket-close
    """
    if self._sock_send is not None:
        self._sock_send.shutdown(how)  # depends on [control=['if'], data=[]]
    return self.sock.shutdown(how)
def answer(self):
    """ Answer the phone call.
    :return: self (for chaining method calls)
    """
    if self.ringing:
        self._gsmModem.write('ATA')
        self.ringing = False
        self.answered = True
    return self
def function[answer, parameter[self]]: constant[ Answer the phone call. :return: self (for chaining method calls) ] if name[self].ringing begin[:] call[name[self]._gsmModem.write, parameter[constant[ATA]]] name[self].ringing assign[=] constant[False] name[self].answered assign[=] constant[True] return[name[self]]
keyword[def] identifier[answer] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[ringing] : identifier[self] . identifier[_gsmModem] . identifier[write] ( literal[string] ) identifier[self] . identifier[ringing] = keyword[False] identifier[self] . identifier[answered] = keyword[True] keyword[return] identifier[self]
def answer(self):
    """ Answer the phone call.
    :return: self (for chaining method calls)
    """
    if self.ringing:
        self._gsmModem.write('ATA')
        self.ringing = False
        self.answered = True  # depends on [control=['if'], data=[]]
    return self
def _limit_data(self, data):
    """
    Find the per-day average of each series in the data over the last 7
    days; drop all but the top 10.

    :param data: original graph data
    :type data: dict
    :return: dict containing only the top 10 series, based on average
      over the last 7 days.
    :rtype: dict
    """
    if len(data.keys()) <= 10:
        logger.debug("Data has less than 10 keys; not limiting")
        return data
    # average last 7 days of each series
    avgs = {}
    for k in data:
        if len(data[k]) <= 7:
            vals = data[k]
        else:
            vals = data[k][-7:]
        avgs[k] = sum(vals) / len(vals)
    # hold state
    final_data = {}  # final data dict
    other = []  # values for dropped/'other' series
    count = 0  # iteration counter
    # iterate the sorted averages; either drop or keep
    for k in sorted(avgs, key=avgs.get, reverse=True):
        if count < 10:
            final_data[k] = data[k]
            logger.debug("Keeping data series %s (average over last 7 "
                         "days of data: %d", k, avgs[k])
        else:
            logger.debug("Adding data series %s to 'other' (average over "
                         "last 7 days of data: %d", k, avgs[k])
            other.append(data[k])
        count += 1
    # sum up the other data and add to final
    final_data['other'] = [sum(series) for series in zip(*other)]
    return final_data
def function[_limit_data, parameter[self, data]]: constant[ Find the per-day average of each series in the data over the last 7 days; drop all but the top 10. :param data: original graph data :type data: dict :return: dict containing only the top 10 series, based on average over the last 7 days. :rtype: dict ] if compare[call[name[len], parameter[call[name[data].keys, parameter[]]]] less_or_equal[<=] constant[10]] begin[:] call[name[logger].debug, parameter[constant[Data has less than 10 keys; not limiting]]] return[name[data]] variable[avgs] assign[=] dictionary[[], []] for taget[name[k]] in starred[name[data]] begin[:] if compare[call[name[len], parameter[call[name[data]][name[k]]]] less_or_equal[<=] constant[7]] begin[:] variable[vals] assign[=] call[name[data]][name[k]] call[name[avgs]][name[k]] assign[=] binary_operation[call[name[sum], parameter[name[vals]]] / call[name[len], parameter[name[vals]]]] variable[final_data] assign[=] dictionary[[], []] variable[other] assign[=] list[[]] variable[count] assign[=] constant[0] for taget[name[k]] in starred[call[name[sorted], parameter[name[avgs]]]] begin[:] if compare[name[count] less[<] constant[10]] begin[:] call[name[final_data]][name[k]] assign[=] call[name[data]][name[k]] call[name[logger].debug, parameter[constant[Keeping data series %s (average over last 7 days of data: %d], name[k], call[name[avgs]][name[k]]]] <ast.AugAssign object at 0x7da2054a4d60> call[name[final_data]][constant[other]] assign[=] <ast.ListComp object at 0x7da2054a4fa0> return[name[final_data]]
keyword[def] identifier[_limit_data] ( identifier[self] , identifier[data] ): literal[string] keyword[if] identifier[len] ( identifier[data] . identifier[keys] ())<= literal[int] : identifier[logger] . identifier[debug] ( literal[string] ) keyword[return] identifier[data] identifier[avgs] ={} keyword[for] identifier[k] keyword[in] identifier[data] : keyword[if] identifier[len] ( identifier[data] [ identifier[k] ])<= literal[int] : identifier[vals] = identifier[data] [ identifier[k] ] keyword[else] : identifier[vals] = identifier[data] [ identifier[k] ][- literal[int] :] identifier[avgs] [ identifier[k] ]= identifier[sum] ( identifier[vals] )/ identifier[len] ( identifier[vals] ) identifier[final_data] ={} identifier[other] =[] identifier[count] = literal[int] keyword[for] identifier[k] keyword[in] identifier[sorted] ( identifier[avgs] , identifier[key] = identifier[avgs] . identifier[get] , identifier[reverse] = keyword[True] ): keyword[if] identifier[count] < literal[int] : identifier[final_data] [ identifier[k] ]= identifier[data] [ identifier[k] ] identifier[logger] . identifier[debug] ( literal[string] literal[string] , identifier[k] , identifier[avgs] [ identifier[k] ]) keyword[else] : identifier[logger] . identifier[debug] ( literal[string] literal[string] , identifier[k] , identifier[avgs] [ identifier[k] ]) identifier[other] . identifier[append] ( identifier[data] [ identifier[k] ]) identifier[count] += literal[int] identifier[final_data] [ literal[string] ]=[ identifier[sum] ( identifier[series] ) keyword[for] identifier[series] keyword[in] identifier[zip] (* identifier[other] )] keyword[return] identifier[final_data]
def _limit_data(self, data):
    """
    Find the per-day average of each series in the data over the last 7
    days; drop all but the top 10.

    :param data: original graph data
    :type data: dict
    :return: dict containing only the top 10 series, based on average
      over the last 7 days.
    :rtype: dict
    """
    if len(data.keys()) <= 10:
        logger.debug('Data has less than 10 keys; not limiting')
        return data  # depends on [control=['if'], data=[]]
    # average last 7 days of each series
    avgs = {}
    for k in data:
        if len(data[k]) <= 7:
            vals = data[k]  # depends on [control=['if'], data=[]]
        else:
            vals = data[k][-7:]
        avgs[k] = sum(vals) / len(vals)  # depends on [control=['for'], data=['k']]
    # hold state
    final_data = {}  # final data dict
    other = []  # values for dropped/'other' series
    count = 0  # iteration counter
    # iterate the sorted averages; either drop or keep
    for k in sorted(avgs, key=avgs.get, reverse=True):
        if count < 10:
            final_data[k] = data[k]
            logger.debug('Keeping data series %s (average over last 7 days of data: %d', k, avgs[k])  # depends on [control=['if'], data=[]]
        else:
            logger.debug("Adding data series %s to 'other' (average over last 7 days of data: %d", k, avgs[k])
            other.append(data[k])
        count += 1  # depends on [control=['for'], data=['k']]
    # sum up the other data and add to final
    final_data['other'] = [sum(series) for series in zip(*other)]
    return final_data
def map(self, f, preservesPartitioning=False):
    """Apply function f

    :param f: mapping function
    :rtype: DStream

    Example:

    >>> import pysparkling
    >>> sc = pysparkling.Context()
    >>> ssc = pysparkling.streaming.StreamingContext(sc, 0.1)
    >>> (
    ...     ssc
    ...     .queueStream([[4], [2], [7]])
    ...     .map(lambda e: e + 1)
    ...     .foreachRDD(lambda rdd: print(rdd.collect()))
    ... )
    >>> ssc.start()
    >>> ssc.awaitTermination(0.35)
    [5]
    [3]
    [8]
    """
    return (
        self
        .mapPartitions(lambda p: (f(e) for e in p), preservesPartitioning)
        .transform(lambda rdd: rdd.setName('{}:{}'.format(rdd.prev.name(), f)))
    )
def function[map, parameter[self, f, preservesPartitioning]]: constant[Apply function f :param f: mapping function :rtype: DStream Example: >>> import pysparkling >>> sc = pysparkling.Context() >>> ssc = pysparkling.streaming.StreamingContext(sc, 0.1) >>> ( ... ssc ... .queueStream([[4], [2], [7]]) ... .map(lambda e: e + 1) ... .foreachRDD(lambda rdd: print(rdd.collect())) ... ) >>> ssc.start() >>> ssc.awaitTermination(0.35) [5] [3] [8] ] return[call[call[name[self].mapPartitions, parameter[<ast.Lambda object at 0x7da1b08477c0>, name[preservesPartitioning]]].transform, parameter[<ast.Lambda object at 0x7da1b0844730>]]]
keyword[def] identifier[map] ( identifier[self] , identifier[f] , identifier[preservesPartitioning] = keyword[False] ): literal[string] keyword[return] ( identifier[self] . identifier[mapPartitions] ( keyword[lambda] identifier[p] :( identifier[f] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[p] ), identifier[preservesPartitioning] ) . identifier[transform] ( keyword[lambda] identifier[rdd] : identifier[rdd] . identifier[setName] ( literal[string] . identifier[format] ( identifier[rdd] . identifier[prev] . identifier[name] (), identifier[f] ))) )
def map(self, f, preservesPartitioning=False):
    """Apply function f

    :param f: mapping function
    :rtype: DStream

    Example:

    >>> import pysparkling
    >>> sc = pysparkling.Context()
    >>> ssc = pysparkling.streaming.StreamingContext(sc, 0.1)
    >>> (
    ...     ssc
    ...     .queueStream([[4], [2], [7]])
    ...     .map(lambda e: e + 1)
    ...     .foreachRDD(lambda rdd: print(rdd.collect()))
    ... )
    >>> ssc.start()
    >>> ssc.awaitTermination(0.35)
    [5]
    [3]
    [8]
    """
    return self.mapPartitions(lambda p: (f(e) for e in p), preservesPartitioning).transform(lambda rdd: rdd.setName('{}:{}'.format(rdd.prev.name(), f)))
def getAuthenticator(self, service_request):
    """
    Gets an authenticator callable based on the service_request. This is
    granular, looking at the service method first, then at the service
    level and finally to see if there is a global authenticator function
    for the gateway. Returns C{None} if one could not be found.
    """
    auth = service_request.service.getAuthenticator(service_request)

    if auth is None:
        return self.authenticator

    return auth
def function[getAuthenticator, parameter[self, service_request]]: constant[ Gets an authenticator callable based on the service_request. This is granular, looking at the service method first, then at the service level and finally to see if there is a global authenticator function for the gateway. Returns C{None} if one could not be found. ] variable[auth] assign[=] call[name[service_request].service.getAuthenticator, parameter[name[service_request]]] if compare[name[auth] is constant[None]] begin[:] return[name[self].authenticator] return[name[auth]]
keyword[def] identifier[getAuthenticator] ( identifier[self] , identifier[service_request] ): literal[string] identifier[auth] = identifier[service_request] . identifier[service] . identifier[getAuthenticator] ( identifier[service_request] ) keyword[if] identifier[auth] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[authenticator] keyword[return] identifier[auth]
def getAuthenticator(self, service_request):
    """
    Gets an authenticator callable based on the service_request. This is
    granular, looking at the service method first, then at the service
    level and finally to see if there is a global authenticator function
    for the gateway. Returns C{None} if one could not be found.
    """
    auth = service_request.service.getAuthenticator(service_request)
    if auth is None:
        return self.authenticator  # depends on [control=['if'], data=[]]
    return auth
def spm_dispersion_derivative(tr, oversampling=50, time_length=32., onset=0.):
    """Implementation of the SPM dispersion derivative hrf model

    Parameters
    ----------
    tr: float
        scan repeat time, in seconds

    oversampling: int, optional
        temporal oversampling factor in seconds

    time_length: float, optional
        hrf kernel length, in seconds

    onset : float, optional
        onset of the response in seconds

    Returns
    -------
    dhrf: array of shape(length / tr * oversampling), dtype=float
        dhrf sampling on the oversampled time grid
    """
    dd = .01
    dhrf = 1. / dd * (
        - _gamma_difference_hrf(tr, oversampling, time_length, onset,
                                dispersion=1. + dd)
        + _gamma_difference_hrf(tr, oversampling, time_length, onset))
    return dhrf
def function[spm_dispersion_derivative, parameter[tr, oversampling, time_length, onset]]: constant[Implementation of the SPM dispersion derivative hrf model Parameters ---------- tr: float scan repeat time, in seconds oversampling: int, optional temporal oversampling factor in seconds time_length: float, optional hrf kernel length, in seconds onset : float, optional onset of the response in seconds Returns ------- dhrf: array of shape(length / tr * oversampling), dtype=float dhrf sampling on the oversampled time grid ] variable[dd] assign[=] constant[0.01] variable[dhrf] assign[=] binary_operation[binary_operation[constant[1.0] / name[dd]] * binary_operation[<ast.UnaryOp object at 0x7da1b1005c60> + call[name[_gamma_difference_hrf], parameter[name[tr], name[oversampling], name[time_length], name[onset]]]]] return[name[dhrf]]
keyword[def] identifier[spm_dispersion_derivative] ( identifier[tr] , identifier[oversampling] = literal[int] , identifier[time_length] = literal[int] , identifier[onset] = literal[int] ): literal[string] identifier[dd] = literal[int] identifier[dhrf] = literal[int] / identifier[dd] *( - identifier[_gamma_difference_hrf] ( identifier[tr] , identifier[oversampling] , identifier[time_length] , identifier[onset] , identifier[dispersion] = literal[int] + identifier[dd] ) + identifier[_gamma_difference_hrf] ( identifier[tr] , identifier[oversampling] , identifier[time_length] , identifier[onset] )) keyword[return] identifier[dhrf]
def spm_dispersion_derivative(tr, oversampling=50, time_length=32.0, onset=0.0):
    """Implementation of the SPM dispersion derivative hrf model

    Parameters
    ----------
    tr: float
        scan repeat time, in seconds

    oversampling: int, optional
        temporal oversampling factor in seconds

    time_length: float, optional
        hrf kernel length, in seconds

    onset : float, optional
        onset of the response in seconds

    Returns
    -------
    dhrf: array of shape(length / tr * oversampling), dtype=float
        dhrf sampling on the oversampled time grid
    """
    dd = 0.01
    dhrf = 1.0 / dd * (-_gamma_difference_hrf(tr, oversampling, time_length, onset, dispersion=1.0 + dd) + _gamma_difference_hrf(tr, oversampling, time_length, onset))
    return dhrf
def receive_ack_renewing(self, pkt):
    """Receive ACK in RENEWING state."""
    logger.debug("C3. Received ACK?, in RENEWING state.")
    if self.process_received_ack(pkt):
        logger.debug("C3: T. Received ACK, in RENEWING state, "
                     "raise BOUND.")
        raise self.BOUND()
def function[receive_ack_renewing, parameter[self, pkt]]: constant[Receive ACK in RENEWING state.] call[name[logger].debug, parameter[constant[C3. Received ACK?, in RENEWING state.]]] if call[name[self].process_received_ack, parameter[name[pkt]]] begin[:] call[name[logger].debug, parameter[constant[C3: T. Received ACK, in RENEWING state, raise BOUND.]]] <ast.Raise object at 0x7da1b03bac80>
keyword[def] identifier[receive_ack_renewing] ( identifier[self] , identifier[pkt] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] ) keyword[if] identifier[self] . identifier[process_received_ack] ( identifier[pkt] ): identifier[logger] . identifier[debug] ( literal[string] literal[string] ) keyword[raise] identifier[self] . identifier[BOUND] ()
def receive_ack_renewing(self, pkt):
    """Receive ACK in RENEWING state."""
    logger.debug('C3. Received ACK?, in RENEWING state.')
    if self.process_received_ack(pkt):
        logger.debug('C3: T. Received ACK, in RENEWING state, raise BOUND.')
        raise self.BOUND()  # depends on [control=['if'], data=[]]
def max_linear_flow(Diam, HeadlossCDC, Ratio_Error, KMinor):
    """Return the maximum flow that will meet the linear requirement.

    Maximum flow that can be put through a tube of a given diameter without
    exceeding the allowable deviation from linear head loss behavior
    """
    flow = (pc.area_circle(Diam)).magnitude * np.sqrt(
        (2 * Ratio_Error * HeadlossCDC * pc.gravity) / KMinor)
    return flow.magnitude
def function[max_linear_flow, parameter[Diam, HeadlossCDC, Ratio_Error, KMinor]]: constant[Return the maximum flow that will meet the linear requirement. Maximum flow that can be put through a tube of a given diameter without exceeding the allowable deviation from linear head loss behavior ] variable[flow] assign[=] binary_operation[call[name[pc].area_circle, parameter[name[Diam]]].magnitude * call[name[np].sqrt, parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[2] * name[Ratio_Error]] * name[HeadlossCDC]] * name[pc].gravity] / name[KMinor]]]]] return[name[flow].magnitude]
keyword[def] identifier[max_linear_flow] ( identifier[Diam] , identifier[HeadlossCDC] , identifier[Ratio_Error] , identifier[KMinor] ): literal[string] identifier[flow] =( identifier[pc] . identifier[area_circle] ( identifier[Diam] )). identifier[magnitude] * identifier[np] . identifier[sqrt] (( literal[int] * identifier[Ratio_Error] * identifier[HeadlossCDC] * identifier[pc] . identifier[gravity] )/ identifier[KMinor] ) keyword[return] identifier[flow] . identifier[magnitude]
def max_linear_flow(Diam, HeadlossCDC, Ratio_Error, KMinor):
    """Return the maximum flow that will meet the linear requirement.

    Maximum flow that can be put through a tube of a given diameter without
    exceeding the allowable deviation from linear head loss behavior
    """
    flow = pc.area_circle(Diam).magnitude * np.sqrt(2 * Ratio_Error * HeadlossCDC * pc.gravity / KMinor)
    return flow.magnitude
def edge_predicate(func: DictEdgePredicate) -> EdgePredicate:  # noqa: D202
    """Decorate an edge predicate function that only takes a dictionary as its singular argument.

    Apply this as a decorator to a function that takes a single argument, a PyBEL node data dictionary, to make
    sure that it can also accept a pair of arguments, a BELGraph and a PyBEL node tuple as well.
    """

    @wraps(func)
    def _wrapped(*args):
        x = args[0]

        if isinstance(x, BELGraph):
            u, v, k = args[1:4]
            return func(x[u][v][k])

        return func(*args)

    return _wrapped
def function[edge_predicate, parameter[func]]: constant[Decorate an edge predicate function that only takes a dictionary as its singular argument. Apply this as a decorator to a function that takes a single argument, a PyBEL node data dictionary, to make sure that it can also accept a pair of arguments, a BELGraph and a PyBEL node tuple as well. ] def function[_wrapped, parameter[]]: variable[x] assign[=] call[name[args]][constant[0]] if call[name[isinstance], parameter[name[x], name[BELGraph]]] begin[:] <ast.Tuple object at 0x7da1b0e45ab0> assign[=] call[name[args]][<ast.Slice object at 0x7da1b0e46260>] return[call[name[func], parameter[call[call[call[name[x]][name[u]]][name[v]]][name[k]]]]] return[call[name[func], parameter[<ast.Starred object at 0x7da1b0e44430>]]] return[name[_wrapped]]
keyword[def] identifier[edge_predicate] ( identifier[func] : identifier[DictEdgePredicate] )-> identifier[EdgePredicate] : literal[string] @ identifier[wraps] ( identifier[func] ) keyword[def] identifier[_wrapped] (* identifier[args] ): identifier[x] = identifier[args] [ literal[int] ] keyword[if] identifier[isinstance] ( identifier[x] , identifier[BELGraph] ): identifier[u] , identifier[v] , identifier[k] = identifier[args] [ literal[int] : literal[int] ] keyword[return] identifier[func] ( identifier[x] [ identifier[u] ][ identifier[v] ][ identifier[k] ]) keyword[return] identifier[func] (* identifier[args] ) keyword[return] identifier[_wrapped]
def edge_predicate(func: DictEdgePredicate) -> EdgePredicate:  # noqa: D202
    'Decorate an edge predicate function that only takes a dictionary as its singular argument.\n\n    Apply this as a decorator to a function that takes a single argument, a PyBEL node data dictionary, to make\n    sure that it can also accept a pair of arguments, a BELGraph and a PyBEL node tuple as well.\n    '

    @wraps(func)
    def _wrapped(*args):
        x = args[0]
        if isinstance(x, BELGraph):
            (u, v, k) = args[1:4]
            return func(x[u][v][k])  # depends on [control=['if'], data=[]]
        return func(*args)

    return _wrapped
def child_task(self, q, l, gq, gl):
    '''child process - this holds GUI elements'''
    mp_util.child_close_fds()

    from ..lib import wx_processguard
    from ..lib.wx_loader import wx
    from MAVProxy.modules.mavproxy_misseditor import missionEditorFrame

    self.app = wx.App(False)
    self.app.frame = missionEditorFrame.MissionEditorFrame(parent=None, id=wx.ID_ANY)

    self.app.frame.set_event_queue(q)
    self.app.frame.set_event_queue_lock(l)
    self.app.frame.set_gui_event_queue(gq)
    self.app.frame.set_gui_event_queue_lock(gl)

    self.app.frame.Show()
    self.app.MainLoop()
def function[child_task, parameter[self, q, l, gq, gl]]: constant[child process - this holds GUI elements] call[name[mp_util].child_close_fds, parameter[]] from relative_module[lib] import module[wx_processguard] from relative_module[lib.wx_loader] import module[wx] from relative_module[MAVProxy.modules.mavproxy_misseditor] import module[missionEditorFrame] name[self].app assign[=] call[name[wx].App, parameter[constant[False]]] name[self].app.frame assign[=] call[name[missionEditorFrame].MissionEditorFrame, parameter[]] call[name[self].app.frame.set_event_queue, parameter[name[q]]] call[name[self].app.frame.set_event_queue_lock, parameter[name[l]]] call[name[self].app.frame.set_gui_event_queue, parameter[name[gq]]] call[name[self].app.frame.set_gui_event_queue_lock, parameter[name[gl]]] call[name[self].app.frame.Show, parameter[]] call[name[self].app.MainLoop, parameter[]]
keyword[def] identifier[child_task] ( identifier[self] , identifier[q] , identifier[l] , identifier[gq] , identifier[gl] ): literal[string] identifier[mp_util] . identifier[child_close_fds] () keyword[from] .. identifier[lib] keyword[import] identifier[wx_processguard] keyword[from] .. identifier[lib] . identifier[wx_loader] keyword[import] identifier[wx] keyword[from] identifier[MAVProxy] . identifier[modules] . identifier[mavproxy_misseditor] keyword[import] identifier[missionEditorFrame] identifier[self] . identifier[app] = identifier[wx] . identifier[App] ( keyword[False] ) identifier[self] . identifier[app] . identifier[frame] = identifier[missionEditorFrame] . identifier[MissionEditorFrame] ( identifier[parent] = keyword[None] , identifier[id] = identifier[wx] . identifier[ID_ANY] ) identifier[self] . identifier[app] . identifier[frame] . identifier[set_event_queue] ( identifier[q] ) identifier[self] . identifier[app] . identifier[frame] . identifier[set_event_queue_lock] ( identifier[l] ) identifier[self] . identifier[app] . identifier[frame] . identifier[set_gui_event_queue] ( identifier[gq] ) identifier[self] . identifier[app] . identifier[frame] . identifier[set_gui_event_queue_lock] ( identifier[gl] ) identifier[self] . identifier[app] . identifier[frame] . identifier[Show] () identifier[self] . identifier[app] . identifier[MainLoop] ()
def child_task(self, q, l, gq, gl):
    """child process - this holds GUI elements"""
    mp_util.child_close_fds()
    from ..lib import wx_processguard
    from ..lib.wx_loader import wx
    from MAVProxy.modules.mavproxy_misseditor import missionEditorFrame
    self.app = wx.App(False)
    self.app.frame = missionEditorFrame.MissionEditorFrame(parent=None, id=wx.ID_ANY)
    self.app.frame.set_event_queue(q)
    self.app.frame.set_event_queue_lock(l)
    self.app.frame.set_gui_event_queue(gq)
    self.app.frame.set_gui_event_queue_lock(gl)
    self.app.frame.Show()
    self.app.MainLoop()
def import_keypair(kwargs=None, call=None):
    '''
    Upload public key to cloud provider.
    Similar to EC2 import_keypair.

    .. versionadded:: 2016.11.0

    kwargs
        file(mandatory): public key file-name
        keyname(mandatory): public key name in the provider
    '''
    with salt.utils.files.fopen(kwargs['file'], 'r') as public_key_filename:
        public_key_content = salt.utils.stringutils.to_unicode(
            public_key_filename.read())

    digitalocean_kwargs = {
        'name': kwargs['keyname'],
        'public_key': public_key_content
    }

    created_result = create_key(digitalocean_kwargs, call=call)
    return created_result
def function[import_keypair, parameter[kwargs, call]]: constant[ Upload public key to cloud provider. Similar to EC2 import_keypair. .. versionadded:: 2016.11.0 kwargs file(mandatory): public key file-name keyname(mandatory): public key name in the provider ] with call[name[salt].utils.files.fopen, parameter[call[name[kwargs]][constant[file]], constant[r]]] begin[:] variable[public_key_content] assign[=] call[name[salt].utils.stringutils.to_unicode, parameter[call[name[public_key_filename].read, parameter[]]]] variable[digitalocean_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b21bc640>, <ast.Constant object at 0x7da1b21bc3a0>], [<ast.Subscript object at 0x7da1b21bdc00>, <ast.Name object at 0x7da1b21bc850>]] variable[created_result] assign[=] call[name[create_key], parameter[name[digitalocean_kwargs]]] return[name[created_result]]
keyword[def] identifier[import_keypair] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ): literal[string] keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[kwargs] [ literal[string] ], literal[string] ) keyword[as] identifier[public_key_filename] : identifier[public_key_content] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[public_key_filename] . identifier[read] ()) identifier[digitalocean_kwargs] ={ literal[string] : identifier[kwargs] [ literal[string] ], literal[string] : identifier[public_key_content] } identifier[created_result] = identifier[create_key] ( identifier[digitalocean_kwargs] , identifier[call] = identifier[call] ) keyword[return] identifier[created_result]
def import_keypair(kwargs=None, call=None):
    """
    Upload public key to cloud provider.
    Similar to EC2 import_keypair.

    .. versionadded:: 2016.11.0

    kwargs
        file(mandatory): public key file-name
        keyname(mandatory): public key name in the provider
    """
    with salt.utils.files.fopen(kwargs['file'], 'r') as public_key_filename:
        public_key_content = salt.utils.stringutils.to_unicode(public_key_filename.read())  # depends on [control=['with'], data=['public_key_filename']]
    digitalocean_kwargs = {'name': kwargs['keyname'], 'public_key': public_key_content}
    created_result = create_key(digitalocean_kwargs, call=call)
    return created_result
def message_to_dict(msg):
    """Convert an email message into a dictionary.

    This function transforms an `email.message.Message` object
    into a dictionary. Headers are stored as key:value pairs
    while the body of the message is stored inside `body` key.
    Body may have two other keys inside, 'plain', for plain body
    messages and 'html', for HTML encoded messages.

    The returned dictionary has the type
    `requests.structures.CaseInsensitiveDict` due to same headers
    with different case formats can appear in the same message.

    :param msg: email message of type `email.message.Message`

    :returns : dictionary of type `requests.structures.CaseInsensitiveDict`

    :raises ParseError: when an error occurs transforming the message
        to a dictionary
    """
    def parse_headers(msg):
        headers = {}

        for header, value in msg.items():
            hv = []

            for text, charset in email.header.decode_header(value):
                if type(text) == bytes:
                    charset = charset if charset else 'utf-8'
                    try:
                        text = text.decode(charset, errors='surrogateescape')
                    except (UnicodeError, LookupError):
                        # Try again with a 7bit encoding
                        text = text.decode('ascii', errors='surrogateescape')
                hv.append(text)

            v = ' '.join(hv)
            headers[header] = v if v else None

        return headers

    def parse_payload(msg):
        body = {}

        if not msg.is_multipart():
            payload = decode_payload(msg)
            subtype = msg.get_content_subtype()
            body[subtype] = [payload]
        else:
            # Include all the attached texts if it is multipart
            # Ignores binary parts by default
            for part in email.iterators.typed_subpart_iterator(msg):
                payload = decode_payload(part)
                subtype = part.get_content_subtype()
                body.setdefault(subtype, []).append(payload)

        return {k: '\n'.join(v) for k, v in body.items()}

    def decode_payload(msg_or_part):
        charset = msg_or_part.get_content_charset('utf-8')
        payload = msg_or_part.get_payload(decode=True)

        try:
            payload = payload.decode(charset, errors='surrogateescape')
        except (UnicodeError, LookupError):
            # Try again with a 7bit encoding
            payload = payload.decode('ascii', errors='surrogateescape')

        return payload

    # The function starts here
    message = requests.structures.CaseInsensitiveDict()

    if isinstance(msg, mailbox.mboxMessage):
        message['unixfrom'] = msg.get_from()
    else:
        message['unixfrom'] = None

    try:
        for k, v in parse_headers(msg).items():
            message[k] = v
        message['body'] = parse_payload(msg)
    except UnicodeError as e:
        raise ParseError(cause=str(e))

    return message
def function[message_to_dict, parameter[msg]]: constant[Convert an email message into a dictionary. This function transforms an `email.message.Message` object into a dictionary. Headers are stored as key:value pairs while the body of the message is stored inside `body` key. Body may have two other keys inside, 'plain', for plain body messages and 'html', for HTML encoded messages. The returned dictionary has the type `requests.structures.CaseInsensitiveDict` due to same headers with different case formats can appear in the same message. :param msg: email message of type `email.message.Message` :returns : dictionary of type `requests.structures.CaseInsensitiveDict` :raises ParseError: when an error occurs transforming the message to a dictionary ] def function[parse_headers, parameter[msg]]: variable[headers] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b0314250>, <ast.Name object at 0x7da1b03168f0>]]] in starred[call[name[msg].items, parameter[]]] begin[:] variable[hv] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b0314f10>, <ast.Name object at 0x7da1b0317e80>]]] in starred[call[name[email].header.decode_header, parameter[name[value]]]] begin[:] if compare[call[name[type], parameter[name[text]]] equal[==] name[bytes]] begin[:] variable[charset] assign[=] <ast.IfExp object at 0x7da1b0314fa0> <ast.Try object at 0x7da1b0314d90> call[name[hv].append, parameter[name[text]]] variable[v] assign[=] call[constant[ ].join, parameter[name[hv]]] call[name[headers]][name[header]] assign[=] <ast.IfExp object at 0x7da1b0316a40> return[name[headers]] def function[parse_payload, parameter[msg]]: variable[body] assign[=] dictionary[[], []] if <ast.UnaryOp object at 0x7da1b0314130> begin[:] variable[payload] assign[=] call[name[decode_payload], parameter[name[msg]]] variable[subtype] assign[=] call[name[msg].get_content_subtype, parameter[]] call[name[body]][name[subtype]] assign[=] list[[<ast.Name object at 0x7da1b0315f30>]] return[<ast.DictComp object at 0x7da1b0317a30>] def function[decode_payload, parameter[msg_or_part]]: variable[charset] assign[=] call[name[msg_or_part].get_content_charset, parameter[constant[utf-8]]] variable[payload] assign[=] call[name[msg_or_part].get_payload, parameter[]] <ast.Try object at 0x7da1b0315d80> return[name[payload]] variable[message] assign[=] call[name[requests].structures.CaseInsensitiveDict, parameter[]] if call[name[isinstance], parameter[name[msg], name[mailbox].mboxMessage]] begin[:] call[name[message]][constant[unixfrom]] assign[=] call[name[msg].get_from, parameter[]] <ast.Try object at 0x7da1b059d660> return[name[message]]
keyword[def] identifier[message_to_dict] ( identifier[msg] ): literal[string] keyword[def] identifier[parse_headers] ( identifier[msg] ): identifier[headers] ={} keyword[for] identifier[header] , identifier[value] keyword[in] identifier[msg] . identifier[items] (): identifier[hv] =[] keyword[for] identifier[text] , identifier[charset] keyword[in] identifier[email] . identifier[header] . identifier[decode_header] ( identifier[value] ): keyword[if] identifier[type] ( identifier[text] )== identifier[bytes] : identifier[charset] = identifier[charset] keyword[if] identifier[charset] keyword[else] literal[string] keyword[try] : identifier[text] = identifier[text] . identifier[decode] ( identifier[charset] , identifier[errors] = literal[string] ) keyword[except] ( identifier[UnicodeError] , identifier[LookupError] ): identifier[text] = identifier[text] . identifier[decode] ( literal[string] , identifier[errors] = literal[string] ) identifier[hv] . identifier[append] ( identifier[text] ) identifier[v] = literal[string] . identifier[join] ( identifier[hv] ) identifier[headers] [ identifier[header] ]= identifier[v] keyword[if] identifier[v] keyword[else] keyword[None] keyword[return] identifier[headers] keyword[def] identifier[parse_payload] ( identifier[msg] ): identifier[body] ={} keyword[if] keyword[not] identifier[msg] . identifier[is_multipart] (): identifier[payload] = identifier[decode_payload] ( identifier[msg] ) identifier[subtype] = identifier[msg] . identifier[get_content_subtype] () identifier[body] [ identifier[subtype] ]=[ identifier[payload] ] keyword[else] : keyword[for] identifier[part] keyword[in] identifier[email] . identifier[iterators] . identifier[typed_subpart_iterator] ( identifier[msg] ): identifier[payload] = identifier[decode_payload] ( identifier[part] ) identifier[subtype] = identifier[part] . identifier[get_content_subtype] () identifier[body] . identifier[setdefault] ( identifier[subtype] ,[]). identifier[append] ( identifier[payload] ) keyword[return] { identifier[k] : literal[string] . identifier[join] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[body] . identifier[items] ()} keyword[def] identifier[decode_payload] ( identifier[msg_or_part] ): identifier[charset] = identifier[msg_or_part] . identifier[get_content_charset] ( literal[string] ) identifier[payload] = identifier[msg_or_part] . identifier[get_payload] ( identifier[decode] = keyword[True] ) keyword[try] : identifier[payload] = identifier[payload] . identifier[decode] ( identifier[charset] , identifier[errors] = literal[string] ) keyword[except] ( identifier[UnicodeError] , identifier[LookupError] ): identifier[payload] = identifier[payload] . identifier[decode] ( literal[string] , identifier[errors] = literal[string] ) keyword[return] identifier[payload] identifier[message] = identifier[requests] . identifier[structures] . identifier[CaseInsensitiveDict] () keyword[if] identifier[isinstance] ( identifier[msg] , identifier[mailbox] . identifier[mboxMessage] ): identifier[message] [ literal[string] ]= identifier[msg] . identifier[get_from] () keyword[else] : identifier[message] [ literal[string] ]= keyword[None] keyword[try] : keyword[for] identifier[k] , identifier[v] keyword[in] identifier[parse_headers] ( identifier[msg] ). 
identifier[items] (): identifier[message] [ identifier[k] ]= identifier[v] identifier[message] [ literal[string] ]= identifier[parse_payload] ( identifier[msg] ) keyword[except] identifier[UnicodeError] keyword[as] identifier[e] : keyword[raise] identifier[ParseError] ( identifier[cause] = identifier[str] ( identifier[e] )) keyword[return] identifier[message]
def message_to_dict(msg): """Convert an email message into a dictionary. This function transforms an `email.message.Message` object into a dictionary. Headers are stored as key:value pairs while the body of the message is stored inside `body` key. Body may have two other keys inside, 'plain', for plain body messages and 'html', for HTML encoded messages. The returned dictionary has the type `requests.structures.CaseInsensitiveDict` due to same headers with different case formats can appear in the same message. :param msg: email message of type `email.message.Message` :returns : dictionary of type `requests.structures.CaseInsensitiveDict` :raises ParseError: when an error occurs transforming the message to a dictionary """ def parse_headers(msg): headers = {} for (header, value) in msg.items(): hv = [] for (text, charset) in email.header.decode_header(value): if type(text) == bytes: charset = charset if charset else 'utf-8' try: text = text.decode(charset, errors='surrogateescape') # depends on [control=['try'], data=[]] except (UnicodeError, LookupError): # Try again with a 7bit encoding text = text.decode('ascii', errors='surrogateescape') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] hv.append(text) # depends on [control=['for'], data=[]] v = ' '.join(hv) headers[header] = v if v else None # depends on [control=['for'], data=[]] return headers def parse_payload(msg): body = {} if not msg.is_multipart(): payload = decode_payload(msg) subtype = msg.get_content_subtype() body[subtype] = [payload] # depends on [control=['if'], data=[]] else: # Include all the attached texts if it is multipart # Ignores binary parts by default for part in email.iterators.typed_subpart_iterator(msg): payload = decode_payload(part) subtype = part.get_content_subtype() body.setdefault(subtype, []).append(payload) # depends on [control=['for'], data=['part']] return {k: '\n'.join(v) for (k, v) in body.items()} def decode_payload(msg_or_part): charset = msg_or_part.get_content_charset('utf-8') payload = msg_or_part.get_payload(decode=True) try: payload = payload.decode(charset, errors='surrogateescape') # depends on [control=['try'], data=[]] except (UnicodeError, LookupError): # Try again with a 7bit encoding payload = payload.decode('ascii', errors='surrogateescape') # depends on [control=['except'], data=[]] return payload # The function starts here message = requests.structures.CaseInsensitiveDict() if isinstance(msg, mailbox.mboxMessage): message['unixfrom'] = msg.get_from() # depends on [control=['if'], data=[]] else: message['unixfrom'] = None try: for (k, v) in parse_headers(msg).items(): message[k] = v # depends on [control=['for'], data=[]] message['body'] = parse_payload(msg) # depends on [control=['try'], data=[]] except UnicodeError as e: raise ParseError(cause=str(e)) # depends on [control=['except'], data=['e']] return message
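A minimal usage sketch for `message_to_dict`, assuming the function and its `email`, `mailbox` and `requests` imports are in scope; the sample message is made up for illustration.

import email

raw = ("From: Alice <[email protected]>\n"
       "Subject: =?utf-8?q?hol=C3=A9?=\n"
       "Content-Type: text/plain; charset=utf-8\n"
       "\n"
       "hello world\n")
msg = email.message_from_string(raw)
d = message_to_dict(msg)
print(d['subject'])        # 'holé' -- decoded RFC 2047 header
print(d['SUBJECT'])        # same value: lookups are case-insensitive
print(d['body']['plain'])  # 'hello world\n'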
def render_flow(self, data):
    """Render the OpenDocument with the user data.

    @param data: the input stream of user data. This should be a
    dictionary whose keys name the values accessible to your report.
    @type data: dictionary
    """

    self.render_tree(data)

    # then reconstruct a new ODT document with the generated content
    for status in self.__save_output():
        yield status
def function[render_flow, parameter[self, data]]: constant[render the OpenDocument with the user data @param data: the input stream of user data. This should be a dictionary mapping, keys being the values accessible to your report. @type data: dictionary ] call[name[self].render_tree, parameter[name[data]]] for taget[name[status]] in starred[call[name[self].__save_output, parameter[]]] begin[:] <ast.Yield object at 0x7da1b2546ef0>
keyword[def] identifier[render_flow] ( identifier[self] , identifier[data] ): literal[string] identifier[self] . identifier[render_tree] ( identifier[data] ) keyword[for] identifier[status] keyword[in] identifier[self] . identifier[__save_output] (): keyword[yield] identifier[status]
def render_flow(self, data): """render the OpenDocument with the user data @param data: the input stream of user data. This should be a dictionary mapping, keys being the values accessible to your report. @type data: dictionary """ self.render_tree(data) # then reconstruct a new ODT document with the generated content for status in self.__save_output(): yield status # depends on [control=['for'], data=['status']]
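A hedged sketch of driving the `render_flow` generator; the `Template` class and its constructor arguments are assumptions (py3o-style), not taken from the snippet above.

# Hypothetical caller; Template and its arguments are assumed names.
template = Template('in.odt', 'out.odt')
for status in template.render_flow({'title': 'Quarterly report'}):
    # each yielded status reports progress of the output reconstruction
    print(status)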
def _extract_cell_info(self, structure, site_idx, sites, targets, voro,
                       compute_adj_neighbors=False):
    """Get the information about a certain atom from the results of a tessellation

    Args:
        structure (Structure) - Structure being assessed
        site_idx (int) - Index of the atom in question
        sites ([Site]) - List of all sites in the tessellation
        targets ([Element]) - Target elements
        voro - Output of qvoronoi
        compute_adj_neighbors (boolean) - Whether to compute which neighbors are adjacent
    Returns:
        A dict of sites sharing a common Voronoi facet. Key is facet id
         (not useful) and values are dictionaries containing statistics
         about the facet:
            - site: Pymatgen site
            - solid_angle - Solid angle subtended by face
            - angle_normalized - Solid angle normalized such that the
              face with the largest solid angle has a value of 1
            - area - Area of the facet
            - face_dist - Distance between site n and the facet
            - volume - Volume of Voronoi cell for this face
            - n_verts - Number of vertices on the facet
            - adj_neighbors - Facet id's for the adjacent neighbors
    """
    # Get the coordinates of every vertex
    all_vertices = voro.vertices

    # Get the coordinates of the central site
    center_coords = sites[site_idx].coords

    # Iterate through all the faces in the tessellation
    results = {}
    for nn, vind in voro.ridge_dict.items():
        # Get only those that include the site in question
        if site_idx in nn:
            other_site = nn[0] if nn[1] == site_idx else nn[1]
            if -1 in vind:
                # -1 indices correspond to the Voronoi cell
                #  missing a face
                if self.allow_pathological:
                    continue
                else:
                    raise RuntimeError("This structure is pathological,"
                                       " infinite vertex in the voronoi "
                                       "construction")

            # Get the solid angle of the face
            facets = [all_vertices[i] for i in vind]
            angle = solid_angle(center_coords, facets)

            # Compute the volume associated with this face
            volume = 0
            # qvoronoi returns vertices in CCW order, so I can break
            # the face up into segments (0,1,2), (0,2,3), ... to compute
            # its area, where each number is a vertex index
            for j, k in zip(vind[1:], vind[2:]):
                volume += vol_tetra(center_coords,
                                    all_vertices[vind[0]],
                                    all_vertices[j],
                                    all_vertices[k])

            # Compute the distance of the site to the face
            face_dist = np.linalg.norm(
                center_coords - sites[other_site].coords) / 2

            # Compute the area of the face (knowing V=Ad/3)
            face_area = 3 * volume / face_dist

            # Compute the normal of the facet
            normal = np.subtract(sites[other_site].coords, center_coords)
            normal /= np.linalg.norm(normal)

            # Store by face index
            results[other_site] = {
                'site': sites[other_site],
                'normal': normal,
                'solid_angle': angle,
                'volume': volume,
                'face_dist': face_dist,
                'area': face_area,
                'n_verts': len(vind)
            }

            # If we are computing which neighbors are adjacent, store the vertices
            if compute_adj_neighbors:
                results[other_site]['verts'] = vind

    # Get only target elements
    resultweighted = {}
    for nn_index, nstats in results.items():
        # Check if this is a target site
        nn = nstats['site']
        if nn.is_ordered:
            if nn.specie in targets:
                resultweighted[nn_index] = nstats
        else:  # nn site is disordered
            for disordered_sp in nn.species.keys():
                if disordered_sp in targets:
                    resultweighted[nn_index] = nstats

    # If desired, determine which neighbors are adjacent
    if compute_adj_neighbors:
        # Initialize storage for the adjacent neighbors
        adj_neighbors = dict((i, []) for i in resultweighted.keys())

        # Two faces are adjacent when they share exactly two vertices
        for a_ind, a_nninfo in resultweighted.items():
            # Get the vertex indices for this face
            a_verts = set(a_nninfo['verts'])

            # Loop over all neighbors that have an index lower than this one
            # The goal here is to exploit the fact that neighbor adjacency
            #  is symmetric (if A is adj to B, B is adj to A)
            for b_ind, b_nninfo in resultweighted.items():
                if b_ind > a_ind:
                    continue
                if len(a_verts.intersection(b_nninfo['verts'])) == 2:
                    adj_neighbors[a_ind].append(b_ind)
                    adj_neighbors[b_ind].append(a_ind)

        # Store the results in the nn_info
        for key, neighbors in adj_neighbors.items():
            resultweighted[key]['adj_neighbors'] = neighbors

    return resultweighted
def function[_extract_cell_info, parameter[self, structure, site_idx, sites, targets, voro, compute_adj_neighbors]]: constant[Get the information about a certain atom from the results of a tessellation Args: structure (Structure) - Structure being assessed site_idx (int) - Index of the atom in question sites ([Site]) - List of all sites in the tessellation targets ([Element]) - Target elements voro - Output of qvoronoi compute_adj_neighbors (boolean) - Whether to compute which neighbors are adjacent Returns: A dict of sites sharing a common Voronoi facet. Key is facet id (not useful) and values are dictionaries containing statistics about the facet: - site: Pymatgen site - solid_angle - Solid angle subtended by face - angle_normalized - Solid angle normalized such that the faces with the largest - area - Area of the facet - face_dist - Distance between site n and the facet - volume - Volume of Voronoi cell for this face - n_verts - Number of vertices on the facet - adj_neighbors - Facet id's for the adjacent neighbors ] variable[all_vertices] assign[=] name[voro].vertices variable[center_coords] assign[=] call[name[sites]][name[site_idx]].coords variable[results] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da18fe929e0>, <ast.Name object at 0x7da18fe925c0>]]] in starred[call[name[voro].ridge_dict.items, parameter[]]] begin[:] if compare[name[site_idx] in name[nn]] begin[:] variable[other_site] assign[=] <ast.IfExp object at 0x7da18fe90580> if compare[<ast.UnaryOp object at 0x7da18fe92ec0> in name[vind]] begin[:] if name[self].allow_pathological begin[:] continue variable[facets] assign[=] <ast.ListComp object at 0x7da18fe90460> variable[angle] assign[=] call[name[solid_angle], parameter[name[center_coords], name[facets]]] variable[volume] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da18fe91b10>, <ast.Name object at 0x7da18fe91900>]]] in starred[call[name[zip], parameter[call[name[vind]][<ast.Slice object at 0x7da18fe928f0>], call[name[vind]][<ast.Slice object at 0x7da18fe930d0>]]]] begin[:] <ast.AugAssign object at 0x7da18fe91150> variable[face_dist] assign[=] binary_operation[call[name[np].linalg.norm, parameter[binary_operation[name[center_coords] - call[name[sites]][name[other_site]].coords]]] / constant[2]] variable[face_area] assign[=] binary_operation[binary_operation[constant[3] * name[volume]] / name[face_dist]] variable[normal] assign[=] call[name[np].subtract, parameter[call[name[sites]][name[other_site]].coords, name[center_coords]]] <ast.AugAssign object at 0x7da18fe93370> call[name[results]][name[other_site]] assign[=] dictionary[[<ast.Constant object at 0x7da18fe90640>, <ast.Constant object at 0x7da18fe93850>, <ast.Constant object at 0x7da18fe93460>, <ast.Constant object at 0x7da18fe90c40>, <ast.Constant object at 0x7da18fe93a60>, <ast.Constant object at 0x7da18fe92800>, <ast.Constant object at 0x7da18fe90340>], [<ast.Subscript object at 0x7da18fe91f30>, <ast.Name object at 0x7da18fe92110>, <ast.Name object at 0x7da18fe93550>, <ast.Name object at 0x7da18fe90d00>, <ast.Name object at 0x7da18fe92260>, <ast.Name object at 0x7da18fe93640>, <ast.Call object at 0x7da18fe90940>]] if name[compute_adj_neighbors] begin[:] call[call[name[results]][name[other_site]]][constant[verts]] assign[=] name[vind] variable[resultweighted] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da20c7caa70>, <ast.Name object at 0x7da20c7c9ab0>]]] in starred[call[name[results].items, parameter[]]] begin[:] variable[nn] assign[=] 
call[name[nstats]][constant[site]] if name[nn].is_ordered begin[:] if compare[name[nn].specie in name[targets]] begin[:] call[name[resultweighted]][name[nn_index]] assign[=] name[nstats] if name[compute_adj_neighbors] begin[:] variable[adj_neighbors] assign[=] call[name[dict], parameter[<ast.GeneratorExp object at 0x7da18fe90fa0>]] for taget[tuple[[<ast.Name object at 0x7da18fe92080>, <ast.Name object at 0x7da18fe92c20>]]] in starred[call[name[resultweighted].items, parameter[]]] begin[:] variable[a_verts] assign[=] call[name[set], parameter[call[name[a_nninfo]][constant[verts]]]] for taget[tuple[[<ast.Name object at 0x7da18fe90070>, <ast.Name object at 0x7da18fe93a90>]]] in starred[call[name[resultweighted].items, parameter[]]] begin[:] if compare[name[b_ind] greater[>] name[a_ind]] begin[:] continue if compare[call[name[len], parameter[call[name[a_verts].intersection, parameter[call[name[b_nninfo]][constant[verts]]]]]] equal[==] constant[2]] begin[:] call[call[name[adj_neighbors]][name[a_ind]].append, parameter[name[b_ind]]] call[call[name[adj_neighbors]][name[b_ind]].append, parameter[name[a_ind]]] for taget[tuple[[<ast.Name object at 0x7da18fe92a40>, <ast.Name object at 0x7da18fe918d0>]]] in starred[call[name[adj_neighbors].items, parameter[]]] begin[:] call[call[name[resultweighted]][name[key]]][constant[adj_neighbors]] assign[=] name[neighbors] return[name[resultweighted]]
keyword[def] identifier[_extract_cell_info] ( identifier[self] , identifier[structure] , identifier[site_idx] , identifier[sites] , identifier[targets] , identifier[voro] , identifier[compute_adj_neighbors] = keyword[False] ): literal[string] identifier[all_vertices] = identifier[voro] . identifier[vertices] identifier[center_coords] = identifier[sites] [ identifier[site_idx] ]. identifier[coords] identifier[results] ={} keyword[for] identifier[nn] , identifier[vind] keyword[in] identifier[voro] . identifier[ridge_dict] . identifier[items] (): keyword[if] identifier[site_idx] keyword[in] identifier[nn] : identifier[other_site] = identifier[nn] [ literal[int] ] keyword[if] identifier[nn] [ literal[int] ]== identifier[site_idx] keyword[else] identifier[nn] [ literal[int] ] keyword[if] - literal[int] keyword[in] identifier[vind] : keyword[if] identifier[self] . identifier[allow_pathological] : keyword[continue] keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] literal[string] ) identifier[facets] =[ identifier[all_vertices] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[vind] ] identifier[angle] = identifier[solid_angle] ( identifier[center_coords] , identifier[facets] ) identifier[volume] = literal[int] keyword[for] identifier[j] , identifier[k] keyword[in] identifier[zip] ( identifier[vind] [ literal[int] :], identifier[vind] [ literal[int] :]): identifier[volume] += identifier[vol_tetra] ( identifier[center_coords] , identifier[all_vertices] [ identifier[vind] [ literal[int] ]], identifier[all_vertices] [ identifier[j] ], identifier[all_vertices] [ identifier[k] ]) identifier[face_dist] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[center_coords] - identifier[sites] [ identifier[other_site] ]. identifier[coords] )/ literal[int] identifier[face_area] = literal[int] * identifier[volume] / identifier[face_dist] identifier[normal] = identifier[np] . identifier[subtract] ( identifier[sites] [ identifier[other_site] ]. identifier[coords] , identifier[center_coords] ) identifier[normal] /= identifier[np] . identifier[linalg] . identifier[norm] ( identifier[normal] ) identifier[results] [ identifier[other_site] ]={ literal[string] : identifier[sites] [ identifier[other_site] ], literal[string] : identifier[normal] , literal[string] : identifier[angle] , literal[string] : identifier[volume] , literal[string] : identifier[face_dist] , literal[string] : identifier[face_area] , literal[string] : identifier[len] ( identifier[vind] ) } keyword[if] identifier[compute_adj_neighbors] : identifier[results] [ identifier[other_site] ][ literal[string] ]= identifier[vind] identifier[resultweighted] ={} keyword[for] identifier[nn_index] , identifier[nstats] keyword[in] identifier[results] . identifier[items] (): identifier[nn] = identifier[nstats] [ literal[string] ] keyword[if] identifier[nn] . identifier[is_ordered] : keyword[if] identifier[nn] . identifier[specie] keyword[in] identifier[targets] : identifier[resultweighted] [ identifier[nn_index] ]= identifier[nstats] keyword[else] : keyword[for] identifier[disordered_sp] keyword[in] identifier[nn] . identifier[species] . identifier[keys] (): keyword[if] identifier[disordered_sp] keyword[in] identifier[targets] : identifier[resultweighted] [ identifier[nn_index] ]= identifier[nstats] keyword[if] identifier[compute_adj_neighbors] : identifier[adj_neighbors] = identifier[dict] (( identifier[i] ,[]) keyword[for] identifier[i] keyword[in] identifier[resultweighted] . 
identifier[keys] ()) keyword[for] identifier[a_ind] , identifier[a_nninfo] keyword[in] identifier[resultweighted] . identifier[items] (): identifier[a_verts] = identifier[set] ( identifier[a_nninfo] [ literal[string] ]) keyword[for] identifier[b_ind] , identifier[b_nninfo] keyword[in] identifier[resultweighted] . identifier[items] (): keyword[if] identifier[b_ind] > identifier[a_ind] : keyword[continue] keyword[if] identifier[len] ( identifier[a_verts] . identifier[intersection] ( identifier[b_nninfo] [ literal[string] ]))== literal[int] : identifier[adj_neighbors] [ identifier[a_ind] ]. identifier[append] ( identifier[b_ind] ) identifier[adj_neighbors] [ identifier[b_ind] ]. identifier[append] ( identifier[a_ind] ) keyword[for] identifier[key] , identifier[neighbors] keyword[in] identifier[adj_neighbors] . identifier[items] (): identifier[resultweighted] [ identifier[key] ][ literal[string] ]= identifier[neighbors] keyword[return] identifier[resultweighted]
def _extract_cell_info(self, structure, site_idx, sites, targets, voro, compute_adj_neighbors=False): """Get the information about a certain atom from the results of a tessellation Args: structure (Structure) - Structure being assessed site_idx (int) - Index of the atom in question sites ([Site]) - List of all sites in the tessellation targets ([Element]) - Target elements voro - Output of qvoronoi compute_adj_neighbors (boolean) - Whether to compute which neighbors are adjacent Returns: A dict of sites sharing a common Voronoi facet. Key is facet id (not useful) and values are dictionaries containing statistics about the facet: - site: Pymatgen site - solid_angle - Solid angle subtended by face - angle_normalized - Solid angle normalized such that the faces with the largest - area - Area of the facet - face_dist - Distance between site n and the facet - volume - Volume of Voronoi cell for this face - n_verts - Number of vertices on the facet - adj_neighbors - Facet id's for the adjacent neighbors """ # Get the coordinates of every vertex all_vertices = voro.vertices # Get the coordinates of the central site center_coords = sites[site_idx].coords # Iterate through all the faces in the tessellation results = {} for (nn, vind) in voro.ridge_dict.items(): # Get only those that include the cite in question if site_idx in nn: other_site = nn[0] if nn[1] == site_idx else nn[1] if -1 in vind: # -1 indices correspond to the Voronoi cell # missing a face if self.allow_pathological: continue # depends on [control=['if'], data=[]] else: raise RuntimeError('This structure is pathological, infinite vertex in the voronoi construction') # depends on [control=['if'], data=[]] # Get the solid angle of the face facets = [all_vertices[i] for i in vind] angle = solid_angle(center_coords, facets) # Compute the volume of associated with this face volume = 0 # qvoronoi returns vertices in CCW order, so I can break # the face up in to segments (0,1,2), (0,2,3), ... 
to compute # its area where each number is a vertex size for (j, k) in zip(vind[1:], vind[2:]): volume += vol_tetra(center_coords, all_vertices[vind[0]], all_vertices[j], all_vertices[k]) # depends on [control=['for'], data=[]] # Compute the distance of the site to the face face_dist = np.linalg.norm(center_coords - sites[other_site].coords) / 2 # Compute the area of the face (knowing V=Ad/3) face_area = 3 * volume / face_dist # Compute the normal of the facet normal = np.subtract(sites[other_site].coords, center_coords) normal /= np.linalg.norm(normal) # Store by face index results[other_site] = {'site': sites[other_site], 'normal': normal, 'solid_angle': angle, 'volume': volume, 'face_dist': face_dist, 'area': face_area, 'n_verts': len(vind)} # If we are computing which neighbors are adjacent, store the vertices if compute_adj_neighbors: results[other_site]['verts'] = vind # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['site_idx', 'nn']] # depends on [control=['for'], data=[]] # Get only target elements resultweighted = {} for (nn_index, nstats) in results.items(): # Check if this is a target site nn = nstats['site'] if nn.is_ordered: if nn.specie in targets: resultweighted[nn_index] = nstats # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # is nn site is disordered for disordered_sp in nn.species.keys(): if disordered_sp in targets: resultweighted[nn_index] = nstats # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['disordered_sp']] # depends on [control=['for'], data=[]] # If desired, determine which neighbors are adjacent if compute_adj_neighbors: # Initialize storage for the adjacent neighbors adj_neighbors = dict(((i, []) for i in resultweighted.keys())) # Find the neighbors that are adjacent by finding those # that contain exactly two vertices for (a_ind, a_nninfo) in resultweighted.items(): # Get the indices for this site a_verts = set(a_nninfo['verts']) # Loop over all neighbors that have an index lower that this one # The goal here is to exploit the fact that neighbor adjacency is symmetric # (if A is adj to B, B is adj to A) for (b_ind, b_nninfo) in resultweighted.items(): if b_ind > a_ind: continue # depends on [control=['if'], data=[]] if len(a_verts.intersection(b_nninfo['verts'])) == 2: adj_neighbors[a_ind].append(b_ind) adj_neighbors[b_ind].append(a_ind) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # Store the results in the nn_info for (key, neighbors) in adj_neighbors.items(): resultweighted[key]['adj_neighbors'] = neighbors # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] return resultweighted
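The face area in `_extract_cell_info` comes from the pyramid relation V = A·d/3 applied to the summed tetrahedra. Below is a standalone numeric check of that decomposition, using a local stand-in for pymatgen's `vol_tetra` (the standard scalar-triple-product formula) on one square face of a simple-cubic Voronoi cell.

import numpy as np

def vol_tetra(a, b, c, d):
    # Volume of a tetrahedron from its four vertices.
    return abs(np.dot(b - a, np.cross(c - a, d - a))) / 6.0

center = np.array([0.0, 0.0, 0.0])
# One square face of the Voronoi cell of a simple cubic lattice (a = 2):
face = [np.array(v, dtype=float) for v in
        [(1, -1, -1), (1, 1, -1), (1, 1, 1), (1, -1, 1)]]
# Same fan decomposition as the loop above: (0,1,2), (0,2,3)
volume = sum(vol_tetra(center, face[0], face[j], face[k])
             for j, k in zip((1, 2), (2, 3)))
face_dist = 1.0                   # |center -> neighbour| / 2
print(3 * volume / face_dist)     # -> 4.0, the area of the 2x2 face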
def remove_plugin_filepaths(self, filepaths): """ Removes `filepaths` from `self.plugin_filepaths`. Recommend passing in absolute filepaths. Method will attempt to convert to absolute paths if not passed in. `filepaths` can be a single object or an iterable. """ filepaths = util.to_absolute_paths(filepaths) self.plugin_filepaths = util.remove_from_set(self.plugin_filepaths, filepaths)
def function[remove_plugin_filepaths, parameter[self, filepaths]]: constant[ Removes `filepaths` from `self.plugin_filepaths`. Recommend passing in absolute filepaths. Method will attempt to convert to absolute paths if not passed in. `filepaths` can be a single object or an iterable. ] variable[filepaths] assign[=] call[name[util].to_absolute_paths, parameter[name[filepaths]]] name[self].plugin_filepaths assign[=] call[name[util].remove_from_set, parameter[name[self].plugin_filepaths, name[filepaths]]]
keyword[def] identifier[remove_plugin_filepaths] ( identifier[self] , identifier[filepaths] ): literal[string] identifier[filepaths] = identifier[util] . identifier[to_absolute_paths] ( identifier[filepaths] ) identifier[self] . identifier[plugin_filepaths] = identifier[util] . identifier[remove_from_set] ( identifier[self] . identifier[plugin_filepaths] , identifier[filepaths] )
def remove_plugin_filepaths(self, filepaths): """ Removes `filepaths` from `self.plugin_filepaths`. Recommend passing in absolute filepaths. Method will attempt to convert to absolute paths if not passed in. `filepaths` can be a single object or an iterable. """ filepaths = util.to_absolute_paths(filepaths) self.plugin_filepaths = util.remove_from_set(self.plugin_filepaths, filepaths)
def get_product_set( self, location, product_set_id, project_id=None, retry=None, timeout=None, metadata=None ): """ For the documentation see: :class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator` """ client = self.get_conn() name = ProductSearchClient.product_set_path(project_id, location, product_set_id) self.log.info('Retrieving ProductSet: %s', name) response = client.get_product_set(name=name, retry=retry, timeout=timeout, metadata=metadata) self.log.info('ProductSet retrieved.') self.log.debug('ProductSet retrieved:\n%s', response) return MessageToDict(response)
def function[get_product_set, parameter[self, location, product_set_id, project_id, retry, timeout, metadata]]: constant[ For the documentation see: :class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator` ] variable[client] assign[=] call[name[self].get_conn, parameter[]] variable[name] assign[=] call[name[ProductSearchClient].product_set_path, parameter[name[project_id], name[location], name[product_set_id]]] call[name[self].log.info, parameter[constant[Retrieving ProductSet: %s], name[name]]] variable[response] assign[=] call[name[client].get_product_set, parameter[]] call[name[self].log.info, parameter[constant[ProductSet retrieved.]]] call[name[self].log.debug, parameter[constant[ProductSet retrieved: %s], name[response]]] return[call[name[MessageToDict], parameter[name[response]]]]
keyword[def] identifier[get_product_set] ( identifier[self] , identifier[location] , identifier[product_set_id] , identifier[project_id] = keyword[None] , identifier[retry] = keyword[None] , identifier[timeout] = keyword[None] , identifier[metadata] = keyword[None] ): literal[string] identifier[client] = identifier[self] . identifier[get_conn] () identifier[name] = identifier[ProductSearchClient] . identifier[product_set_path] ( identifier[project_id] , identifier[location] , identifier[product_set_id] ) identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[name] ) identifier[response] = identifier[client] . identifier[get_product_set] ( identifier[name] = identifier[name] , identifier[retry] = identifier[retry] , identifier[timeout] = identifier[timeout] , identifier[metadata] = identifier[metadata] ) identifier[self] . identifier[log] . identifier[info] ( literal[string] ) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[response] ) keyword[return] identifier[MessageToDict] ( identifier[response] )
def get_product_set(self, location, product_set_id, project_id=None, retry=None, timeout=None, metadata=None): """ For the documentation see: :class:`~airflow.contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator` """ client = self.get_conn() name = ProductSearchClient.product_set_path(project_id, location, product_set_id) self.log.info('Retrieving ProductSet: %s', name) response = client.get_product_set(name=name, retry=retry, timeout=timeout, metadata=metadata) self.log.info('ProductSet retrieved.') self.log.debug('ProductSet retrieved:\n%s', response) return MessageToDict(response)
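A hedged sketch of calling `get_product_set` directly on the hook; the `CloudVisionHook` import path (Airflow 1.10-era contrib layout) and the connection id are assumptions, not taken from the snippet.

from airflow.contrib.hooks.gcp_vision_hook import CloudVisionHook

hook = CloudVisionHook(gcp_conn_id='google_cloud_default')
product_set = hook.get_product_set(
    location='europe-west1',
    product_set_id='my-product-set',   # hypothetical ids
    project_id='my-gcp-project',
)
print(product_set.get('displayName'))  # MessageToDict() returns a plain dict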
def request_headers(self): '''Fill request headers from the environ dictionary and modify them via the list of :attr:`headers_middleware`. The returned headers will be sent to the target uri. ''' headers = CIMultiDict() for k in self.environ: if k.startswith('HTTP_'): head = k[5:].replace('_', '-') headers[head] = self.environ[k] for head in ENVIRON_HEADERS: k = head.replace('-', '_').upper() v = self.environ.get(k) if v: headers[head] = v for middleware in self.wsgi.headers_middleware: middleware(self.environ, headers) return headers
def function[request_headers, parameter[self]]: constant[Fill request headers from the environ dictionary and modify them via the list of :attr:`headers_middleware`. The returned headers will be sent to the target uri. ] variable[headers] assign[=] call[name[CIMultiDict], parameter[]] for taget[name[k]] in starred[name[self].environ] begin[:] if call[name[k].startswith, parameter[constant[HTTP_]]] begin[:] variable[head] assign[=] call[call[name[k]][<ast.Slice object at 0x7da20c991c00>].replace, parameter[constant[_], constant[-]]] call[name[headers]][name[head]] assign[=] call[name[self].environ][name[k]] for taget[name[head]] in starred[name[ENVIRON_HEADERS]] begin[:] variable[k] assign[=] call[call[name[head].replace, parameter[constant[-], constant[_]]].upper, parameter[]] variable[v] assign[=] call[name[self].environ.get, parameter[name[k]]] if name[v] begin[:] call[name[headers]][name[head]] assign[=] name[v] for taget[name[middleware]] in starred[name[self].wsgi.headers_middleware] begin[:] call[name[middleware], parameter[name[self].environ, name[headers]]] return[name[headers]]
keyword[def] identifier[request_headers] ( identifier[self] ): literal[string] identifier[headers] = identifier[CIMultiDict] () keyword[for] identifier[k] keyword[in] identifier[self] . identifier[environ] : keyword[if] identifier[k] . identifier[startswith] ( literal[string] ): identifier[head] = identifier[k] [ literal[int] :]. identifier[replace] ( literal[string] , literal[string] ) identifier[headers] [ identifier[head] ]= identifier[self] . identifier[environ] [ identifier[k] ] keyword[for] identifier[head] keyword[in] identifier[ENVIRON_HEADERS] : identifier[k] = identifier[head] . identifier[replace] ( literal[string] , literal[string] ). identifier[upper] () identifier[v] = identifier[self] . identifier[environ] . identifier[get] ( identifier[k] ) keyword[if] identifier[v] : identifier[headers] [ identifier[head] ]= identifier[v] keyword[for] identifier[middleware] keyword[in] identifier[self] . identifier[wsgi] . identifier[headers_middleware] : identifier[middleware] ( identifier[self] . identifier[environ] , identifier[headers] ) keyword[return] identifier[headers]
def request_headers(self): """Fill request headers from the environ dictionary and modify them via the list of :attr:`headers_middleware`. The returned headers will be sent to the target uri. """ headers = CIMultiDict() for k in self.environ: if k.startswith('HTTP_'): head = k[5:].replace('_', '-') headers[head] = self.environ[k] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']] for head in ENVIRON_HEADERS: k = head.replace('-', '_').upper() v = self.environ.get(k) if v: headers[head] = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['head']] for middleware in self.wsgi.headers_middleware: middleware(self.environ, headers) # depends on [control=['for'], data=['middleware']] return headers
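The header extraction in `request_headers` hinges on the WSGI convention that client request headers arrive as `HTTP_*` keys in `environ`. A standalone illustration of that conversion rule:

environ = {'HTTP_ACCEPT_ENCODING': 'gzip',
           'HTTP_X_FORWARDED_FOR': '10.0.0.1'}
headers = {k[5:].replace('_', '-'): v
           for k, v in environ.items() if k.startswith('HTTP_')}
print(headers)  # {'ACCEPT-ENCODING': 'gzip', 'X-FORWARDED-FOR': '10.0.0.1'}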
def get_maxweight(self, weight, concurrent_tasks, minweight=MINWEIGHT): """ Return an appropriate maxweight for use in the block_splitter """ totweight = self.get_weight(weight) ct = concurrent_tasks or 1 mw = math.ceil(totweight / ct) return max(mw, minweight)
def function[get_maxweight, parameter[self, weight, concurrent_tasks, minweight]]: constant[ Return an appropriate maxweight for use in the block_splitter ] variable[totweight] assign[=] call[name[self].get_weight, parameter[name[weight]]] variable[ct] assign[=] <ast.BoolOp object at 0x7da2054a6320> variable[mw] assign[=] call[name[math].ceil, parameter[binary_operation[name[totweight] / name[ct]]]] return[call[name[max], parameter[name[mw], name[minweight]]]]
keyword[def] identifier[get_maxweight] ( identifier[self] , identifier[weight] , identifier[concurrent_tasks] , identifier[minweight] = identifier[MINWEIGHT] ): literal[string] identifier[totweight] = identifier[self] . identifier[get_weight] ( identifier[weight] ) identifier[ct] = identifier[concurrent_tasks] keyword[or] literal[int] identifier[mw] = identifier[math] . identifier[ceil] ( identifier[totweight] / identifier[ct] ) keyword[return] identifier[max] ( identifier[mw] , identifier[minweight] )
def get_maxweight(self, weight, concurrent_tasks, minweight=MINWEIGHT): """ Return an appropriate maxweight for use in the block_splitter """ totweight = self.get_weight(weight) ct = concurrent_tasks or 1 mw = math.ceil(totweight / ct) return max(mw, minweight)
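A worked example of the `get_maxweight` arithmetic: the per-task weight is the ceiling of total weight over task count, floored at `minweight`.

import math

totweight, concurrent_tasks, minweight = 1000, 64, 100
# ceil(1000 / 64) = 16, but the floor of 100 wins
print(max(math.ceil(totweight / concurrent_tasks), minweight))  # -> 100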
def transpose(self): """Return the transpose of the operator.""" return Operator( np.transpose(self.data), self.input_dims(), self.output_dims())
def function[transpose, parameter[self]]: constant[Return the transpose of the operator.] return[call[name[Operator], parameter[call[name[np].transpose, parameter[name[self].data]], call[name[self].input_dims, parameter[]], call[name[self].output_dims, parameter[]]]]]
keyword[def] identifier[transpose] ( identifier[self] ): literal[string] keyword[return] identifier[Operator] ( identifier[np] . identifier[transpose] ( identifier[self] . identifier[data] ), identifier[self] . identifier[input_dims] (), identifier[self] . identifier[output_dims] ())
def transpose(self): """Return the transpose of the operator.""" return Operator(np.transpose(self.data), self.input_dims(), self.output_dims())
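This `transpose` resembles the one on qiskit's `quantum_info.Operator`; under that assumption, a quick usage sketch:

import numpy as np
from qiskit.quantum_info import Operator

op = Operator(np.array([[0, 1], [1j, 0]]))
# Transposing the matrix; input/output dims are carried over unchanged
print(op.transpose().data)  # -> [[0, 1j], [1, 0]]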
def clear_waiting_coordinators(self, cancel=False):
    '''
    Remove all entries from the waiting queue; when ``cancel`` is True,
    cancel every waiting coordinator before clearing the queue.
    '''
    with self._lockw:
        if cancel:
            for _coordinator in self._waiting_transfer_coordinators:
                _coordinator.notify_cancelled("Clear Waiting Queue", False)

        self._waiting_transfer_coordinators.clear()
def function[clear_waiting_coordinators, parameter[self, cancel]]: constant[ remove all entries from waiting queue or cancell all in waiting queue ] with name[self]._lockw begin[:] if name[cancel] begin[:] for taget[name[_coordinator]] in starred[name[self]._waiting_transfer_coordinators] begin[:] call[name[_coordinator].notify_cancelled, parameter[constant[Clear Waiting Queue], constant[False]]] call[name[self]._waiting_transfer_coordinators.clear, parameter[]]
keyword[def] identifier[clear_waiting_coordinators] ( identifier[self] , identifier[cancel] = keyword[False] ): literal[string] keyword[with] identifier[self] . identifier[_lockw] : keyword[if] identifier[cancel] : keyword[for] identifier[_coordinator] keyword[in] identifier[self] . identifier[_waiting_transfer_coordinators] : identifier[_coordinator] . identifier[notify_cancelled] ( literal[string] , keyword[False] ) identifier[self] . identifier[_waiting_transfer_coordinators] . identifier[clear] ()
def clear_waiting_coordinators(self, cancel=False): """ remove all entries from waiting queue or cancell all in waiting queue """ with self._lockw: if cancel: for _coordinator in self._waiting_transfer_coordinators: _coordinator.notify_cancelled('Clear Waiting Queue', False) # depends on [control=['for'], data=['_coordinator']] # depends on [control=['if'], data=[]] self._waiting_transfer_coordinators.clear() # depends on [control=['with'], data=[]]
def register(func=None, name=None):
    """
    Expose compiler to factory.

    :param func: the callable to expose
    :type func: callable

    :param name: name of format
    :type name: str

    It can be used as a decorator::

        @register(name='my:validator')
        def my_validator(obj):
            if obj is True:
                return obj
            raise ValidationError('obj is not true')

    or as a function::

        def my_validator(obj):
            if obj is True:
                return obj
            raise ValidationError('obj is not true')

        register(my_validator, name='my:validator')

    """
    if not name:
        raise CompilationError('Name is required')
    if not func:
        return partial(register, name=name)
    return FormatRegistry.register(name, func)
def function[register, parameter[func, name]]: constant[ Expose compiler to factory. :param func: the callable to expose :type func: callable :param name: name of format :type name: str It can be used as a decorator:: @register(name='my:validator') def my_validator(obj): if obj is True: return obj raise ValidationError('obj is not true') or as a function:: def my_validator(obj): if obj is True: return obj raise ValidationError('obj is not true') @register(name='my:validator') ] if <ast.UnaryOp object at 0x7da1b2594250> begin[:] <ast.Raise object at 0x7da1b2596800> if <ast.UnaryOp object at 0x7da1b2596ad0> begin[:] return[call[name[partial], parameter[name[register]]]] return[call[name[FormatRegistry].register, parameter[name[name], name[func]]]]
keyword[def] identifier[register] ( identifier[func] = keyword[None] , identifier[name] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[name] : keyword[raise] identifier[CompilationError] ( literal[string] ) keyword[if] keyword[not] identifier[func] : keyword[return] identifier[partial] ( identifier[register] , identifier[name] = identifier[name] ) keyword[return] identifier[FormatRegistry] . identifier[register] ( identifier[name] , identifier[func] )
def register(func=None, name=None): """ Expose compiler to factory. :param func: the callable to expose :type func: callable :param name: name of format :type name: str It can be used as a decorator:: @register(name='my:validator') def my_validator(obj): if obj is True: return obj raise ValidationError('obj is not true') or as a function:: def my_validator(obj): if obj is True: return obj raise ValidationError('obj is not true') @register(name='my:validator') """ if not name: raise CompilationError('Name is required') # depends on [control=['if'], data=[]] if not func: return partial(register, name=name) # depends on [control=['if'], data=[]] return FormatRegistry.register(name, func)
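Both registration styles from the `register` docstring in one sketch, assuming `register` and `ValidationError` are importable from the same module:

@register(name='my:validator')
def my_validator(obj):
    if obj is True:
        return obj
    raise ValidationError('obj is not true')

def other_validator(obj):
    return obj

# plain-function form: pass the callable explicitly
register(other_validator, name='my:other')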
def rest_equals(self, rest_object):
    """ Compare two objects' REST attributes """

    if not self.equals(rest_object):
        return False

    return self.to_dict() == rest_object.to_dict()
def function[rest_equals, parameter[self, rest_object]]: constant[ Compare objects REST attributes ] if <ast.UnaryOp object at 0x7da1b0d1dd80> begin[:] return[constant[False]] return[compare[call[name[self].to_dict, parameter[]] equal[==] call[name[rest_object].to_dict, parameter[]]]]
keyword[def] identifier[rest_equals] ( identifier[self] , identifier[rest_object] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[equals] ( identifier[rest_object] ): keyword[return] keyword[False] keyword[return] identifier[self] . identifier[to_dict] ()== identifier[rest_object] . identifier[to_dict] ()
def rest_equals(self, rest_object): """ Compare objects REST attributes """ if not self.equals(rest_object): return False # depends on [control=['if'], data=[]] return self.to_dict() == rest_object.to_dict()
def _link_package_versions(self, link, search_name): """ Return an iterable of triples (pkg_resources_version_key, link, python_version) that can be extracted from the given link. Meant to be overridden by subclasses, not called by clients. """ if link.egg_fragment: egg_info = link.egg_fragment else: egg_info, ext = link.splitext() if not ext: if link not in self.logged_links: logger.debug('Skipping link %s; not a file' % link) self.logged_links.add(link) return [] if egg_info.endswith('.tar'): # Special double-extension case: egg_info = egg_info[:-4] ext = '.tar' + ext if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'): if link not in self.logged_links: logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext)) self.logged_links.add(link) return [] version = self._egg_info_matches(egg_info, search_name, link) if version is None: logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name)) return [] match = self._py_version_re.search(version) if match: version = version[:match.start()] py_version = match.group(1) if py_version != sys.version[:3]: logger.debug('Skipping %s because Python version is incorrect' % link) return [] logger.debug('Found link %s, version: %s' % (link, version)) return [(pkg_resources.parse_version(version), link, version)]
def function[_link_package_versions, parameter[self, link, search_name]]: constant[ Return an iterable of triples (pkg_resources_version_key, link, python_version) that can be extracted from the given link. Meant to be overridden by subclasses, not called by clients. ] if name[link].egg_fragment begin[:] variable[egg_info] assign[=] name[link].egg_fragment variable[version] assign[=] call[name[self]._egg_info_matches, parameter[name[egg_info], name[search_name], name[link]]] if compare[name[version] is constant[None]] begin[:] call[name[logger].debug, parameter[binary_operation[constant[Skipping link %s; wrong project name (not %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f999c0>, <ast.Name object at 0x7da207f9b250>]]]]] return[list[[]]] variable[match] assign[=] call[name[self]._py_version_re.search, parameter[name[version]]] if name[match] begin[:] variable[version] assign[=] call[name[version]][<ast.Slice object at 0x7da207f98910>] variable[py_version] assign[=] call[name[match].group, parameter[constant[1]]] if compare[name[py_version] not_equal[!=] call[name[sys].version][<ast.Slice object at 0x7da207f9a6b0>]] begin[:] call[name[logger].debug, parameter[binary_operation[constant[Skipping %s because Python version is incorrect] <ast.Mod object at 0x7da2590d6920> name[link]]]] return[list[[]]] call[name[logger].debug, parameter[binary_operation[constant[Found link %s, version: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f9ab90>, <ast.Name object at 0x7da207f99420>]]]]] return[list[[<ast.Tuple object at 0x7da1b0b38b50>]]]
keyword[def] identifier[_link_package_versions] ( identifier[self] , identifier[link] , identifier[search_name] ): literal[string] keyword[if] identifier[link] . identifier[egg_fragment] : identifier[egg_info] = identifier[link] . identifier[egg_fragment] keyword[else] : identifier[egg_info] , identifier[ext] = identifier[link] . identifier[splitext] () keyword[if] keyword[not] identifier[ext] : keyword[if] identifier[link] keyword[not] keyword[in] identifier[self] . identifier[logged_links] : identifier[logger] . identifier[debug] ( literal[string] % identifier[link] ) identifier[self] . identifier[logged_links] . identifier[add] ( identifier[link] ) keyword[return] [] keyword[if] identifier[egg_info] . identifier[endswith] ( literal[string] ): identifier[egg_info] = identifier[egg_info] [:- literal[int] ] identifier[ext] = literal[string] + identifier[ext] keyword[if] identifier[ext] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ): keyword[if] identifier[link] keyword[not] keyword[in] identifier[self] . identifier[logged_links] : identifier[logger] . identifier[debug] ( literal[string] %( identifier[link] , identifier[ext] )) identifier[self] . identifier[logged_links] . identifier[add] ( identifier[link] ) keyword[return] [] identifier[version] = identifier[self] . identifier[_egg_info_matches] ( identifier[egg_info] , identifier[search_name] , identifier[link] ) keyword[if] identifier[version] keyword[is] keyword[None] : identifier[logger] . identifier[debug] ( literal[string] %( identifier[link] , identifier[search_name] )) keyword[return] [] identifier[match] = identifier[self] . identifier[_py_version_re] . identifier[search] ( identifier[version] ) keyword[if] identifier[match] : identifier[version] = identifier[version] [: identifier[match] . identifier[start] ()] identifier[py_version] = identifier[match] . identifier[group] ( literal[int] ) keyword[if] identifier[py_version] != identifier[sys] . identifier[version] [: literal[int] ]: identifier[logger] . identifier[debug] ( literal[string] % identifier[link] ) keyword[return] [] identifier[logger] . identifier[debug] ( literal[string] %( identifier[link] , identifier[version] )) keyword[return] [( identifier[pkg_resources] . identifier[parse_version] ( identifier[version] ), identifier[link] , identifier[version] )]
def _link_package_versions(self, link, search_name): """ Return an iterable of triples (pkg_resources_version_key, link, python_version) that can be extracted from the given link. Meant to be overridden by subclasses, not called by clients. """ if link.egg_fragment: egg_info = link.egg_fragment # depends on [control=['if'], data=[]] else: (egg_info, ext) = link.splitext() if not ext: if link not in self.logged_links: logger.debug('Skipping link %s; not a file' % link) self.logged_links.add(link) # depends on [control=['if'], data=['link']] return [] # depends on [control=['if'], data=[]] if egg_info.endswith('.tar'): # Special double-extension case: egg_info = egg_info[:-4] ext = '.tar' + ext # depends on [control=['if'], data=[]] if ext not in ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip'): if link not in self.logged_links: logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext)) self.logged_links.add(link) # depends on [control=['if'], data=['link']] return [] # depends on [control=['if'], data=['ext']] version = self._egg_info_matches(egg_info, search_name, link) if version is None: logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name)) return [] # depends on [control=['if'], data=[]] match = self._py_version_re.search(version) if match: version = version[:match.start()] py_version = match.group(1) if py_version != sys.version[:3]: logger.debug('Skipping %s because Python version is incorrect' % link) return [] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] logger.debug('Found link %s, version: %s' % (link, version)) return [(pkg_resources.parse_version(version), link, version)]
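`_link_package_versions` returns `pkg_resources.parse_version(version)` as the first tuple element precisely so callers can sort candidates; a standalone look at why that key beats plain string ordering:

import pkg_resources

versions = ['1.10', '1.2', '1.2rc1']
print(sorted(versions, key=pkg_resources.parse_version))
# ['1.2rc1', '1.2', '1.10'] -- numeric, pre-release-aware ordering,
# whereas sorted(versions) would put '1.10' before '1.2'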
def parent_dir(path):
    '''Return the `_build` directory two levels above `path`.'''
    return os.path.abspath(os.path.join(path, os.pardir, os.pardir, '_build'))
def function[parent_dir, parameter[path]]: constant[Return the parent of a directory.] return[call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[name[path], name[os].pardir, name[os].pardir, constant[_build]]]]]]
keyword[def] identifier[parent_dir] ( identifier[path] ): literal[string] keyword[return] identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[os] . identifier[pardir] , identifier[os] . identifier[pardir] , literal[string] ))
def parent_dir(path): """Return the parent of a directory.""" return os.path.abspath(os.path.join(path, os.pardir, os.pardir, '_build'))
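What the path arithmetic in `parent_dir` resolves to, worked on a concrete path:

import os

path = '/repo/docs/source'
print(os.path.abspath(os.path.join(path, os.pardir, os.pardir, '_build')))
# -> '/repo/_build': two levels up from `path`, then into `_build`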
def sparql(self, select='*', body=None, inject_prefixes=None, single_column=False):
    """
    Execute a SPARQL query.

    The query is specified using `select` and `body` parameters.
    The argument for the Named Graph is injected into the query.

    The select parameter should be either '*' or a list of vars
    (not prefixed with '?').

     - If '*' is passed, then the result is a list of dicts, { $var: {value: $val } }
     - If a list of vars is passed, then the result is a list of lists
     - Unless single_column=True, in which case the results are a simple
       list of values from the first var

    The inject_prefixes argument can be used to inject a list of prefixes -
    these are expanded using the prefixcommons library
    """
    if inject_prefixes is None:
        inject_prefixes = []
    namedGraph = get_named_graph(self.handle)
    cols = []
    select_val = None
    if select is None or select == '*':
        if not single_column:
            cols = None
        select_val = '*'
    else:
        # Normalize `select` into a list of column names.
        # (The original tested `cols`, which is always a list at this
        # point, so list-valued `select` was wrapped a second time.)
        if isinstance(select, list):
            cols = select
        else:
            cols = [select]
        select_val = ", ".join(['?' + c for c in cols])

    prefixes = ""
    if inject_prefixes is not None:
        plist = ["prefix {}: <{}> ".format(p, expand_uri(p + ":"))
                 for p in inject_prefixes if p != "" and p is not None]
        prefixes = "\n".join(plist)
    query = """
    {prefixes}
    SELECT {s} WHERE {{
    GRAPH <{g}>  {{
    {b}
    }}
    }}
    """.format(prefixes=prefixes, s=select_val, b=body, g=namedGraph)
    bindings = run_sparql(query)
    if len(bindings) == 0:
        return []
    if cols is None:
        return bindings
    else:
        if single_column:
            c = list(bindings[0].keys())[0]
            return [r[c]['value'] for r in bindings]
        else:
            # One inner list per binding row, matching the docstring's
            # "list of lists" (the original flattened across columns)
            return [[r[c]['value'] for c in cols] for r in bindings]
def function[sparql, parameter[self, select, body, inject_prefixes, single_column]]: constant[ Execute a SPARQL query. The query is specified using `select` and `body` parameters. The argument for the Named Graph is injected into the query. The select parameter should be either '*' or a list of vars (not prefixed with '?'). - If '*' is passed, then the result is a list of dicts, { $var: {value: $val } } - If a list of vars is passed, then the result is a list of lists - Unless single_column=True, in which case the results are a simple list of values from the first var The inject_prefixes argument can be used to inject a list of prefixes - these are expanded using the prefixcommons library ] if compare[name[inject_prefixes] is constant[None]] begin[:] variable[inject_prefixes] assign[=] list[[]] variable[namedGraph] assign[=] call[name[get_named_graph], parameter[name[self].handle]] variable[cols] assign[=] list[[]] variable[select_val] assign[=] constant[None] if <ast.BoolOp object at 0x7da1b0730670> begin[:] if <ast.UnaryOp object at 0x7da1b0731510> begin[:] variable[cols] assign[=] constant[None] variable[select_val] assign[=] constant[*] variable[prefixes] assign[=] constant[] if compare[name[inject_prefixes] is_not constant[None]] begin[:] variable[plist] assign[=] <ast.ListComp object at 0x7da1b07463e0> variable[prefixes] assign[=] call[constant[ ].join, parameter[name[plist]]] variable[query] assign[=] call[constant[ {prefixes} SELECT {s} WHERE {{ GRAPH <{g}> {{ {b} }} }} ].format, parameter[]] variable[bindings] assign[=] call[name[run_sparql], parameter[name[query]]] if compare[call[name[len], parameter[name[bindings]]] equal[==] constant[0]] begin[:] return[list[[]]] if compare[name[cols] is constant[None]] begin[:] return[name[bindings]]
keyword[def] identifier[sparql] ( identifier[self] , identifier[select] = literal[string] , identifier[body] = keyword[None] , identifier[inject_prefixes] = keyword[None] , identifier[single_column] = keyword[False] ): literal[string] keyword[if] identifier[inject_prefixes] keyword[is] keyword[None] : identifier[inject_prefixes] =[] identifier[namedGraph] = identifier[get_named_graph] ( identifier[self] . identifier[handle] ) identifier[cols] =[] identifier[select_val] = keyword[None] keyword[if] identifier[select] keyword[is] keyword[None] keyword[or] identifier[select] == literal[string] : keyword[if] keyword[not] identifier[single_column] : identifier[cols] = keyword[None] identifier[select_val] = literal[string] keyword[else] : keyword[if] identifier[isinstance] ( identifier[cols] , identifier[list] ): identifier[cols] =[ identifier[select] ] keyword[else] : identifier[cols] = identifier[select] identifier[select_val] = literal[string] . identifier[join] ([ literal[string] + identifier[c] keyword[for] identifier[c] keyword[in] identifier[cols] ]) identifier[prefixes] = literal[string] keyword[if] identifier[inject_prefixes] keyword[is] keyword[not] keyword[None] : identifier[plist] =[ literal[string] . identifier[format] ( identifier[p] , identifier[expand_uri] ( identifier[p] + literal[string] )) keyword[for] identifier[p] keyword[in] identifier[inject_prefixes] keyword[if] identifier[p] != literal[string] keyword[and] identifier[p] keyword[is] keyword[not] keyword[None] ] identifier[prefixes] = literal[string] . identifier[join] ( identifier[plist] ) identifier[query] = literal[string] . identifier[format] ( identifier[prefixes] = identifier[prefixes] , identifier[s] = identifier[select_val] , identifier[b] = identifier[body] , identifier[g] = identifier[namedGraph] ) identifier[bindings] = identifier[run_sparql] ( identifier[query] ) keyword[if] identifier[len] ( identifier[bindings] )== literal[int] : keyword[return] [] keyword[if] identifier[cols] keyword[is] keyword[None] : keyword[return] identifier[bindings] keyword[else] : keyword[if] identifier[single_column] : identifier[c] = identifier[list] ( identifier[bindings] [ literal[int] ]. identifier[keys] ())[ literal[int] ] keyword[return] [ identifier[r] [ identifier[c] ][ literal[string] ] keyword[for] identifier[r] keyword[in] identifier[bindings] ] keyword[else] : keyword[return] [ identifier[r] [ identifier[c] ][ literal[string] ] keyword[for] identifier[c] keyword[in] identifier[cols] keyword[for] identifier[r] keyword[in] identifier[bindings] ]
def sparql(self, select='*', body=None, inject_prefixes=None, single_column=False): """ Execute a SPARQL query. The query is specified using `select` and `body` parameters. The argument for the Named Graph is injected into the query. The select parameter should be either '*' or a list of vars (not prefixed with '?'). - If '*' is passed, then the result is a list of dicts, { $var: {value: $val } } - If a list of vars is passed, then the result is a list of lists - Unless single_column=True, in which case the results are a simple list of values from the first var The inject_prefixes argument can be used to inject a list of prefixes - these are expanded using the prefixcommons library """ if inject_prefixes is None: inject_prefixes = [] # depends on [control=['if'], data=['inject_prefixes']] namedGraph = get_named_graph(self.handle) cols = [] select_val = None if select is None or select == '*': if not single_column: cols = None # depends on [control=['if'], data=[]] select_val = '*' # depends on [control=['if'], data=[]] else: if isinstance(cols, list): cols = [select] # depends on [control=['if'], data=[]] else: cols = select select_val = ', '.join(['?' + c for c in cols]) prefixes = '' if inject_prefixes is not None: plist = ['prefix {}: <{}> '.format(p, expand_uri(p + ':')) for p in inject_prefixes if p != '' and p is not None] prefixes = '\n'.join(plist) # depends on [control=['if'], data=['inject_prefixes']] query = '\n {prefixes}\n SELECT {s} WHERE {{\n GRAPH <{g}> {{\n {b}\n }}\n }}\n '.format(prefixes=prefixes, s=select_val, b=body, g=namedGraph) bindings = run_sparql(query) if len(bindings) == 0: return [] # depends on [control=['if'], data=[]] if cols is None: return bindings # depends on [control=['if'], data=[]] elif single_column: c = list(bindings[0].keys())[0] return [r[c]['value'] for r in bindings] # depends on [control=['if'], data=[]] else: return [r[c]['value'] for c in cols for r in bindings]
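A hedged usage sketch of `sparql` on a hypothetical instance `ont` of the surrounding class; `run_sparql()` is assumed to reach an endpoint holding the named graph for `self.handle`, and the URIs shown are illustrative:

rows = ont.sparql(select=['s', 'label'],
                  body='?s rdfs:label ?label .',
                  inject_prefixes=['rdfs'])
# -> [['http://example.org/A', 'label A'], ...]  (one list per binding)

labels = ont.sparql(select='label',
                    body='?s rdfs:label ?label .',
                    inject_prefixes=['rdfs'],
                    single_column=True)
# -> ['label A', ...]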
def check_for_bucket(self, bucket_name): """ Check if bucket_name exists. :param bucket_name: the name of the bucket :type bucket_name: str """ try: self.get_conn().head_bucket(Bucket=bucket_name) return True except ClientError as e: self.log.info(e.response["Error"]["Message"]) return False
def function[check_for_bucket, parameter[self, bucket_name]]: constant[ Check if bucket_name exists. :param bucket_name: the name of the bucket :type bucket_name: str ] <ast.Try object at 0x7da18bccaa10>
keyword[def] identifier[check_for_bucket] ( identifier[self] , identifier[bucket_name] ): literal[string] keyword[try] : identifier[self] . identifier[get_conn] (). identifier[head_bucket] ( identifier[Bucket] = identifier[bucket_name] ) keyword[return] keyword[True] keyword[except] identifier[ClientError] keyword[as] identifier[e] : identifier[self] . identifier[log] . identifier[info] ( identifier[e] . identifier[response] [ literal[string] ][ literal[string] ]) keyword[return] keyword[False]
def check_for_bucket(self, bucket_name): """ Check if bucket_name exists. :param bucket_name: the name of the bucket :type bucket_name: str """ try: self.get_conn().head_bucket(Bucket=bucket_name) return True # depends on [control=['try'], data=[]] except ClientError as e: self.log.info(e.response['Error']['Message']) return False # depends on [control=['except'], data=['e']]
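A minimal usage sketch for check_for_bucket, assuming `hook` is an instance of the hook class with AWS credentials configured; the bucket name is invented. Any ClientError (a missing bucket as much as denied access) is logged and reported as False:

if hook.check_for_bucket('example-data-bucket'):
    print('bucket reachable')
else:
    print('bucket missing or not accessible')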
def check_serial_port(name): """returns valid COM Port.""" try: cdc = next(serial.tools.list_ports.grep(name)) return cdc[0] except StopIteration: msg = "device {} not found. ".format(name) msg += "available devices are: " ports = list(serial.tools.list_ports.comports()) for p in ports: msg += "{},".format(text_type(p)) raise ValueError(msg)
def function[check_serial_port, parameter[name]]: constant[returns valid COM Port.] <ast.Try object at 0x7da1b1fdff10>
keyword[def] identifier[check_serial_port] ( identifier[name] ): literal[string] keyword[try] : identifier[cdc] = identifier[next] ( identifier[serial] . identifier[tools] . identifier[list_ports] . identifier[grep] ( identifier[name] )) keyword[return] identifier[cdc] [ literal[int] ] keyword[except] identifier[StopIteration] : identifier[msg] = literal[string] . identifier[format] ( identifier[name] ) identifier[msg] += literal[string] identifier[ports] = identifier[list] ( identifier[serial] . identifier[tools] . identifier[list_ports] . identifier[comports] ()) keyword[for] identifier[p] keyword[in] identifier[ports] : identifier[msg] += literal[string] . identifier[format] ( identifier[text_type] ( identifier[p] )) keyword[raise] identifier[ValueError] ( identifier[msg] )
def check_serial_port(name): """returns valid COM Port.""" try: cdc = next(serial.tools.list_ports.grep(name)) return cdc[0] # depends on [control=['try'], data=[]] except StopIteration: msg = 'device {} not found. '.format(name) msg += 'available devices are: ' ports = list(serial.tools.list_ports.comports()) for p in ports: msg += '{},'.format(text_type(p)) # depends on [control=['for'], data=['p']] raise ValueError(msg) # depends on [control=['except'], data=[]]
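Hypothetical call of check_serial_port; 'USB-SERIAL' is a made-up device name:

try:
    port = check_serial_port('USB-SERIAL')
    print('opening', port)
except ValueError as err:   # raised with a listing of the ports that do exist
    print(err)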
def _split_path(path):
    """split a path returned by the api
    return
    - the sentinel:
    - the rest of the path as a list.
    - the original path stripped of / for normalisation.
    """
    path = path.strip('/')
    list_path = path.split('/')
    sentinel = list_path.pop(0)
    return sentinel, list_path, path
def function[_split_path, parameter[path]]: constant[split a path returned by the api
    return
    - the sentinel:
    - the rest of the path as a list.
    - the original path stripped of / for normalisation.
    ] variable[path] assign[=] call[name[path].strip, parameter[constant[/]]] variable[list_path] assign[=] call[name[path].split, parameter[constant[/]]] variable[sentinel] assign[=] call[name[list_path].pop, parameter[constant[0]]] return[tuple[[<ast.Name object at 0x7da1b23460b0>, <ast.Name object at 0x7da1b2346770>, <ast.Name object at 0x7da1b2346470>]]]
keyword[def] identifier[_split_path] ( identifier[path] ): literal[string] identifier[path] = identifier[path] . identifier[strip] ( literal[string] ) identifier[list_path] = identifier[path] . identifier[split] ( literal[string] ) identifier[sentinel] = identifier[list_path] . identifier[pop] ( literal[int] ) keyword[return] identifier[sentinel] , identifier[list_path] , identifier[path]
def _split_path(path): """split a path returned by the api return - the sentinel: - the rest of the path as a list. - the original path stripped of / for normalisation. """ path = path.strip('/') list_path = path.split('/') sentinel = list_path.pop(0) return (sentinel, list_path, path)
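A worked example of _split_path; this snippet runs as-is against the function above:

sentinel, rest, normalized = _split_path('/api/contents/folder/file.txt')
assert sentinel == 'api'
assert rest == ['contents', 'folder', 'file.txt']
assert normalized == 'api/contents/folder/file.txt'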
def is_alpha(self): """Asserts that val is non-empty string and all characters are alphabetic.""" if not isinstance(self.val, str_types): raise TypeError('val is not a string') if len(self.val) == 0: raise ValueError('val is empty') if not self.val.isalpha(): self._err('Expected <%s> to contain only alphabetic chars, but did not.' % self.val) return self
def function[is_alpha, parameter[self]]: constant[Asserts that val is non-empty string and all characters are alphabetic.] if <ast.UnaryOp object at 0x7da1b0126590> begin[:] <ast.Raise object at 0x7da1b0126650> if compare[call[name[len], parameter[name[self].val]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b0125630> if <ast.UnaryOp object at 0x7da1b01278e0> begin[:] call[name[self]._err, parameter[binary_operation[constant[Expected <%s> to contain only alphabetic chars, but did not.] <ast.Mod object at 0x7da2590d6920> name[self].val]]] return[name[self]]
keyword[def] identifier[is_alpha] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[val] , identifier[str_types] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] identifier[len] ( identifier[self] . identifier[val] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[self] . identifier[val] . identifier[isalpha] (): identifier[self] . identifier[_err] ( literal[string] % identifier[self] . identifier[val] ) keyword[return] identifier[self]
def is_alpha(self): """Asserts that val is non-empty string and all characters are alphabetic.""" if not isinstance(self.val, str_types): raise TypeError('val is not a string') # depends on [control=['if'], data=[]] if len(self.val) == 0: raise ValueError('val is empty') # depends on [control=['if'], data=[]] if not self.val.isalpha(): self._err('Expected <%s> to contain only alphabetic chars, but did not.' % self.val) # depends on [control=['if'], data=[]] return self
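Usage in the fluent style the method comes from, assuming an assertpy-like assert_that factory that wraps a value and returns the assertion object:

assert_that('hello').is_alpha()        # passes and returns self for chaining
assert_that('hello world').is_alpha()  # fails: the space is not alphabetic
assert_that('').is_alpha()             # raises ValueError: val is empty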
def get_children(parent, idx): """Gets the child at parent[idx], or all the children if idx == "*".""" if isinstance(parent, dict): if idx in parent: yield parent[idx] else: raise JSONPathError("Invalid path at {0}".format(idx)) elif isinstance(parent, list): if idx == "*": yield from parent else: is_int, i = try_parse_int(idx) if is_int and i >= 0 and i < len(parent): yield parent[i] else: raise JSONPathError("Invalid list index: {0}".format(i)) else: raise JSONPathError("Type {0} does not have children".format(type(parent).__name__))
def function[get_children, parameter[parent, idx]]: constant[Gets the child at parent[idx], or all the children if idx == "*".] if call[name[isinstance], parameter[name[parent], name[dict]]] begin[:] if compare[name[idx] in name[parent]] begin[:] <ast.Yield object at 0x7da1b1341b40>
keyword[def] identifier[get_children] ( identifier[parent] , identifier[idx] ): literal[string] keyword[if] identifier[isinstance] ( identifier[parent] , identifier[dict] ): keyword[if] identifier[idx] keyword[in] identifier[parent] : keyword[yield] identifier[parent] [ identifier[idx] ] keyword[else] : keyword[raise] identifier[JSONPathError] ( literal[string] . identifier[format] ( identifier[idx] )) keyword[elif] identifier[isinstance] ( identifier[parent] , identifier[list] ): keyword[if] identifier[idx] == literal[string] : keyword[yield] keyword[from] identifier[parent] keyword[else] : identifier[is_int] , identifier[i] = identifier[try_parse_int] ( identifier[idx] ) keyword[if] identifier[is_int] keyword[and] identifier[i] >= literal[int] keyword[and] identifier[i] < identifier[len] ( identifier[parent] ): keyword[yield] identifier[parent] [ identifier[i] ] keyword[else] : keyword[raise] identifier[JSONPathError] ( literal[string] . identifier[format] ( identifier[i] )) keyword[else] : keyword[raise] identifier[JSONPathError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[parent] ). identifier[__name__] ))
def get_children(parent, idx): """Gets the child at parent[idx], or all the children if idx == "*".""" if isinstance(parent, dict): if idx in parent: yield parent[idx] # depends on [control=['if'], data=['idx', 'parent']] else: raise JSONPathError('Invalid path at {0}'.format(idx)) # depends on [control=['if'], data=[]] elif isinstance(parent, list): if idx == '*': yield from parent # depends on [control=['if'], data=[]] else: (is_int, i) = try_parse_int(idx) if is_int and i >= 0 and (i < len(parent)): yield parent[i] # depends on [control=['if'], data=[]] else: raise JSONPathError('Invalid list index: {0}'.format(i)) # depends on [control=['if'], data=[]] else: raise JSONPathError('Type {0} does not have children'.format(type(parent).__name__))
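A short walkthrough of get_children, assuming the module's own try_parse_int and JSONPathError helpers are in scope; the document is illustrative:

doc = {'users': [{'name': 'ada'}, {'name': 'lin'}]}
list(get_children(doc, 'users'))       # [[{'name': 'ada'}, {'name': 'lin'}]]
list(get_children(doc['users'], '*'))  # [{'name': 'ada'}, {'name': 'lin'}]
list(get_children(doc['users'], '1'))  # [{'name': 'lin'}]
list(get_children('scalar', '0'))      # raises JSONPathError: str has no children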
def stack(self, size=None): """ Aggregates records of a distributed array. Stacking should improve the performance of vectorized operations, but the resulting StackedArray object only exposes a restricted set of operations (e.g. map, reduce). The unstack method can be used to restore the full bolt array. Parameters ---------- size : int, optional, default=None The maximum size for each stack (number of original records), will aggregate groups of records per partition up to this size, if None will aggregate all records on each partition. Returns ------- StackedArray """ stk = StackedArray(self._rdd, shape=self.shape, split=self.split) return stk.stack(size)
def function[stack, parameter[self, size]]: constant[ Aggregates records of a distributed array. Stacking should improve the performance of vectorized operations, but the resulting StackedArray object only exposes a restricted set of operations (e.g. map, reduce). The unstack method can be used to restore the full bolt array. Parameters ---------- size : int, optional, default=None The maximum size for each stack (number of original records), will aggregate groups of records per partition up to this size, if None will aggregate all records on each partition. Returns ------- StackedArray ] variable[stk] assign[=] call[name[StackedArray], parameter[name[self]._rdd]] return[call[name[stk].stack, parameter[name[size]]]]
keyword[def] identifier[stack] ( identifier[self] , identifier[size] = keyword[None] ): literal[string] identifier[stk] = identifier[StackedArray] ( identifier[self] . identifier[_rdd] , identifier[shape] = identifier[self] . identifier[shape] , identifier[split] = identifier[self] . identifier[split] ) keyword[return] identifier[stk] . identifier[stack] ( identifier[size] )
def stack(self, size=None): """ Aggregates records of a distributed array. Stacking should improve the performance of vectorized operations, but the resulting StackedArray object only exposes a restricted set of operations (e.g. map, reduce). The unstack method can be used to restore the full bolt array. Parameters ---------- size : int, optional, default=None The maximum size for each stack (number of original records), will aggregate groups of records per partition up to this size, if None will aggregate all records on each partition. Returns ------- StackedArray """ stk = StackedArray(self._rdd, shape=self.shape, split=self.split) return stk.stack(size)
def load_lang_conf(): """ Load language setting from language config file if it exists, otherwise try to use the local settings if Spyder provides a translation, or return the default if no translation provided. """ if osp.isfile(LANG_FILE): with open(LANG_FILE, 'r') as f: lang = f.read() else: lang = get_interface_language() save_lang_conf(lang) # Save language again if it's been disabled if lang.strip('\n') in DISABLED_LANGUAGES: lang = DEFAULT_LANGUAGE save_lang_conf(lang) return lang
def function[load_lang_conf, parameter[]]: constant[ Load language setting from language config file if it exists, otherwise try to use the local settings if Spyder provides a translation, or return the default if no translation provided. ] if call[name[osp].isfile, parameter[name[LANG_FILE]]] begin[:] with call[name[open], parameter[name[LANG_FILE], constant[r]]] begin[:] variable[lang] assign[=] call[name[f].read, parameter[]] if compare[call[name[lang].strip, parameter[constant[ ]]] in name[DISABLED_LANGUAGES]] begin[:] variable[lang] assign[=] name[DEFAULT_LANGUAGE] call[name[save_lang_conf], parameter[name[lang]]] return[name[lang]]
keyword[def] identifier[load_lang_conf] (): literal[string] keyword[if] identifier[osp] . identifier[isfile] ( identifier[LANG_FILE] ): keyword[with] identifier[open] ( identifier[LANG_FILE] , literal[string] ) keyword[as] identifier[f] : identifier[lang] = identifier[f] . identifier[read] () keyword[else] : identifier[lang] = identifier[get_interface_language] () identifier[save_lang_conf] ( identifier[lang] ) keyword[if] identifier[lang] . identifier[strip] ( literal[string] ) keyword[in] identifier[DISABLED_LANGUAGES] : identifier[lang] = identifier[DEFAULT_LANGUAGE] identifier[save_lang_conf] ( identifier[lang] ) keyword[return] identifier[lang]
def load_lang_conf(): """ Load language setting from language config file if it exists, otherwise try to use the local settings if Spyder provides a translation, or return the default if no translation provided. """ if osp.isfile(LANG_FILE): with open(LANG_FILE, 'r') as f: lang = f.read() # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] else: lang = get_interface_language() save_lang_conf(lang) # Save language again if it's been disabled if lang.strip('\n') in DISABLED_LANGUAGES: lang = DEFAULT_LANGUAGE save_lang_conf(lang) # depends on [control=['if'], data=[]] return lang
def run(self, num_runs, show_trails, log_file_base):
        """
        Run each agent in the world for 'num_runs' iterations
        Optionally saves grid results to file if base name is passed to method.
        """
        print("--------------------------------------------------")
        print("Starting Simulation - target = ", self.agent_list[0].target_y, self.agent_list[0].target_x)
        self.world.grd.set_tile(self.agent_list[0].target_y, self.agent_list[0].target_x, 'T')
        self.highlight_cell_surroundings(self.agent_list[0].target_y, self.agent_list[0].target_x)
        self.start_all_agents()

        # save the agents results here
        try:
            with open(log_file_base + '__agents.txt', "w") as f:
                f.write("Starting World = \n")
                f.write(str(self.world.grd))
        except Exception:
            print('Cant save log results to ' + log_file_base)

        for cur_run in range(0, num_runs):
            print("WorldSimulation:run#", cur_run)
            for num, agt in enumerate(self.agent_list):
                if show_trails == 'Y':
                    if len(self.agent_list) == 1 or len(self.agent_list) > 9:
                        self.world.grd.set_tile(agt.current_y, agt.current_x, 'o')
                    else:
                        self.world.grd.set_tile(agt.current_y, agt.current_x, str(num))
                agt.do_your_job()
                self.world.grd.set_tile(agt.current_y, agt.current_x, 'A')   # update the main world grid with agents changes

            # save grid after each run if required
            if log_file_base != 'N':
                self.world.grd.save(log_file_base + '_' + str(cur_run) + '.log')

        # save the agents results here
        with open(log_file_base + '__agents.txt', "a") as f:
            f.write("\nWorld tgt= [" + str(self.agent_list[0].target_y) + "," + str(self.agent_list[0].target_x) + "]\n")
            f.write(str(self.world.grd))
            f.write('\n\nAgent Name , starting, num Steps , num Climbs\n')
            for num, agt in enumerate(self.agent_list):
                res = agt.name + ' , [' + str(agt.start_y) + ', ' + str(agt.start_x) + '], '
                res += str(agt.num_steps) + ' , ' + str(agt.num_climbs) + ' , '
                res += ''.join([a for a in agt.results])
                f.write(res + '\n')
def function[run, parameter[self, num_runs, show_trails, log_file_base]]: constant[ Run each agent in the world for 'num_runs' iterations Optionally saves grid results to file if base name is passed to method. ] call[name[print], parameter[constant[--------------------------------------------------]]] call[name[print], parameter[constant[Starting Simulation - target = ], call[name[self].agent_list][constant[0]].target_y, call[name[self].agent_list][constant[0]].target_x]] call[name[self].world.grd.set_tile, parameter[call[name[self].agent_list][constant[0]].target_y, call[name[self].agent_list][constant[0]].target_x, constant[T]]] call[name[self].highlight_cell_surroundings, parameter[call[name[self].agent_list][constant[0]].target_y, call[name[self].agent_list][constant[0]].target_x]] call[name[self].start_all_agents, parameter[]] <ast.Try object at 0x7da1b1f9f220> for taget[name[cur_run]] in starred[call[name[range], parameter[constant[0], name[num_runs]]]] begin[:] call[name[print], parameter[constant[WorldSimulation:run#], name[cur_run]]] for taget[tuple[[<ast.Name object at 0x7da1b1f9dd50>, <ast.Name object at 0x7da1b1f9c1c0>]]] in starred[call[name[enumerate], parameter[name[self].agent_list]]] begin[:] if compare[name[show_trails] equal[==] constant[Y]] begin[:] if <ast.BoolOp object at 0x7da1b1f9edd0> begin[:] call[name[self].world.grd.set_tile, parameter[name[agt].current_y, name[agt].current_x, constant[o]]] call[name[agt].do_your_job, parameter[]] call[name[self].world.grd.set_tile, parameter[name[agt].current_y, name[agt].current_x, constant[A]]] if compare[name[log_file_base] not_equal[!=] constant[N]] begin[:] call[name[self].world.grd.save, parameter[binary_operation[binary_operation[binary_operation[name[log_file_base] + constant[_]] + call[name[str], parameter[name[cur_run]]]] + constant[.log]]]] with call[name[open], parameter[binary_operation[name[log_file_base] + constant[__agents.txt]], constant[a]]] begin[:] call[name[f].write, parameter[binary_operation[binary_operation[binary_operation[binary_operation[constant[ World tgt= [] + call[name[str], parameter[call[name[self].agent_list][constant[0]].target_y]]] + constant[,]] + call[name[str], parameter[call[name[self].agent_list][constant[0]].target_x]]] + constant[] ]]]] call[name[f].write, parameter[call[name[str], parameter[name[self].world.grd]]]] call[name[f].write, parameter[constant[ Agent Name , starting, num Steps , num Climbs ]]] for taget[tuple[[<ast.Name object at 0x7da1b1ff8250>, <ast.Name object at 0x7da1b1ffbb20>]]] in starred[call[name[enumerate], parameter[name[self].agent_list]]] begin[:] variable[res] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[agt].name + constant[ , []] + call[name[str], parameter[name[agt].start_y]]] + constant[, ]] + call[name[str], parameter[name[agt].start_x]]] + constant[], ]] <ast.AugAssign object at 0x7da1b1ffba30> <ast.AugAssign object at 0x7da1b1ffa5c0> call[name[f].write, parameter[binary_operation[name[res] + constant[ ]]]]
keyword[def] identifier[run] ( identifier[self] , identifier[num_runs] , identifier[show_trails] , identifier[log_file_base] ): literal[string] identifier[print] ( literal[string] ) identifier[print] ( literal[string] , identifier[self] . identifier[agent_list] [ literal[int] ]. identifier[target_y] , identifier[self] . identifier[agent_list] [ literal[int] ]. identifier[target_x] ) identifier[self] . identifier[world] . identifier[grd] . identifier[set_tile] ( identifier[self] . identifier[agent_list] [ literal[int] ]. identifier[target_y] , identifier[self] . identifier[agent_list] [ literal[int] ]. identifier[target_x] , literal[string] ) identifier[self] . identifier[highlight_cell_surroundings] ( identifier[self] . identifier[agent_list] [ literal[int] ]. identifier[target_y] , identifier[self] . identifier[agent_list] [ literal[int] ]. identifier[target_x] ) identifier[self] . identifier[start_all_agents] () keyword[try] : keyword[with] identifier[open] ( identifier[log_file_base] + literal[string] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( identifier[str] ( identifier[self] . identifier[world] . identifier[grd] )) keyword[except] identifier[Exception] : identifier[print] ( literal[string] + identifier[log_file_base] ) keyword[for] identifier[cur_run] keyword[in] identifier[range] ( literal[int] , identifier[num_runs] ): identifier[print] ( literal[string] , identifier[cur_run] ) keyword[for] identifier[num] , identifier[agt] keyword[in] identifier[enumerate] ( identifier[self] . identifier[agent_list] ): keyword[if] identifier[show_trails] == literal[string] : keyword[if] identifier[len] ( identifier[self] . identifier[agent_list] )== literal[int] keyword[or] identifier[len] ( identifier[self] . identifier[agent_list] )> literal[int] : identifier[self] . identifier[world] . identifier[grd] . identifier[set_tile] ( identifier[agt] . identifier[current_y] , identifier[agt] . identifier[current_x] , literal[string] ) keyword[else] : identifier[self] . identifier[world] . identifier[grd] . identifier[set_tile] ( identifier[agt] . identifier[current_y] , identifier[agt] . identifier[current_x] , identifier[str] ( identifier[num] )) identifier[agt] . identifier[do_your_job] () identifier[self] . identifier[world] . identifier[grd] . identifier[set_tile] ( identifier[agt] . identifier[current_y] , identifier[agt] . identifier[current_x] , literal[string] ) keyword[if] identifier[log_file_base] != literal[string] : identifier[self] . identifier[world] . identifier[grd] . identifier[save] ( identifier[log_file_base] + literal[string] + identifier[str] ( identifier[cur_run] )+ literal[string] ) keyword[with] identifier[open] ( identifier[log_file_base] + literal[string] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[agent_list] [ literal[int] ]. identifier[target_y] )+ literal[string] + identifier[str] ( identifier[self] . identifier[agent_list] [ literal[int] ]. identifier[target_x] )+ literal[string] ) identifier[f] . identifier[write] ( identifier[str] ( identifier[self] . identifier[world] . identifier[grd] )) identifier[f] . identifier[write] ( literal[string] ) keyword[for] identifier[num] , identifier[agt] keyword[in] identifier[enumerate] ( identifier[self] . identifier[agent_list] ): identifier[res] = identifier[agt] . identifier[name] + literal[string] + identifier[str] ( identifier[agt] . identifier[start_y] )+ literal[string] + identifier[str] ( identifier[agt] . identifier[start_x] )+ literal[string] identifier[res] += identifier[str] ( identifier[agt] . identifier[num_steps] )+ literal[string] + identifier[str] ( identifier[agt] . identifier[num_climbs] )+ literal[string] identifier[res] += literal[string] . identifier[join] ([ identifier[a] keyword[for] identifier[a] keyword[in] identifier[agt] . identifier[results] ]) identifier[f] . identifier[write] ( identifier[res] + literal[string] )
def run(self, num_runs, show_trails, log_file_base): """ Run each agent in the world for 'num_runs' iterations Optionally saves grid results to file if base name is passed to method. """ print('--------------------------------------------------') print('Starting Simulation - target = ', self.agent_list[0].target_y, self.agent_list[0].target_x) self.world.grd.set_tile(self.agent_list[0].target_y, self.agent_list[0].target_x, 'T') self.highlight_cell_surroundings(self.agent_list[0].target_y, self.agent_list[0].target_x) self.start_all_agents() # save the agents results here try: with open(log_file_base + '__agents.txt', 'w') as f: f.write('Starting World = \n') f.write(str(self.world.grd)) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]] except Exception: print('Cant save log results to ' + log_file_base) # depends on [control=['except'], data=[]] for cur_run in range(0, num_runs): print('WorldSimulation:run#', cur_run) for (num, agt) in enumerate(self.agent_list): if show_trails == 'Y': if len(self.agent_list) == 1 or len(self.agent_list) > 9: self.world.grd.set_tile(agt.current_y, agt.current_x, 'o') # depends on [control=['if'], data=[]] else: self.world.grd.set_tile(agt.current_y, agt.current_x, str(num)) # depends on [control=['if'], data=[]] agt.do_your_job() self.world.grd.set_tile(agt.current_y, agt.current_x, 'A') # update the main world grid with agents changes # depends on [control=['for'], data=[]] # save grid after each run if required if log_file_base != 'N': self.world.grd.save(log_file_base + '_' + str(cur_run) + '.log') # depends on [control=['if'], data=['log_file_base']] # depends on [control=['for'], data=['cur_run']] # save the agents results here with open(log_file_base + '__agents.txt', 'a') as f: f.write('\nWorld tgt= [' + str(self.agent_list[0].target_y) + ',' + str(self.agent_list[0].target_x) + ']\n') f.write(str(self.world.grd)) f.write('\n\nAgent Name , starting, num Steps , num Climbs\n') for (num, agt) in enumerate(self.agent_list): res = agt.name + ' , [' + str(agt.start_y) + ', ' + str(agt.start_x) + '], ' res += str(agt.num_steps) + ' , ' + str(agt.num_climbs) + ' , ' res += ''.join([a for a in agt.results]) f.write(res + '\n') # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['f']]
def _migrate_resource(instance, migrations, version=''): """ Migrate a resource instance Subresources are migrated first, then the resource is recursively migrated :param instance: a perch.Document instance :param migrations: the migrations for a resource :param version: the current resource version to migrate """ if version not in migrations: return instance instance = _migrate_subresources( instance, migrations[version]['subresources'] ) for migration in migrations[version]['migrations']: instance = migration(instance) instance._resource['doc_version'] = unicode(migration.version) instance = _migrate_resource( instance, migrations, version=migration.version ) return instance
def function[_migrate_resource, parameter[instance, migrations, version]]: constant[ Migrate a resource instance Subresources are migrated first, then the resource is recursively migrated :param instance: a perch.Document instance :param migrations: the migrations for a resource :param version: the current resource version to migrate ] if compare[name[version] <ast.NotIn object at 0x7da2590d7190> name[migrations]] begin[:] return[name[instance]] variable[instance] assign[=] call[name[_migrate_subresources], parameter[name[instance], call[call[name[migrations]][name[version]]][constant[subresources]]]] for taget[name[migration]] in starred[call[call[name[migrations]][name[version]]][constant[migrations]]] begin[:] variable[instance] assign[=] call[name[migration], parameter[name[instance]]] call[name[instance]._resource][constant[doc_version]] assign[=] call[name[unicode], parameter[name[migration].version]] variable[instance] assign[=] call[name[_migrate_resource], parameter[name[instance], name[migrations]]] return[name[instance]]
keyword[def] identifier[_migrate_resource] ( identifier[instance] , identifier[migrations] , identifier[version] = literal[string] ): literal[string] keyword[if] identifier[version] keyword[not] keyword[in] identifier[migrations] : keyword[return] identifier[instance] identifier[instance] = identifier[_migrate_subresources] ( identifier[instance] , identifier[migrations] [ identifier[version] ][ literal[string] ] ) keyword[for] identifier[migration] keyword[in] identifier[migrations] [ identifier[version] ][ literal[string] ]: identifier[instance] = identifier[migration] ( identifier[instance] ) identifier[instance] . identifier[_resource] [ literal[string] ]= identifier[unicode] ( identifier[migration] . identifier[version] ) identifier[instance] = identifier[_migrate_resource] ( identifier[instance] , identifier[migrations] , identifier[version] = identifier[migration] . identifier[version] ) keyword[return] identifier[instance]
def _migrate_resource(instance, migrations, version=''): """ Migrate a resource instance Subresources are migrated first, then the resource is recursively migrated :param instance: a perch.Document instance :param migrations: the migrations for a resource :param version: the current resource version to migrate """ if version not in migrations: return instance # depends on [control=['if'], data=[]] instance = _migrate_subresources(instance, migrations[version]['subresources']) for migration in migrations[version]['migrations']: instance = migration(instance) instance._resource['doc_version'] = unicode(migration.version) instance = _migrate_resource(instance, migrations, version=migration.version) # depends on [control=['for'], data=['migration']] return instance
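The shape of the migrations mapping is implied by the lookups above rather than documented; this sketch reconstructs it under that assumption (add_created_field is hypothetical, and note the function relies on the Python 2 unicode builtin):

def add_created_field(instance):            # hypothetical migration step
    instance._resource.setdefault('created', None)
    return instance
add_created_field.version = '0.2.0'         # each step carries its target version

migrations = {
    '': {'subresources': {}, 'migrations': [add_created_field]},  # unversioned docs
    '0.2.0': {'subresources': {}, 'migrations': []},
}
# instance: a perch.Document loaded elsewhere
instance = _migrate_resource(instance, migrations)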
def transmit_ack_bpdu(self): """ Send Topology Change Ack BPDU. """ ack_flags = 0b10000001 bpdu_data = self._generate_config_bpdu(ack_flags) self.ofctl.send_packet_out(self.ofport.port_no, bpdu_data)
def function[transmit_ack_bpdu, parameter[self]]: constant[ Send Topology Change Ack BPDU. ] variable[ack_flags] assign[=] constant[129] variable[bpdu_data] assign[=] call[name[self]._generate_config_bpdu, parameter[name[ack_flags]]] call[name[self].ofctl.send_packet_out, parameter[name[self].ofport.port_no, name[bpdu_data]]]
keyword[def] identifier[transmit_ack_bpdu] ( identifier[self] ): literal[string] identifier[ack_flags] = literal[int] identifier[bpdu_data] = identifier[self] . identifier[_generate_config_bpdu] ( identifier[ack_flags] ) identifier[self] . identifier[ofctl] . identifier[send_packet_out] ( identifier[self] . identifier[ofport] . identifier[port_no] , identifier[bpdu_data] )
def transmit_ack_bpdu(self): """ Send Topology Change Ack BPDU. """ ack_flags = 129 bpdu_data = self._generate_config_bpdu(ack_flags) self.ofctl.send_packet_out(self.ofport.port_no, bpdu_data)
def flush(self): """ Force commit changes to the file and stdout """ if not self.nostdout: self.stdout.flush() if self.file is not None: self.file.flush()
def function[flush, parameter[self]]: constant[ Force commit changes to the file and stdout ] if <ast.UnaryOp object at 0x7da1b04005e0> begin[:] call[name[self].stdout.flush, parameter[]] if compare[name[self].file is_not constant[None]] begin[:] call[name[self].file.flush, parameter[]]
keyword[def] identifier[flush] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[nostdout] : identifier[self] . identifier[stdout] . identifier[flush] () keyword[if] identifier[self] . identifier[file] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[file] . identifier[flush] ()
def flush(self): """ Force commit changes to the file and stdout """ if not self.nostdout: self.stdout.flush() # depends on [control=['if'], data=[]] if self.file is not None: self.file.flush() # depends on [control=['if'], data=[]]
def _update_id(record, new_id): """ Update a record id to new_id, also modifying the ID in record.description """ old_id = record.id record.id = new_id # At least for FASTA, record ID starts the description record.description = re.sub('^' + re.escape(old_id), new_id, record.description) return record
def function[_update_id, parameter[record, new_id]]: constant[ Update a record id to new_id, also modifying the ID in record.description ] variable[old_id] assign[=] name[record].id name[record].id assign[=] name[new_id] name[record].description assign[=] call[name[re].sub, parameter[binary_operation[constant[^] + call[name[re].escape, parameter[name[old_id]]]], name[new_id], name[record].description]] return[name[record]]
keyword[def] identifier[_update_id] ( identifier[record] , identifier[new_id] ): literal[string] identifier[old_id] = identifier[record] . identifier[id] identifier[record] . identifier[id] = identifier[new_id] identifier[record] . identifier[description] = identifier[re] . identifier[sub] ( literal[string] + identifier[re] . identifier[escape] ( identifier[old_id] ), identifier[new_id] , identifier[record] . identifier[description] ) keyword[return] identifier[record]
def _update_id(record, new_id): """ Update a record id to new_id, also modifying the ID in record.description """ old_id = record.id record.id = new_id # At least for FASTA, record ID starts the description record.description = re.sub('^' + re.escape(old_id), new_id, record.description) return record
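A runnable demonstration of _update_id, with a SimpleNamespace standing in for the Bio.SeqRecord it normally receives (the stand-in is an assumption; only .id and .description are exercised, and the module's own import of re is relied on):

from types import SimpleNamespace

rec = SimpleNamespace(id='seq1', description='seq1 sample sequence')
rec = _update_id(rec, 'seqA')
assert rec.id == 'seqA'
assert rec.description == 'seqA sample sequence'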
def to_native(self): """ Convert to a native Python `datetime.time` value. """ h, m, s = self.hour_minute_second s, ns = nano_divmod(s, 1) ms = int(nano_mul(ns, 1000000)) return time(h, m, s, ms)
def function[to_native, parameter[self]]: constant[ Convert to a native Python `datetime.time` value. ] <ast.Tuple object at 0x7da1b2581960> assign[=] name[self].hour_minute_second <ast.Tuple object at 0x7da1b2582d10> assign[=] call[name[nano_divmod], parameter[name[s], constant[1]]] variable[ms] assign[=] call[name[int], parameter[call[name[nano_mul], parameter[name[ns], constant[1000000]]]]] return[call[name[time], parameter[name[h], name[m], name[s], name[ms]]]]
keyword[def] identifier[to_native] ( identifier[self] ): literal[string] identifier[h] , identifier[m] , identifier[s] = identifier[self] . identifier[hour_minute_second] identifier[s] , identifier[ns] = identifier[nano_divmod] ( identifier[s] , literal[int] ) identifier[ms] = identifier[int] ( identifier[nano_mul] ( identifier[ns] , literal[int] )) keyword[return] identifier[time] ( identifier[h] , identifier[m] , identifier[s] , identifier[ms] )
def to_native(self): """ Convert to a native Python `datetime.time` value. """ (h, m, s) = self.hour_minute_second (s, ns) = nano_divmod(s, 1) ms = int(nano_mul(ns, 1000000)) return time(h, m, s, ms)
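The same conversion arithmetic spelled out with plain numbers; note that the local name ms above actually lands in datetime.time's microsecond slot:

from datetime import time

h, m = 12, 30
s, frac = divmod(7.25, 1)      # nano_divmod(s, 1): whole and fractional seconds
us = int(frac * 1000000)       # nano_mul(frac, 1000000) -> 250000 microseconds
assert time(h, m, int(s), us) == time(12, 30, 7, 250000)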
def get_desired(): """Populate ``DESIRED_TEMPLATE`` with public members. If there are no members, does nothing. Returns: str: The "desired" contents of ``bezier.rst``. """ public_members = get_public_members() if public_members: members = "\n :members: {}".format(", ".join(public_members)) else: members = "" return DESIRED_TEMPLATE.format(members=members)
def function[get_desired, parameter[]]: constant[Populate ``DESIRED_TEMPLATE`` with public members. If there are no members, does nothing. Returns: str: The "desired" contents of ``bezier.rst``. ] variable[public_members] assign[=] call[name[get_public_members], parameter[]] if name[public_members] begin[:] variable[members] assign[=] call[constant[ :members: {}].format, parameter[call[constant[, ].join, parameter[name[public_members]]]]] return[call[name[DESIRED_TEMPLATE].format, parameter[]]]
keyword[def] identifier[get_desired] (): literal[string] identifier[public_members] = identifier[get_public_members] () keyword[if] identifier[public_members] : identifier[members] = literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[public_members] )) keyword[else] : identifier[members] = literal[string] keyword[return] identifier[DESIRED_TEMPLATE] . identifier[format] ( identifier[members] = identifier[members] )
def get_desired(): """Populate ``DESIRED_TEMPLATE`` with public members. If there are no members, does nothing. Returns: str: The "desired" contents of ``bezier.rst``. """ public_members = get_public_members() if public_members: members = '\n :members: {}'.format(', '.join(public_members)) # depends on [control=['if'], data=[]] else: members = '' return DESIRED_TEMPLATE.format(members=members)
def marv(ctx, config, loglevel, logfilter, verbosity): """Manage a Marv site""" if config is None: cwd = os.path.abspath(os.path.curdir) while cwd != os.path.sep: config = os.path.join(cwd, 'marv.conf') if os.path.exists(config): break cwd = os.path.dirname(cwd) else: config = '/etc/marv/marv.conf' if not os.path.exists(config): config = None ctx.obj = config setup_logging(loglevel, verbosity, logfilter)
def function[marv, parameter[ctx, config, loglevel, logfilter, verbosity]]: constant[Manage a Marv site] if compare[name[config] is constant[None]] begin[:] variable[cwd] assign[=] call[name[os].path.abspath, parameter[name[os].path.curdir]] while compare[name[cwd] not_equal[!=] name[os].path.sep] begin[:] variable[config] assign[=] call[name[os].path.join, parameter[name[cwd], constant[marv.conf]]] if call[name[os].path.exists, parameter[name[config]]] begin[:] break variable[cwd] assign[=] call[name[os].path.dirname, parameter[name[cwd]]] name[ctx].obj assign[=] name[config] call[name[setup_logging], parameter[name[loglevel], name[verbosity], name[logfilter]]]
keyword[def] identifier[marv] ( identifier[ctx] , identifier[config] , identifier[loglevel] , identifier[logfilter] , identifier[verbosity] ): literal[string] keyword[if] identifier[config] keyword[is] keyword[None] : identifier[cwd] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[curdir] ) keyword[while] identifier[cwd] != identifier[os] . identifier[path] . identifier[sep] : identifier[config] = identifier[os] . identifier[path] . identifier[join] ( identifier[cwd] , literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[config] ): keyword[break] identifier[cwd] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[cwd] ) keyword[else] : identifier[config] = literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[config] ): identifier[config] = keyword[None] identifier[ctx] . identifier[obj] = identifier[config] identifier[setup_logging] ( identifier[loglevel] , identifier[verbosity] , identifier[logfilter] )
def marv(ctx, config, loglevel, logfilter, verbosity): """Manage a Marv site""" if config is None: cwd = os.path.abspath(os.path.curdir) while cwd != os.path.sep: config = os.path.join(cwd, 'marv.conf') if os.path.exists(config): break # depends on [control=['if'], data=[]] cwd = os.path.dirname(cwd) # depends on [control=['while'], data=['cwd']] else: config = '/etc/marv/marv.conf' if not os.path.exists(config): config = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['config']] ctx.obj = config setup_logging(loglevel, verbosity, logfilter)
def BVS(name, size, min=None, max=None, stride=None, uninitialized=False, #pylint:disable=redefined-builtin explicit_name=None, discrete_set=False, discrete_set_max_card=None, **kwargs): """ Creates a bit-vector symbol (i.e., a variable). If you want to specify the maximum or minimum value of a normal symbol that is not part of value-set analysis, you should manually add constraints to that effect. **Do not use ``min`` and ``max`` for symbolic execution.** :param name: The name of the symbol. :param size: The size (in bits) of the bit-vector. :param min: The minimum value of the symbol, used only for value-set analysis :param max: The maximum value of the symbol, used only for value-set analysis :param stride: The stride of the symbol, used only for value-set analysis :param uninitialized: Whether this value should be counted as an "uninitialized" value in the course of an analysis. :param bool explicit_name: If False, an identifier is appended to the name to ensure uniqueness. :param bool discrete_set: If True, a DiscreteStridedIntervalSet will be used instead of a normal StridedInterval. :param int discrete_set_max_card: The maximum cardinality of the discrete set. It is ignored if discrete_set is set to False or None. :returns: a BV object representing this symbol. """ if stride == 0 and max != min: raise ClaripyValueError("BVSes of stride 0 should have max == min") encoded_name = None if type(name) is bytes: encoded_name = name name = name.decode() if type(name) is not str: raise TypeError("Name value for BVS must be a str, got %r" % type(name)) n = _make_name(name, size, False if explicit_name is None else explicit_name) if not discrete_set: discrete_set_max_card = None return BV('BVS', (n, min, max, stride, uninitialized, discrete_set, discrete_set_max_card), variables={n}, length=size, symbolic=True, eager_backends=None, uninitialized=uninitialized, encoded_name=encoded_name, **kwargs)
def function[BVS, parameter[name, size, min, max, stride, uninitialized, explicit_name, discrete_set, discrete_set_max_card]]: constant[ Creates a bit-vector symbol (i.e., a variable). If you want to specify the maximum or minimum value of a normal symbol that is not part of value-set analysis, you should manually add constraints to that effect. **Do not use ``min`` and ``max`` for symbolic execution.** :param name: The name of the symbol. :param size: The size (in bits) of the bit-vector. :param min: The minimum value of the symbol, used only for value-set analysis :param max: The maximum value of the symbol, used only for value-set analysis :param stride: The stride of the symbol, used only for value-set analysis :param uninitialized: Whether this value should be counted as an "uninitialized" value in the course of an analysis. :param bool explicit_name: If False, an identifier is appended to the name to ensure uniqueness. :param bool discrete_set: If True, a DiscreteStridedIntervalSet will be used instead of a normal StridedInterval. :param int discrete_set_max_card: The maximum cardinality of the discrete set. It is ignored if discrete_set is set to False or None. :returns: a BV object representing this symbol. ] if <ast.BoolOp object at 0x7da20c7ca710> begin[:] <ast.Raise object at 0x7da20c7c8b20> variable[encoded_name] assign[=] constant[None] if compare[call[name[type], parameter[name[name]]] is name[bytes]] begin[:] variable[encoded_name] assign[=] name[name] variable[name] assign[=] call[name[name].decode, parameter[]] if compare[call[name[type], parameter[name[name]]] is_not name[str]] begin[:] <ast.Raise object at 0x7da20c7c9e40> variable[n] assign[=] call[name[_make_name], parameter[name[name], name[size], <ast.IfExp object at 0x7da20c7c8a30>]] if <ast.UnaryOp object at 0x7da20c7c8fa0> begin[:] variable[discrete_set_max_card] assign[=] constant[None] return[call[name[BV], parameter[constant[BVS], tuple[[<ast.Name object at 0x7da20c7ca770>, <ast.Name object at 0x7da20c7cb880>, <ast.Name object at 0x7da20c7c91e0>, <ast.Name object at 0x7da20c7cbf10>, <ast.Name object at 0x7da20c7cb1c0>, <ast.Name object at 0x7da20c7ca320>, <ast.Name object at 0x7da20c7ca830>]]]]]
keyword[def] identifier[BVS] ( identifier[name] , identifier[size] , identifier[min] = keyword[None] , identifier[max] = keyword[None] , identifier[stride] = keyword[None] , identifier[uninitialized] = keyword[False] , identifier[explicit_name] = keyword[None] , identifier[discrete_set] = keyword[False] , identifier[discrete_set_max_card] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[stride] == literal[int] keyword[and] identifier[max] != identifier[min] : keyword[raise] identifier[ClaripyValueError] ( literal[string] ) identifier[encoded_name] = keyword[None] keyword[if] identifier[type] ( identifier[name] ) keyword[is] identifier[bytes] : identifier[encoded_name] = identifier[name] identifier[name] = identifier[name] . identifier[decode] () keyword[if] identifier[type] ( identifier[name] ) keyword[is] keyword[not] identifier[str] : keyword[raise] identifier[TypeError] ( literal[string] % identifier[type] ( identifier[name] )) identifier[n] = identifier[_make_name] ( identifier[name] , identifier[size] , keyword[False] keyword[if] identifier[explicit_name] keyword[is] keyword[None] keyword[else] identifier[explicit_name] ) keyword[if] keyword[not] identifier[discrete_set] : identifier[discrete_set_max_card] = keyword[None] keyword[return] identifier[BV] ( literal[string] ,( identifier[n] , identifier[min] , identifier[max] , identifier[stride] , identifier[uninitialized] , identifier[discrete_set] , identifier[discrete_set_max_card] ), identifier[variables] ={ identifier[n] }, identifier[length] = identifier[size] , identifier[symbolic] = keyword[True] , identifier[eager_backends] = keyword[None] , identifier[uninitialized] = identifier[uninitialized] , identifier[encoded_name] = identifier[encoded_name] , ** identifier[kwargs] )
def BVS(name, size, min=None, max=None, stride=None, uninitialized=False, explicit_name=None, discrete_set=False, discrete_set_max_card=None, **kwargs): #pylint:disable=redefined-builtin '\n Creates a bit-vector symbol (i.e., a variable).\n\n If you want to specify the maximum or minimum value of a normal symbol that is not part of value-set analysis, you\n should manually add constraints to that effect. **Do not use ``min`` and ``max`` for symbolic execution.**\n\n :param name: The name of the symbol.\n :param size: The size (in bits) of the bit-vector.\n :param min: The minimum value of the symbol, used only for value-set analysis\n :param max: The maximum value of the symbol, used only for value-set analysis\n :param stride: The stride of the symbol, used only for value-set analysis\n :param uninitialized: Whether this value should be counted as an "uninitialized" value in the course of an\n analysis.\n :param bool explicit_name: If False, an identifier is appended to the name to ensure uniqueness.\n :param bool discrete_set: If True, a DiscreteStridedIntervalSet will be used instead of a normal StridedInterval.\n :param int discrete_set_max_card: The maximum cardinality of the discrete set. It is ignored if discrete_set is set\n to False or None.\n\n :returns: a BV object representing this symbol.\n ' if stride == 0 and max != min: raise ClaripyValueError('BVSes of stride 0 should have max == min') # depends on [control=['if'], data=[]] encoded_name = None if type(name) is bytes: encoded_name = name name = name.decode() # depends on [control=['if'], data=[]] if type(name) is not str: raise TypeError('Name value for BVS must be a str, got %r' % type(name)) # depends on [control=['if'], data=[]] n = _make_name(name, size, False if explicit_name is None else explicit_name) if not discrete_set: discrete_set_max_card = None # depends on [control=['if'], data=[]] return BV('BVS', (n, min, max, stride, uninitialized, discrete_set, discrete_set_max_card), variables={n}, length=size, symbolic=True, eager_backends=None, uninitialized=uninitialized, encoded_name=encoded_name, **kwargs)
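A typical call, assuming this BVS is claripy's front-end constructor (the import path is an assumption); per the docstring, min/max/stride are reserved for value-set analysis and should not be passed during symbolic execution:

import claripy

x = claripy.BVS('x', 32)                      # 32-bit symbol, name uniquified
y = claripy.BVS('y', 8, explicit_name=True)   # keep the exact name 'y'
expr = x + 1                                  # symbols compose into ASTs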
def add_node_collection(self, node, collection): """Add the collected test items from a node The collection is stored in the ``.node2collection`` map. Called by the ``DSession.worker_collectionfinish`` hook. """ assert node in self.node2pending if self.collection_is_completed: # A new node has been added later, perhaps an original one died. # .schedule() should have # been called by now assert self.collection if collection != self.collection: other_node = next(iter(self.node2collection.keys())) msg = report_collection_diff( self.collection, collection, other_node.gateway.id, node.gateway.id ) self.log(msg) return self.node2collection[node] = list(collection)
def function[add_node_collection, parameter[self, node, collection]]: constant[Add the collected test items from a node The collection is stored in the ``.node2collection`` map. Called by the ``DSession.worker_collectionfinish`` hook. ] assert[compare[name[node] in name[self].node2pending]] if name[self].collection_is_completed begin[:] assert[name[self].collection] if compare[name[collection] not_equal[!=] name[self].collection] begin[:] variable[other_node] assign[=] call[name[next], parameter[call[name[iter], parameter[call[name[self].node2collection.keys, parameter[]]]]]] variable[msg] assign[=] call[name[report_collection_diff], parameter[name[self].collection, name[collection], name[other_node].gateway.id, name[node].gateway.id]] call[name[self].log, parameter[name[msg]]] return[None] call[name[self].node2collection][name[node]] assign[=] call[name[list], parameter[name[collection]]]
keyword[def] identifier[add_node_collection] ( identifier[self] , identifier[node] , identifier[collection] ): literal[string] keyword[assert] identifier[node] keyword[in] identifier[self] . identifier[node2pending] keyword[if] identifier[self] . identifier[collection_is_completed] : keyword[assert] identifier[self] . identifier[collection] keyword[if] identifier[collection] != identifier[self] . identifier[collection] : identifier[other_node] = identifier[next] ( identifier[iter] ( identifier[self] . identifier[node2collection] . identifier[keys] ())) identifier[msg] = identifier[report_collection_diff] ( identifier[self] . identifier[collection] , identifier[collection] , identifier[other_node] . identifier[gateway] . identifier[id] , identifier[node] . identifier[gateway] . identifier[id] ) identifier[self] . identifier[log] ( identifier[msg] ) keyword[return] identifier[self] . identifier[node2collection] [ identifier[node] ]= identifier[list] ( identifier[collection] )
def add_node_collection(self, node, collection): """Add the collected test items from a node The collection is stored in the ``.node2collection`` map. Called by the ``DSession.worker_collectionfinish`` hook. """ assert node in self.node2pending if self.collection_is_completed: # A new node has been added later, perhaps an original one died. # .schedule() should have # been called by now assert self.collection if collection != self.collection: other_node = next(iter(self.node2collection.keys())) msg = report_collection_diff(self.collection, collection, other_node.gateway.id, node.gateway.id) self.log(msg) return # depends on [control=['if'], data=['collection']] # depends on [control=['if'], data=[]] self.node2collection[node] = list(collection)
def state_probability(self, direction, repertoire, purview):
        """Compute the probability of the purview in its current state given
        the repertoire.

        Collapses the dimensions of the repertoire that correspond to the
        purview nodes onto their state. All other dimensions are already
        singular and thus receive 0 as the conditioning index.

        Returns:
            float: A single probability.
        """
        purview_state = self.purview_state(direction)

        index = tuple(node_state if node in purview else 0
                      for node, node_state in enumerate(purview_state))
        return repertoire[index]
def function[state_probability, parameter[self, direction, repertoire, purview]]: constant[Compute the probability of the purview in its current state given the repertoire. Collapses the dimensions of the repertoire that correspond to the purview nodes onto their state. All other dimensions are already singular and thus receive 0 as the conditioning index. Returns: float: A single probability. ] variable[purview_state] assign[=] call[name[self].purview_state, parameter[name[direction]]] variable[index] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da20cabd840>]] return[call[name[repertoire]][name[index]]]
keyword[def] identifier[state_probability] ( identifier[self] , identifier[direction] , identifier[repertoire] , identifier[purview] ,): literal[string] identifier[purview_state] = identifier[self] . identifier[purview_state] ( identifier[direction] ) identifier[index] = identifier[tuple] ( identifier[node_state] keyword[if] identifier[node] keyword[in] identifier[purview] keyword[else] literal[int] keyword[for] identifier[node] , identifier[node_state] keyword[in] identifier[enumerate] ( identifier[purview_state] )) keyword[return] identifier[repertoire] [ identifier[index] ]
def state_probability(self, direction, repertoire, purview): """Compute the probability of the purview in its current state given the repertoire. Collapses the dimensions of the repertoire that correspond to the purview nodes onto their state. All other dimensions are already singular and thus receive 0 as the conditioning index. Returns: float: A single probability. """ purview_state = self.purview_state(direction) index = tuple((node_state if node in purview else 0 for (node, node_state) in enumerate(purview_state))) return repertoire[index]
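The indexing step of state_probability in isolation, runnable with numpy; the shape, states, and values are illustrative:

import numpy as np

repertoire = np.arange(8.0).reshape(1, 2, 2, 2)   # node 0 already marginalized
purview = (1, 2, 3)
purview_state = (0, 1, 0, 1)                      # current state of every node
index = tuple(s if node in purview else 0
              for node, s in enumerate(purview_state))
assert index == (0, 1, 0, 1)
p = repertoire[index]                             # a single float, here 5.0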
def save_files(self, selections) -> None: """Save the |Selection| objects contained in the given |Selections| instance to separate network files.""" try: currentpath = self.currentpath selections = selectiontools.Selections(selections) for selection in selections: if selection.name == 'complete': continue path = os.path.join(currentpath, selection.name+'.py') selection.save_networkfile(filepath=path) except BaseException: objecttools.augment_excmessage( 'While trying to save selections `%s` into network files' % selections)
def function[save_files, parameter[self, selections]]: constant[Save the |Selection| objects contained in the given |Selections| instance to separate network files.] <ast.Try object at 0x7da2044c2cb0>
keyword[def] identifier[save_files] ( identifier[self] , identifier[selections] )-> keyword[None] : literal[string] keyword[try] : identifier[currentpath] = identifier[self] . identifier[currentpath] identifier[selections] = identifier[selectiontools] . identifier[Selections] ( identifier[selections] ) keyword[for] identifier[selection] keyword[in] identifier[selections] : keyword[if] identifier[selection] . identifier[name] == literal[string] : keyword[continue] identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[currentpath] , identifier[selection] . identifier[name] + literal[string] ) identifier[selection] . identifier[save_networkfile] ( identifier[filepath] = identifier[path] ) keyword[except] identifier[BaseException] : identifier[objecttools] . identifier[augment_excmessage] ( literal[string] % identifier[selections] )
def save_files(self, selections) -> None: """Save the |Selection| objects contained in the given |Selections| instance to separate network files.""" try: currentpath = self.currentpath selections = selectiontools.Selections(selections) for selection in selections: if selection.name == 'complete': continue # depends on [control=['if'], data=[]] path = os.path.join(currentpath, selection.name + '.py') selection.save_networkfile(filepath=path) # depends on [control=['for'], data=['selection']] # depends on [control=['try'], data=[]] except BaseException: objecttools.augment_excmessage('While trying to save selections `%s` into network files' % selections) # depends on [control=['except'], data=[]]
def add_petabencana_layer(self): """Add petabencana layer to the map. This uses the PetaBencana API to fetch the latest floods in JK. See https://data.petabencana.id/floods """ from safe.gui.tools.peta_bencana_dialog import PetaBencanaDialog dialog = PetaBencanaDialog(self.iface.mainWindow(), self.iface) dialog.show()
def function[add_petabencana_layer, parameter[self]]: constant[Add petabencana layer to the map. This uses the PetaBencana API to fetch the latest floods in JK. See https://data.petabencana.id/floods ] from relative_module[safe.gui.tools.peta_bencana_dialog] import module[PetaBencanaDialog] variable[dialog] assign[=] call[name[PetaBencanaDialog], parameter[call[name[self].iface.mainWindow, parameter[]], name[self].iface]] call[name[dialog].show, parameter[]]
keyword[def] identifier[add_petabencana_layer] ( identifier[self] ): literal[string] keyword[from] identifier[safe] . identifier[gui] . identifier[tools] . identifier[peta_bencana_dialog] keyword[import] identifier[PetaBencanaDialog] identifier[dialog] = identifier[PetaBencanaDialog] ( identifier[self] . identifier[iface] . identifier[mainWindow] (), identifier[self] . identifier[iface] ) identifier[dialog] . identifier[show] ()
def add_petabencana_layer(self): """Add petabencana layer to the map. This uses the PetaBencana API to fetch the latest floods in JK. See https://data.petabencana.id/floods """ from safe.gui.tools.peta_bencana_dialog import PetaBencanaDialog dialog = PetaBencanaDialog(self.iface.mainWindow(), self.iface) dialog.show()
def get_items(self, url, container, container_object, local_object):
        """Get objects from a container.

        :param url:
        :param container:
        """
        headers, container_uri = self._return_base_data(
            url=url,
            container=container,
            container_object=container_object
        )

        return self._getter(
            uri=container_uri,
            headers=headers,
            local_object=local_object
        )
def function[get_items, parameter[self, url, container, container_object, local_object]]: constant[Get objects from a container. :param url: :param container: ] <ast.Tuple object at 0x7da2054a5420> assign[=] call[name[self]._return_base_data, parameter[]] return[call[name[self]._getter, parameter[]]]
keyword[def] identifier[get_items] ( identifier[self] , identifier[url] , identifier[container] , identifier[container_object] , identifier[local_object] ): literal[string] identifier[headers] , identifier[container_uri] = identifier[self] . identifier[_return_base_data] ( identifier[url] = identifier[url] , identifier[container] = identifier[container] , identifier[container_object] = identifier[container_object] ) keyword[return] identifier[self] . identifier[_getter] ( identifier[uri] = identifier[container_uri] , identifier[headers] = identifier[headers] , identifier[local_object] = identifier[local_object] )
def get_items(self, url, container, container_object, local_object):
    """Get objects from a container.

    :param url:
    :param container:
    """
    (headers, container_uri) = self._return_base_data(url=url, container=container, container_object=container_object)
    return self._getter(uri=container_uri, headers=headers, local_object=local_object)
def _static_identity(cls, domain, dtype, missing_value, window_safe, ndim, params): """ Return the identity of the Term that would be constructed from the given arguments. Identities that compare equal will cause us to return a cached instance rather than constructing a new one. We do this primarily because it makes dependency resolution easier. This is a classmethod so that it can be called from Term.__new__ to determine whether to produce a new instance. """ return (cls, domain, dtype, missing_value, window_safe, ndim, params)
def function[_static_identity, parameter[cls, domain, dtype, missing_value, window_safe, ndim, params]]: constant[ Return the identity of the Term that would be constructed from the given arguments. Identities that compare equal will cause us to return a cached instance rather than constructing a new one. We do this primarily because it makes dependency resolution easier. This is a classmethod so that it can be called from Term.__new__ to determine whether to produce a new instance. ] return[tuple[[<ast.Name object at 0x7da18bc71ab0>, <ast.Name object at 0x7da18bc732b0>, <ast.Name object at 0x7da18bc70190>, <ast.Name object at 0x7da18bc73820>, <ast.Name object at 0x7da18bc72b60>, <ast.Name object at 0x7da18bc71e40>, <ast.Name object at 0x7da18bc70940>]]]
keyword[def] identifier[_static_identity] ( identifier[cls] , identifier[domain] , identifier[dtype] , identifier[missing_value] , identifier[window_safe] , identifier[ndim] , identifier[params] ): literal[string] keyword[return] ( identifier[cls] , identifier[domain] , identifier[dtype] , identifier[missing_value] , identifier[window_safe] , identifier[ndim] , identifier[params] )
def _static_identity(cls, domain, dtype, missing_value, window_safe, ndim, params): """ Return the identity of the Term that would be constructed from the given arguments. Identities that compare equal will cause us to return a cached instance rather than constructing a new one. We do this primarily because it makes dependency resolution easier. This is a classmethod so that it can be called from Term.__new__ to determine whether to produce a new instance. """ return (cls, domain, dtype, missing_value, window_safe, ndim, params)
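The identity tuple above exists so that Term.__new__ can return a cached instance whenever an equal identity has been seen before. A minimal standard-library sketch of that caching pattern; the Cached class and its fields are illustrative stand-ins, not the original Term machinery:

class Cached(object):
    _cache = {}

    def __new__(cls, domain, dtype, params):
        # The tuple plays the same role as _static_identity above:
        # equal identities resolve to the same cached instance.
        identity = (cls, domain, dtype, params)
        instance = cls._cache.get(identity)
        if instance is None:
            instance = super(Cached, cls).__new__(cls)
            instance.domain, instance.dtype, instance.params = domain, dtype, params
            cls._cache[identity] = instance
        return instance

a = Cached('US', 'float64', (('window', 5),))
b = Cached('US', 'float64', (('window', 5),))
assert a is b  # equal identity -> same object, so dependency graphs dedupe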
def get_attribute(self, name):
    """
    Get (find) an attribute by name.
    @param name: An attribute name.
    @type name: str
    @return: A tuple: the requested (attribute, ancestry).
    @rtype: (L{SchemaObject}, [L{SchemaObject},..])
    """
    for child, ancestry in self.attributes():
        if child.name == name:
            return (child, ancestry)
    return (None, [])
def function[get_attribute, parameter[self, name]]:
    constant[
    Get (find) an attribute by name.
    @param name: An attribute name.
    @type name: str
    @return: A tuple: the requested (attribute, ancestry).
    @rtype: (L{SchemaObject}, [L{SchemaObject},..])
    ]
    for taget[tuple[[<ast.Name object at 0x7da18ede5600>, <ast.Name object at 0x7da18ede48b0>]]] in starred[call[name[self].attributes, parameter[]]] begin[:]
        if compare[name[child].name equal[==] name[name]] begin[:]
            return[tuple[[<ast.Name object at 0x7da18ede4760>, <ast.Name object at 0x7da18ede5a80>]]]
    return[tuple[[<ast.Constant object at 0x7da18ede6aa0>, <ast.List object at 0x7da18ede4400>]]]
keyword[def] identifier[get_attribute] ( identifier[self] , identifier[name] ): literal[string] keyword[for] identifier[child] , identifier[ancestry] keyword[in] identifier[self] . identifier[attributes] (): keyword[if] identifier[child] . identifier[name] == identifier[name] : keyword[return] ( identifier[child] , identifier[ancestry] ) keyword[return] ( keyword[None] ,[])
def get_attribute(self, name):
    """
    Get (find) an attribute by name.
    @param name: An attribute name.
    @type name: str
    @return: A tuple: the requested (attribute, ancestry).
    @rtype: (L{SchemaObject}, [L{SchemaObject},..])
    """
    for (child, ancestry) in self.attributes():
        if child.name == name:
            return (child, ancestry) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
    return (None, [])
def _ipsi(y, tol=1.48e-9, maxiter=10): '''Inverse of psi (digamma) using Newton's method. For the purposes of Dirichlet MLE, since the parameters a[i] must always satisfy a > 0, we define ipsi :: R -> (0,inf).''' y = asanyarray(y, dtype='float') x0 = _piecewise(y, [y >= -2.22, y < -2.22], [(lambda x: exp(x) + 0.5), (lambda x: -1/(x+euler))]) for i in xrange(maxiter): x1 = x0 - (psi(x0) - y)/_trigamma(x0) if norm(x1 - x0) < tol: return x1 x0 = x1 raise Exception( 'Unable to converge in {} iterations, value is {}'.format(maxiter, x1))
def function[_ipsi, parameter[y, tol, maxiter]]: constant[Inverse of psi (digamma) using Newton's method. For the purposes of Dirichlet MLE, since the parameters a[i] must always satisfy a > 0, we define ipsi :: R -> (0,inf).] variable[y] assign[=] call[name[asanyarray], parameter[name[y]]] variable[x0] assign[=] call[name[_piecewise], parameter[name[y], list[[<ast.Compare object at 0x7da20c6c4c10>, <ast.Compare object at 0x7da20c6c5570>]], list[[<ast.Lambda object at 0x7da20c6c7850>, <ast.Lambda object at 0x7da1b06beda0>]]]] for taget[name[i]] in starred[call[name[xrange], parameter[name[maxiter]]]] begin[:] variable[x1] assign[=] binary_operation[name[x0] - binary_operation[binary_operation[call[name[psi], parameter[name[x0]]] - name[y]] / call[name[_trigamma], parameter[name[x0]]]]] if compare[call[name[norm], parameter[binary_operation[name[x1] - name[x0]]]] less[<] name[tol]] begin[:] return[name[x1]] variable[x0] assign[=] name[x1] <ast.Raise object at 0x7da1b06bfac0>
keyword[def] identifier[_ipsi] ( identifier[y] , identifier[tol] = literal[int] , identifier[maxiter] = literal[int] ): literal[string] identifier[y] = identifier[asanyarray] ( identifier[y] , identifier[dtype] = literal[string] ) identifier[x0] = identifier[_piecewise] ( identifier[y] ,[ identifier[y] >=- literal[int] , identifier[y] <- literal[int] ], [( keyword[lambda] identifier[x] : identifier[exp] ( identifier[x] )+ literal[int] ),( keyword[lambda] identifier[x] :- literal[int] /( identifier[x] + identifier[euler] ))]) keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[maxiter] ): identifier[x1] = identifier[x0] -( identifier[psi] ( identifier[x0] )- identifier[y] )/ identifier[_trigamma] ( identifier[x0] ) keyword[if] identifier[norm] ( identifier[x1] - identifier[x0] )< identifier[tol] : keyword[return] identifier[x1] identifier[x0] = identifier[x1] keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[maxiter] , identifier[x1] ))
def _ipsi(y, tol=1.48e-09, maxiter=10): """Inverse of psi (digamma) using Newton's method. For the purposes of Dirichlet MLE, since the parameters a[i] must always satisfy a > 0, we define ipsi :: R -> (0,inf).""" y = asanyarray(y, dtype='float') x0 = _piecewise(y, [y >= -2.22, y < -2.22], [lambda x: exp(x) + 0.5, lambda x: -1 / (x + euler)]) for i in xrange(maxiter): x1 = x0 - (psi(x0) - y) / _trigamma(x0) if norm(x1 - x0) < tol: return x1 # depends on [control=['if'], data=[]] x0 = x1 # depends on [control=['for'], data=[]] raise Exception('Unable to converge in {} iterations, value is {}'.format(maxiter, x1))
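A self-contained version of the same Newton iteration, assuming SciPy and NumPy are available; psi is SciPy's digamma and polygamma(1, x) supplies the trigamma used above:

import numpy as np
from scipy.special import psi, polygamma

def inverse_psi(y, tol=1.48e-9, maxiter=10):
    # Same piecewise initial guess as above (from Minka's Dirichlet MLE notes).
    y = np.asanyarray(y, dtype=float)
    euler = -psi(1.0)  # Euler-Mascheroni constant, ~0.5772
    x = np.where(y >= -2.22, np.exp(y) + 0.5, -1.0 / (y + euler))
    for _ in range(maxiter):
        x_new = x - (psi(x) - y) / polygamma(1, x)
        if np.linalg.norm(x_new - x) < tol:
            return x_new
        x = x_new
    raise RuntimeError('no convergence in %d iterations' % maxiter)

print(inverse_psi(psi(np.array([0.5, 2.0, 7.0]))))  # ~[0.5, 2.0, 7.0]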
def _lookup_namespace(self, symbol, namespace): """Helper for lookup_symbol that only looks up variables in a namespace. Args: symbol: Symbol namespace: pointer into self.namespaces """ for namespace_part in symbol.parts: namespace = namespace.get(namespace_part) if namespace is None: break if not isinstance(namespace, dict): return namespace raise Error('%s not found' % symbol.name)
def function[_lookup_namespace, parameter[self, symbol, namespace]]: constant[Helper for lookup_symbol that only looks up variables in a namespace. Args: symbol: Symbol namespace: pointer into self.namespaces ] for taget[name[namespace_part]] in starred[name[symbol].parts] begin[:] variable[namespace] assign[=] call[name[namespace].get, parameter[name[namespace_part]]] if compare[name[namespace] is constant[None]] begin[:] break if <ast.UnaryOp object at 0x7da1b0cf74f0> begin[:] return[name[namespace]] <ast.Raise object at 0x7da1b0bd9390>
keyword[def] identifier[_lookup_namespace] ( identifier[self] , identifier[symbol] , identifier[namespace] ): literal[string] keyword[for] identifier[namespace_part] keyword[in] identifier[symbol] . identifier[parts] : identifier[namespace] = identifier[namespace] . identifier[get] ( identifier[namespace_part] ) keyword[if] identifier[namespace] keyword[is] keyword[None] : keyword[break] keyword[if] keyword[not] identifier[isinstance] ( identifier[namespace] , identifier[dict] ): keyword[return] identifier[namespace] keyword[raise] identifier[Error] ( literal[string] % identifier[symbol] . identifier[name] )
def _lookup_namespace(self, symbol, namespace): """Helper for lookup_symbol that only looks up variables in a namespace. Args: symbol: Symbol namespace: pointer into self.namespaces """ for namespace_part in symbol.parts: namespace = namespace.get(namespace_part) if namespace is None: break # depends on [control=['if'], data=[]] if not isinstance(namespace, dict): return namespace # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['namespace_part']] raise Error('%s not found' % symbol.name)
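The descent itself is easy to reproduce standalone; this sketch assumes a symbol is just a dotted name split into parts, rather than the richer Symbol object used above:

def lookup(parts, namespace):
    # Walk one mapping level per name part; the first non-dict value is the hit.
    node = namespace
    for part in parts:
        node = node.get(part)
        if node is None:
            break
        if not isinstance(node, dict):
            return node
    raise KeyError('.'.join(parts) + ' not found')

ns = {'os': {'path': {'sep': '/'}}}
print(lookup(['os', 'path', 'sep'], ns))  # -> '/'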
def sync_heater_control(self, device_id, fan_status=None, power_status=None):
    """Control the heater's fan and power status synchronously."""
    loop = asyncio.get_event_loop()
    task = loop.create_task(self.heater_control(device_id, fan_status, power_status))
    loop.run_until_complete(task)
def function[sync_heater_control, parameter[self, device_id, fan_status, power_status]]:
    constant[Control the heater's fan and power status synchronously.]
    variable[loop] assign[=] call[name[asyncio].get_event_loop, parameter[]]
    variable[task] assign[=] call[name[loop].create_task, parameter[call[name[self].heater_control, parameter[name[device_id], name[fan_status], name[power_status]]]]]
    call[name[loop].run_until_complete, parameter[name[task]]]
keyword[def] identifier[sync_heater_control] ( identifier[self] , identifier[device_id] , identifier[fan_status] = keyword[None] , identifier[power_status] = keyword[None] ): literal[string] identifier[loop] = identifier[asyncio] . identifier[get_event_loop] () identifier[task] = identifier[loop] . identifier[create_task] ( identifier[self] . identifier[heater_control] ( identifier[device_id] , identifier[fan_status] , identifier[power_status] )) identifier[loop] . identifier[run_until_complete] ( identifier[task] )
def sync_heater_control(self, device_id, fan_status=None, power_status=None):
    """Control the heater's fan and power status synchronously.""" 
    loop = asyncio.get_event_loop()
    task = loop.create_task(self.heater_control(device_id, fan_status, power_status))
    loop.run_until_complete(task)
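On Python 3.7+ the same sync-over-async bridge is usually written with asyncio.run, which creates, runs, and closes the loop in one call. A sketch with a stand-in coroutine (the real awaitable device call is assumed, not shown):

import asyncio

async def heater_control(device_id, fan_status=None, power_status=None):
    await asyncio.sleep(0)  # placeholder for the real awaitable device call
    return device_id, fan_status, power_status

def sync_heater_control(device_id, fan_status=None, power_status=None):
    return asyncio.run(heater_control(device_id, fan_status, power_status))

print(sync_heater_control('heater-1', fan_status=1, power_status=1))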
def length(symlink_components): # type: (List[bytes]) -> int ''' Static method to return the length of the Rock Ridge Symbolic Link record. Parameters: symlink_components - A list containing a string for each of the symbolic link components. Returns: The length of this record in bytes. ''' length = RRSLRecord.header_length() for comp in symlink_components: length += RRSLRecord.Component.length(comp) return length
def function[length, parameter[symlink_components]]: constant[ Static method to return the length of the Rock Ridge Symbolic Link record. Parameters: symlink_components - A list containing a string for each of the symbolic link components. Returns: The length of this record in bytes. ] variable[length] assign[=] call[name[RRSLRecord].header_length, parameter[]] for taget[name[comp]] in starred[name[symlink_components]] begin[:] <ast.AugAssign object at 0x7da18bc728f0> return[name[length]]
keyword[def] identifier[length] ( identifier[symlink_components] ): literal[string] identifier[length] = identifier[RRSLRecord] . identifier[header_length] () keyword[for] identifier[comp] keyword[in] identifier[symlink_components] : identifier[length] += identifier[RRSLRecord] . identifier[Component] . identifier[length] ( identifier[comp] ) keyword[return] identifier[length]
def length(symlink_components): # type: (List[bytes]) -> int '\n Static method to return the length of the Rock Ridge Symbolic Link\n record.\n\n Parameters:\n symlink_components - A list containing a string for each of the\n symbolic link components.\n Returns:\n The length of this record in bytes.\n ' length = RRSLRecord.header_length() for comp in symlink_components: length += RRSLRecord.Component.length(comp) # depends on [control=['for'], data=['comp']] return length
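A standalone sketch of the same arithmetic under assumed Rock Ridge constants: a 5-byte SL header, and per component a 2-byte component header plus its content, with '.', '..' and '/' encoded by flags alone. The byte counts are assumptions about the on-disk format, not values read out of the original module:

def sl_record_length(symlink_components):
    length = 5  # assumed: 'SL' signature (2) + length (1) + version (1) + flags (1)
    for comp in symlink_components:
        if comp in (b'.', b'..', b'/'):
            length += 2              # assumed flag-only component: flags (1) + length (1)
        else:
            length += 2 + len(comp)  # flags (1) + length (1) + content bytes
    return length

print(sl_record_length([b'..', b'usr', b'bin']))  # 5 + 2 + 5 + 5 = 17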
def QueueClaimRecords(self, queue_id, item_rdf_type, limit=10000, timeout="30m", start_time=None, record_filter=lambda x: False, max_filtered=1000): """Claims records from a queue. See server/aff4_objects/queue.py.""" now = rdfvalue.RDFDatetime.Now() expiration = rdfvalue.RDFDatetime.Now() + rdfvalue.Duration(timeout) after_urn = None if start_time: after_urn, _, _ = DataStore.CollectionMakeURN( queue_id, start_time.AsMicrosecondsSinceEpoch(), 0, subpath="Records") results = [] filtered_count = 0 for subject, values in DB.ScanAttributes( str(queue_id.Add("Records")), [DataStore.COLLECTION_ATTRIBUTE, DataStore.QUEUE_LOCK_ATTRIBUTE], max_records=4 * limit, after_urn=after_urn): if DataStore.COLLECTION_ATTRIBUTE not in values: # Unlikely case, but could happen if, say, a thread called RefreshClaims # so late that another thread already deleted the record. Go ahead and # clean this up. self.DeleteAttributes(subject, [DataStore.QUEUE_LOCK_ATTRIBUTE]) continue if DataStore.QUEUE_LOCK_ATTRIBUTE in values: timestamp = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch( values[DataStore.QUEUE_LOCK_ATTRIBUTE][1]) if timestamp > now: continue rdf_value = item_rdf_type.FromSerializedString( values[DataStore.COLLECTION_ATTRIBUTE][1]) if record_filter(rdf_value): filtered_count += 1 if max_filtered and filtered_count >= max_filtered: break continue results.append( Record( queue_id=queue_id, timestamp=values[DataStore.COLLECTION_ATTRIBUTE][0], suffix=int(subject[-6:], 16), subpath="Records", value=rdf_value)) self.Set(subject, DataStore.QUEUE_LOCK_ATTRIBUTE, expiration) filtered_count = 0 if len(results) >= limit: break return results
def function[QueueClaimRecords, parameter[self, queue_id, item_rdf_type, limit, timeout, start_time, record_filter, max_filtered]]: constant[Claims records from a queue. See server/aff4_objects/queue.py.] variable[now] assign[=] call[name[rdfvalue].RDFDatetime.Now, parameter[]] variable[expiration] assign[=] binary_operation[call[name[rdfvalue].RDFDatetime.Now, parameter[]] + call[name[rdfvalue].Duration, parameter[name[timeout]]]] variable[after_urn] assign[=] constant[None] if name[start_time] begin[:] <ast.Tuple object at 0x7da1b1cc0cd0> assign[=] call[name[DataStore].CollectionMakeURN, parameter[name[queue_id], call[name[start_time].AsMicrosecondsSinceEpoch, parameter[]], constant[0]]] variable[results] assign[=] list[[]] variable[filtered_count] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da1b1cc04f0>, <ast.Name object at 0x7da1b1cc1ff0>]]] in starred[call[name[DB].ScanAttributes, parameter[call[name[str], parameter[call[name[queue_id].Add, parameter[constant[Records]]]]], list[[<ast.Attribute object at 0x7da1b1cc3940>, <ast.Attribute object at 0x7da1b1cc1900>]]]]] begin[:] if compare[name[DataStore].COLLECTION_ATTRIBUTE <ast.NotIn object at 0x7da2590d7190> name[values]] begin[:] call[name[self].DeleteAttributes, parameter[name[subject], list[[<ast.Attribute object at 0x7da1b1cc3670>]]]] continue if compare[name[DataStore].QUEUE_LOCK_ATTRIBUTE in name[values]] begin[:] variable[timestamp] assign[=] call[name[rdfvalue].RDFDatetime.FromMicrosecondsSinceEpoch, parameter[call[call[name[values]][name[DataStore].QUEUE_LOCK_ATTRIBUTE]][constant[1]]]] if compare[name[timestamp] greater[>] name[now]] begin[:] continue variable[rdf_value] assign[=] call[name[item_rdf_type].FromSerializedString, parameter[call[call[name[values]][name[DataStore].COLLECTION_ATTRIBUTE]][constant[1]]]] if call[name[record_filter], parameter[name[rdf_value]]] begin[:] <ast.AugAssign object at 0x7da1b1cc3ac0> if <ast.BoolOp object at 0x7da1b1cc3010> begin[:] break continue call[name[results].append, parameter[call[name[Record], parameter[]]]] call[name[self].Set, parameter[name[subject], name[DataStore].QUEUE_LOCK_ATTRIBUTE, name[expiration]]] variable[filtered_count] assign[=] constant[0] if compare[call[name[len], parameter[name[results]]] greater_or_equal[>=] name[limit]] begin[:] break return[name[results]]
keyword[def] identifier[QueueClaimRecords] ( identifier[self] , identifier[queue_id] , identifier[item_rdf_type] , identifier[limit] = literal[int] , identifier[timeout] = literal[string] , identifier[start_time] = keyword[None] , identifier[record_filter] = keyword[lambda] identifier[x] : keyword[False] , identifier[max_filtered] = literal[int] ): literal[string] identifier[now] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] () identifier[expiration] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] ()+ identifier[rdfvalue] . identifier[Duration] ( identifier[timeout] ) identifier[after_urn] = keyword[None] keyword[if] identifier[start_time] : identifier[after_urn] , identifier[_] , identifier[_] = identifier[DataStore] . identifier[CollectionMakeURN] ( identifier[queue_id] , identifier[start_time] . identifier[AsMicrosecondsSinceEpoch] (), literal[int] , identifier[subpath] = literal[string] ) identifier[results] =[] identifier[filtered_count] = literal[int] keyword[for] identifier[subject] , identifier[values] keyword[in] identifier[DB] . identifier[ScanAttributes] ( identifier[str] ( identifier[queue_id] . identifier[Add] ( literal[string] )), [ identifier[DataStore] . identifier[COLLECTION_ATTRIBUTE] , identifier[DataStore] . identifier[QUEUE_LOCK_ATTRIBUTE] ], identifier[max_records] = literal[int] * identifier[limit] , identifier[after_urn] = identifier[after_urn] ): keyword[if] identifier[DataStore] . identifier[COLLECTION_ATTRIBUTE] keyword[not] keyword[in] identifier[values] : identifier[self] . identifier[DeleteAttributes] ( identifier[subject] ,[ identifier[DataStore] . identifier[QUEUE_LOCK_ATTRIBUTE] ]) keyword[continue] keyword[if] identifier[DataStore] . identifier[QUEUE_LOCK_ATTRIBUTE] keyword[in] identifier[values] : identifier[timestamp] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[FromMicrosecondsSinceEpoch] ( identifier[values] [ identifier[DataStore] . identifier[QUEUE_LOCK_ATTRIBUTE] ][ literal[int] ]) keyword[if] identifier[timestamp] > identifier[now] : keyword[continue] identifier[rdf_value] = identifier[item_rdf_type] . identifier[FromSerializedString] ( identifier[values] [ identifier[DataStore] . identifier[COLLECTION_ATTRIBUTE] ][ literal[int] ]) keyword[if] identifier[record_filter] ( identifier[rdf_value] ): identifier[filtered_count] += literal[int] keyword[if] identifier[max_filtered] keyword[and] identifier[filtered_count] >= identifier[max_filtered] : keyword[break] keyword[continue] identifier[results] . identifier[append] ( identifier[Record] ( identifier[queue_id] = identifier[queue_id] , identifier[timestamp] = identifier[values] [ identifier[DataStore] . identifier[COLLECTION_ATTRIBUTE] ][ literal[int] ], identifier[suffix] = identifier[int] ( identifier[subject] [- literal[int] :], literal[int] ), identifier[subpath] = literal[string] , identifier[value] = identifier[rdf_value] )) identifier[self] . identifier[Set] ( identifier[subject] , identifier[DataStore] . identifier[QUEUE_LOCK_ATTRIBUTE] , identifier[expiration] ) identifier[filtered_count] = literal[int] keyword[if] identifier[len] ( identifier[results] )>= identifier[limit] : keyword[break] keyword[return] identifier[results]
def QueueClaimRecords(self, queue_id, item_rdf_type, limit=10000, timeout='30m', start_time=None, record_filter=lambda x: False, max_filtered=1000): """Claims records from a queue. See server/aff4_objects/queue.py.""" now = rdfvalue.RDFDatetime.Now() expiration = rdfvalue.RDFDatetime.Now() + rdfvalue.Duration(timeout) after_urn = None if start_time: (after_urn, _, _) = DataStore.CollectionMakeURN(queue_id, start_time.AsMicrosecondsSinceEpoch(), 0, subpath='Records') # depends on [control=['if'], data=[]] results = [] filtered_count = 0 for (subject, values) in DB.ScanAttributes(str(queue_id.Add('Records')), [DataStore.COLLECTION_ATTRIBUTE, DataStore.QUEUE_LOCK_ATTRIBUTE], max_records=4 * limit, after_urn=after_urn): if DataStore.COLLECTION_ATTRIBUTE not in values: # Unlikely case, but could happen if, say, a thread called RefreshClaims # so late that another thread already deleted the record. Go ahead and # clean this up. self.DeleteAttributes(subject, [DataStore.QUEUE_LOCK_ATTRIBUTE]) continue # depends on [control=['if'], data=[]] if DataStore.QUEUE_LOCK_ATTRIBUTE in values: timestamp = rdfvalue.RDFDatetime.FromMicrosecondsSinceEpoch(values[DataStore.QUEUE_LOCK_ATTRIBUTE][1]) if timestamp > now: continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['values']] rdf_value = item_rdf_type.FromSerializedString(values[DataStore.COLLECTION_ATTRIBUTE][1]) if record_filter(rdf_value): filtered_count += 1 if max_filtered and filtered_count >= max_filtered: break # depends on [control=['if'], data=[]] continue # depends on [control=['if'], data=[]] results.append(Record(queue_id=queue_id, timestamp=values[DataStore.COLLECTION_ATTRIBUTE][0], suffix=int(subject[-6:], 16), subpath='Records', value=rdf_value)) self.Set(subject, DataStore.QUEUE_LOCK_ATTRIBUTE, expiration) filtered_count = 0 if len(results) >= limit: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return results
def Get(self): """Fetch file's data and return proper File object.""" args = vfs_pb2.ApiGetFileDetailsArgs( client_id=self.client_id, file_path=self.path) data = self._context.SendRequest("GetFileDetails", args).file return File(client_id=self.client_id, data=data, context=self._context)
def function[Get, parameter[self]]: constant[Fetch file's data and return proper File object.] variable[args] assign[=] call[name[vfs_pb2].ApiGetFileDetailsArgs, parameter[]] variable[data] assign[=] call[name[self]._context.SendRequest, parameter[constant[GetFileDetails], name[args]]].file return[call[name[File], parameter[]]]
keyword[def] identifier[Get] ( identifier[self] ): literal[string] identifier[args] = identifier[vfs_pb2] . identifier[ApiGetFileDetailsArgs] ( identifier[client_id] = identifier[self] . identifier[client_id] , identifier[file_path] = identifier[self] . identifier[path] ) identifier[data] = identifier[self] . identifier[_context] . identifier[SendRequest] ( literal[string] , identifier[args] ). identifier[file] keyword[return] identifier[File] ( identifier[client_id] = identifier[self] . identifier[client_id] , identifier[data] = identifier[data] , identifier[context] = identifier[self] . identifier[_context] )
def Get(self): """Fetch file's data and return proper File object.""" args = vfs_pb2.ApiGetFileDetailsArgs(client_id=self.client_id, file_path=self.path) data = self._context.SendRequest('GetFileDetails', args).file return File(client_id=self.client_id, data=data, context=self._context)
def load_skills_data() -> dict: """Contains info on how skills should be updated""" skills_data_file = expanduser('~/.mycroft/skills.json') if isfile(skills_data_file): try: with open(skills_data_file) as f: return json.load(f) except json.JSONDecodeError: return {} else: return {}
def function[load_skills_data, parameter[]]: constant[Contains info on how skills should be updated] variable[skills_data_file] assign[=] call[name[expanduser], parameter[constant[~/.mycroft/skills.json]]] if call[name[isfile], parameter[name[skills_data_file]]] begin[:] <ast.Try object at 0x7da204621120>
keyword[def] identifier[load_skills_data] ()-> identifier[dict] : literal[string] identifier[skills_data_file] = identifier[expanduser] ( literal[string] ) keyword[if] identifier[isfile] ( identifier[skills_data_file] ): keyword[try] : keyword[with] identifier[open] ( identifier[skills_data_file] ) keyword[as] identifier[f] : keyword[return] identifier[json] . identifier[load] ( identifier[f] ) keyword[except] identifier[json] . identifier[JSONDecodeError] : keyword[return] {} keyword[else] : keyword[return] {}
def load_skills_data() -> dict: """Contains info on how skills should be updated""" skills_data_file = expanduser('~/.mycroft/skills.json') if isfile(skills_data_file): try: with open(skills_data_file) as f: return json.load(f) # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]] except json.JSONDecodeError: return {} # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: return {}
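The same read-or-empty pattern generalizes to any JSON settings file; a small sketch with an illustrative path:

import json
from os.path import expanduser, isfile

def load_json_or_empty(path='~/.myapp/settings.json'):
    path = expanduser(path)
    if not isfile(path):
        return {}
    try:
        with open(path) as f:
            return json.load(f)
    except json.JSONDecodeError:
        return {}  # corrupt file: fall back to empty rather than crash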
def map_to_precursors_on_fly(seqs, names, loci, args):
    """map sequences to precursors with the franpr algorithm to avoid writing to disk"""
    precursor = precursor_sequence(loci, args.ref).upper()
    dat = dict()
    for s, n in itertools.izip(seqs, names):
        res = pyMatch.Match(precursor, str(s), 1, 3)
        if res > -1:
            dat[n] = [res, res + len(s)]
    logger.debug("mapped in %s: %s out of %s" % (loci, len(dat), len(seqs)))
    return dat
def function[map_to_precursors_on_fly, parameter[seqs, names, loci, args]]:
    constant[map sequences to precursors with the franpr algorithm to avoid writing to disk]
    variable[precursor] assign[=] call[call[name[precursor_sequence], parameter[name[loci], name[args].ref]].upper, parameter[]]
    variable[dat] assign[=] call[name[dict], parameter[]]
    for taget[tuple[[<ast.Name object at 0x7da20c992a40>, <ast.Name object at 0x7da20c992530>]]] in starred[call[name[itertools].izip, parameter[name[seqs], name[names]]]] begin[:]
        variable[res] assign[=] call[name[pyMatch].Match, parameter[name[precursor], call[name[str], parameter[name[s]]], constant[1], constant[3]]]
        if compare[name[res] greater[>] <ast.UnaryOp object at 0x7da20c992fe0>] begin[:]
            call[name[dat]][name[n]] assign[=] list[[<ast.Name object at 0x7da20c9929b0>, <ast.BinOp object at 0x7da20c993010>]]
    call[name[logger].debug, parameter[binary_operation[constant[mapped in %s: %s out of %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0383970>, <ast.Call object at 0x7da1b03810f0>, <ast.Call object at 0x7da1b03421a0>]]]]]
    return[name[dat]]
keyword[def] identifier[map_to_precursors_on_fly] ( identifier[seqs] , identifier[names] , identifier[loci] , identifier[args] ): literal[string] identifier[precursor] = identifier[precursor_sequence] ( identifier[loci] , identifier[args] . identifier[ref] ). identifier[upper] () identifier[dat] = identifier[dict] () keyword[for] identifier[s] , identifier[n] keyword[in] identifier[itertools] . identifier[izip] ( identifier[seqs] , identifier[names] ): identifier[res] = identifier[pyMatch] . identifier[Match] ( identifier[precursor] , identifier[str] ( identifier[s] ), literal[int] , literal[int] ) keyword[if] identifier[res] >- literal[int] : identifier[dat] [ identifier[n] ]=[ identifier[res] , identifier[res] + identifier[len] ( identifier[s] )] identifier[logger] . identifier[debug] ( literal[string] %( identifier[loci] , identifier[len] ( identifier[dat] ), identifier[len] ( identifier[seqs] ))) keyword[return] identifier[dat]
def map_to_precursors_on_fly(seqs, names, loci, args):
    """map sequences to precursors with the franpr algorithm to avoid writing to disk"""
    precursor = precursor_sequence(loci, args.ref).upper()
    dat = dict()
    for (s, n) in itertools.izip(seqs, names):
        res = pyMatch.Match(precursor, str(s), 1, 3)
        if res > -1:
            dat[n] = [res, res + len(s)] # depends on [control=['if'], data=['res']] # depends on [control=['for'], data=[]]
    logger.debug('mapped in %s: %s out of %s' % (loci, len(dat), len(seqs)))
    return dat
def time_delta(self, end_datetime=None): """ Get a timedelta object """ start_datetime = self._parse_start_datetime('now') end_datetime = self._parse_end_datetime(end_datetime) seconds = end_datetime - start_datetime ts = self.generator.random.randint(*sorted([0, seconds])) return timedelta(seconds=ts)
def function[time_delta, parameter[self, end_datetime]]: constant[ Get a timedelta object ] variable[start_datetime] assign[=] call[name[self]._parse_start_datetime, parameter[constant[now]]] variable[end_datetime] assign[=] call[name[self]._parse_end_datetime, parameter[name[end_datetime]]] variable[seconds] assign[=] binary_operation[name[end_datetime] - name[start_datetime]] variable[ts] assign[=] call[name[self].generator.random.randint, parameter[<ast.Starred object at 0x7da18dc9a440>]] return[call[name[timedelta], parameter[]]]
keyword[def] identifier[time_delta] ( identifier[self] , identifier[end_datetime] = keyword[None] ): literal[string] identifier[start_datetime] = identifier[self] . identifier[_parse_start_datetime] ( literal[string] ) identifier[end_datetime] = identifier[self] . identifier[_parse_end_datetime] ( identifier[end_datetime] ) identifier[seconds] = identifier[end_datetime] - identifier[start_datetime] identifier[ts] = identifier[self] . identifier[generator] . identifier[random] . identifier[randint] (* identifier[sorted] ([ literal[int] , identifier[seconds] ])) keyword[return] identifier[timedelta] ( identifier[seconds] = identifier[ts] )
def time_delta(self, end_datetime=None): """ Get a timedelta object """ start_datetime = self._parse_start_datetime('now') end_datetime = self._parse_end_datetime(end_datetime) seconds = end_datetime - start_datetime ts = self.generator.random.randint(*sorted([0, seconds])) return timedelta(seconds=ts)
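Assuming this is Faker's date/time provider method, typical use looks like the following; the '+30d' shorthand is one of the relative end_datetime formats the provider is expected to parse:

from faker import Faker

fake = Faker()
delta = fake.time_delta(end_datetime='+30d')  # timedelta from now to ~30 days out
print(delta.total_seconds())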
def Query(r, what, fields, qfilter=None): """ Retrieves information about resources. @type what: string @param what: Resource name, one of L{constants.QR_VIA_RAPI} @type fields: list of string @param fields: Requested fields @type qfilter: None or list @param qfilter: Query filter @rtype: string @return: job id """ body = { "fields": fields, } if qfilter is not None: body["qfilter"] = body["filter"] = qfilter return r.request("put", "/2/query/%s" % what, content=body)
def function[Query, parameter[r, what, fields, qfilter]]: constant[ Retrieves information about resources. @type what: string @param what: Resource name, one of L{constants.QR_VIA_RAPI} @type fields: list of string @param fields: Requested fields @type qfilter: None or list @param qfilter: Query filter @rtype: string @return: job id ] variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da18f00f6a0>], [<ast.Name object at 0x7da18f00e980>]] if compare[name[qfilter] is_not constant[None]] begin[:] call[name[body]][constant[qfilter]] assign[=] name[qfilter] return[call[name[r].request, parameter[constant[put], binary_operation[constant[/2/query/%s] <ast.Mod object at 0x7da2590d6920> name[what]]]]]
keyword[def] identifier[Query] ( identifier[r] , identifier[what] , identifier[fields] , identifier[qfilter] = keyword[None] ): literal[string] identifier[body] ={ literal[string] : identifier[fields] , } keyword[if] identifier[qfilter] keyword[is] keyword[not] keyword[None] : identifier[body] [ literal[string] ]= identifier[body] [ literal[string] ]= identifier[qfilter] keyword[return] identifier[r] . identifier[request] ( literal[string] , literal[string] % identifier[what] , identifier[content] = identifier[body] )
def Query(r, what, fields, qfilter=None): """ Retrieves information about resources. @type what: string @param what: Resource name, one of L{constants.QR_VIA_RAPI} @type fields: list of string @param fields: Requested fields @type qfilter: None or list @param qfilter: Query filter @rtype: string @return: job id """ body = {'fields': fields} if qfilter is not None: body['qfilter'] = body['filter'] = qfilter # depends on [control=['if'], data=['qfilter']] return r.request('put', '/2/query/%s' % what, content=body)
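A usage sketch for the Query helper above; FakeClient is a stand-in for the Ganeti RAPI client, and the list-style qfilter shape is an assumption about Ganeti's query-filter syntax:

class FakeClient(object):
    # Minimal stand-in exposing the .request() the helper expects.
    def request(self, method, path, content=None):
        return {'method': method, 'path': path, 'body': content}

r = FakeClient()
print(Query(r, 'instance', ['name', 'status'],
            qfilter=['=', 'status', 'running']))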
def to_dict(self):
    """Render a MessageElement as a Python dict.

    :return: Python dict representation
    :rtype: dict
    """
    obj_dict = super(Table, self).to_dict()
    rows_dict = [r.to_dict() for r in self.rows]
    child_dict = {
        'type': self.__class__.__name__,
        'caption': self.caption,
        'rows': rows_dict
    }
    obj_dict.update(child_dict)
    return obj_dict
def function[to_dict, parameter[self]]:
    constant[Render a MessageElement as a Python dict.

    :return: Python dict representation
    :rtype: dict
    ]
    variable[obj_dict] assign[=] call[call[name[super], parameter[name[Table], name[self]]].to_dict, parameter[]]
    variable[rows_dict] assign[=] <ast.ListComp object at 0x7da204622b00>
    variable[child_dict] assign[=] dictionary[[<ast.Constant object at 0x7da2046211e0>, <ast.Constant object at 0x7da204621990>, <ast.Constant object at 0x7da204623d90>], [<ast.Attribute object at 0x7da204621d80>, <ast.Attribute object at 0x7da2046218a0>, <ast.Name object at 0x7da204623760>]]
    call[name[obj_dict].update, parameter[name[child_dict]]]
    return[name[obj_dict]]
keyword[def] identifier[to_dict] ( identifier[self] ): literal[string] identifier[obj_dict] = identifier[super] ( identifier[Table] , identifier[self] ). identifier[to_dict] () identifier[rows_dict] =[ identifier[r] . identifier[to_dict] () keyword[for] identifier[r] keyword[in] identifier[self] . identifier[rows] ] identifier[child_dict] ={ literal[string] : identifier[self] . identifier[__class__] . identifier[__name__] , literal[string] : identifier[self] . identifier[caption] , literal[string] : identifier[rows_dict] } identifier[obj_dict] . identifier[update] ( identifier[child_dict] ) keyword[return] identifier[obj_dict]
def to_dict(self):
    """Render a MessageElement as a Python dict.

    :return: Python dict representation
    :rtype: dict
    """
    obj_dict = super(Table, self).to_dict()
    rows_dict = [r.to_dict() for r in self.rows]
    child_dict = {'type': self.__class__.__name__, 'caption': self.caption, 'rows': rows_dict}
    obj_dict.update(child_dict)
    return obj_dict
def descendents(self): """Iterate over all descendent terms""" for c in self.children: yield c for d in c.descendents: yield d
def function[descendents, parameter[self]]: constant[Iterate over all descendent terms] for taget[name[c]] in starred[name[self].children] begin[:] <ast.Yield object at 0x7da20c6e61a0> for taget[name[d]] in starred[name[c].descendents] begin[:] <ast.Yield object at 0x7da20c6e6530>
keyword[def] identifier[descendents] ( identifier[self] ): literal[string] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[children] : keyword[yield] identifier[c] keyword[for] identifier[d] keyword[in] identifier[c] . identifier[descendents] : keyword[yield] identifier[d]
def descendents(self): """Iterate over all descendent terms""" for c in self.children: yield c for d in c.descendents: yield d # depends on [control=['for'], data=['d']] # depends on [control=['for'], data=['c']]
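A runnable version of the same pre-order recursion; the Term class here is a toy stand-in, and exposing descendents as a property is an assumption:

class Term(object):
    def __init__(self, name, children=()):
        self.name = name
        self.children = list(children)

    @property
    def descendents(self):
        for c in self.children:
            yield c                    # yield the child itself...
            yield from c.descendents   # ...then everything beneath it

root = Term('a', [Term('b', [Term('c')]), Term('d')])
print([t.name for t in root.descendents])  # ['b', 'c', 'd']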
def del_map(self, event, handle, *args): """ Remove a mapping like event -(arg0, arg1, arg2, ...)-> handle. """ if args: self.base[event].remove((handle, args)) else: self.base[event] = [ind for ind in self.base[event] if ind[0] != handle]
def function[del_map, parameter[self, event, handle]]: constant[ Remove a mapping like event -(arg0, arg1, arg2, ...)-> handle. ] if name[args] begin[:] call[call[name[self].base][name[event]].remove, parameter[tuple[[<ast.Name object at 0x7da1b24c26e0>, <ast.Name object at 0x7da1b24c29b0>]]]]
keyword[def] identifier[del_map] ( identifier[self] , identifier[event] , identifier[handle] ,* identifier[args] ): literal[string] keyword[if] identifier[args] : identifier[self] . identifier[base] [ identifier[event] ]. identifier[remove] (( identifier[handle] , identifier[args] )) keyword[else] : identifier[self] . identifier[base] [ identifier[event] ]=[ identifier[ind] keyword[for] identifier[ind] keyword[in] identifier[self] . identifier[base] [ identifier[event] ] keyword[if] identifier[ind] [ literal[int] ]!= identifier[handle] ]
def del_map(self, event, handle, *args): """ Remove a mapping like event -(arg0, arg1, arg2, ...)-> handle. """ if args: self.base[event].remove((handle, args)) # depends on [control=['if'], data=[]] else: self.base[event] = [ind for ind in self.base[event] if ind[0] != handle]
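In context, self.base maps event names to (handle, args) pairs; a minimal registry makes the two removal modes concrete:

from collections import defaultdict

class EventMap(object):
    def __init__(self):
        self.base = defaultdict(list)

    def add_map(self, event, handle, *args):
        self.base[event].append((handle, args))

    def del_map(self, event, handle, *args):
        if args:
            # Remove only the exact (handle, args) mapping.
            self.base[event].remove((handle, args))
        else:
            # No args given: drop every mapping with this handle.
            self.base[event] = [ind for ind in self.base[event] if ind[0] != handle]

em = EventMap()
em.add_map('LOAD', print, 'hello')
em.del_map('LOAD', print)
print(em.base['LOAD'])  # []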
def extract(context, data): """Extract a compressed file""" with context.http.rehash(data) as result: file_path = result.file_path content_type = result.content_type extract_dir = random_filename(context.work_path) if content_type in ZIP_MIME_TYPES: extracted_files = extract_zip(file_path, extract_dir) elif content_type in TAR_MIME_TYPES: extracted_files = extract_tar(file_path, extract_dir, context) elif content_type in SEVENZIP_MIME_TYPES: extracted_files = extract_7zip(file_path, extract_dir, context) else: context.log.warning( "Unsupported archive content type: %s", content_type ) return extracted_content_hashes = {} for path in extracted_files: relative_path = os.path.relpath(path, extract_dir) content_hash = context.store_file(path) extracted_content_hashes[relative_path] = content_hash data['content_hash'] = content_hash data['file_name'] = relative_path context.emit(data=data.copy())
def function[extract, parameter[context, data]]: constant[Extract a compressed file] with call[name[context].http.rehash, parameter[name[data]]] begin[:] variable[file_path] assign[=] name[result].file_path variable[content_type] assign[=] name[result].content_type variable[extract_dir] assign[=] call[name[random_filename], parameter[name[context].work_path]] if compare[name[content_type] in name[ZIP_MIME_TYPES]] begin[:] variable[extracted_files] assign[=] call[name[extract_zip], parameter[name[file_path], name[extract_dir]]] variable[extracted_content_hashes] assign[=] dictionary[[], []] for taget[name[path]] in starred[name[extracted_files]] begin[:] variable[relative_path] assign[=] call[name[os].path.relpath, parameter[name[path], name[extract_dir]]] variable[content_hash] assign[=] call[name[context].store_file, parameter[name[path]]] call[name[extracted_content_hashes]][name[relative_path]] assign[=] name[content_hash] call[name[data]][constant[content_hash]] assign[=] name[content_hash] call[name[data]][constant[file_name]] assign[=] name[relative_path] call[name[context].emit, parameter[]]
keyword[def] identifier[extract] ( identifier[context] , identifier[data] ): literal[string] keyword[with] identifier[context] . identifier[http] . identifier[rehash] ( identifier[data] ) keyword[as] identifier[result] : identifier[file_path] = identifier[result] . identifier[file_path] identifier[content_type] = identifier[result] . identifier[content_type] identifier[extract_dir] = identifier[random_filename] ( identifier[context] . identifier[work_path] ) keyword[if] identifier[content_type] keyword[in] identifier[ZIP_MIME_TYPES] : identifier[extracted_files] = identifier[extract_zip] ( identifier[file_path] , identifier[extract_dir] ) keyword[elif] identifier[content_type] keyword[in] identifier[TAR_MIME_TYPES] : identifier[extracted_files] = identifier[extract_tar] ( identifier[file_path] , identifier[extract_dir] , identifier[context] ) keyword[elif] identifier[content_type] keyword[in] identifier[SEVENZIP_MIME_TYPES] : identifier[extracted_files] = identifier[extract_7zip] ( identifier[file_path] , identifier[extract_dir] , identifier[context] ) keyword[else] : identifier[context] . identifier[log] . identifier[warning] ( literal[string] , identifier[content_type] ) keyword[return] identifier[extracted_content_hashes] ={} keyword[for] identifier[path] keyword[in] identifier[extracted_files] : identifier[relative_path] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[path] , identifier[extract_dir] ) identifier[content_hash] = identifier[context] . identifier[store_file] ( identifier[path] ) identifier[extracted_content_hashes] [ identifier[relative_path] ]= identifier[content_hash] identifier[data] [ literal[string] ]= identifier[content_hash] identifier[data] [ literal[string] ]= identifier[relative_path] identifier[context] . identifier[emit] ( identifier[data] = identifier[data] . identifier[copy] ())
def extract(context, data): """Extract a compressed file""" with context.http.rehash(data) as result: file_path = result.file_path content_type = result.content_type extract_dir = random_filename(context.work_path) if content_type in ZIP_MIME_TYPES: extracted_files = extract_zip(file_path, extract_dir) # depends on [control=['if'], data=[]] elif content_type in TAR_MIME_TYPES: extracted_files = extract_tar(file_path, extract_dir, context) # depends on [control=['if'], data=[]] elif content_type in SEVENZIP_MIME_TYPES: extracted_files = extract_7zip(file_path, extract_dir, context) # depends on [control=['if'], data=[]] else: context.log.warning('Unsupported archive content type: %s', content_type) return extracted_content_hashes = {} for path in extracted_files: relative_path = os.path.relpath(path, extract_dir) content_hash = context.store_file(path) extracted_content_hashes[relative_path] = content_hash data['content_hash'] = content_hash data['file_name'] = relative_path context.emit(data=data.copy()) # depends on [control=['for'], data=['path']] # depends on [control=['with'], data=['result']]
def as_dict(self): """ Pre-serialisation of the meta data """ drepr = {} drepr["name"] = self.name drepr["time"] = self.time # error pre-serialisation drepr["errors"] = [str(err) for err in self.errors] # warning pre-serialisation drepr["warnings"] = [str(warn) for warn in self.warnings] return drepr
def function[as_dict, parameter[self]]: constant[ Pre-serialisation of the meta data ] variable[drepr] assign[=] dictionary[[], []] call[name[drepr]][constant[name]] assign[=] name[self].name call[name[drepr]][constant[time]] assign[=] name[self].time call[name[drepr]][constant[errors]] assign[=] <ast.ListComp object at 0x7da1b13d46a0> call[name[drepr]][constant[warnings]] assign[=] <ast.ListComp object at 0x7da1b13d48b0> return[name[drepr]]
keyword[def] identifier[as_dict] ( identifier[self] ): literal[string] identifier[drepr] ={} identifier[drepr] [ literal[string] ]= identifier[self] . identifier[name] identifier[drepr] [ literal[string] ]= identifier[self] . identifier[time] identifier[drepr] [ literal[string] ]=[ identifier[str] ( identifier[err] ) keyword[for] identifier[err] keyword[in] identifier[self] . identifier[errors] ] identifier[drepr] [ literal[string] ]=[ identifier[str] ( identifier[warn] ) keyword[for] identifier[warn] keyword[in] identifier[self] . identifier[warnings] ] keyword[return] identifier[drepr]
def as_dict(self): """ Pre-serialisation of the meta data """ drepr = {} drepr['name'] = self.name drepr['time'] = self.time # error pre-serialisation drepr['errors'] = [str(err) for err in self.errors] # warning pre-serialisation drepr['warnings'] = [str(warn) for warn in self.warnings] return drepr
def linkify(self, commands, notificationways): """Create link between objects:: * contacts -> notificationways :param notificationways: notificationways to link :type notificationways: alignak.objects.notificationway.Notificationways :return: None TODO: Clean this function """ self.linkify_with_notificationways(notificationways) self.linkify_command_list_with_commands(commands, 'service_notification_commands') self.linkify_command_list_with_commands(commands, 'host_notification_commands')
def function[linkify, parameter[self, commands, notificationways]]: constant[Create link between objects:: * contacts -> notificationways :param notificationways: notificationways to link :type notificationways: alignak.objects.notificationway.Notificationways :return: None TODO: Clean this function ] call[name[self].linkify_with_notificationways, parameter[name[notificationways]]] call[name[self].linkify_command_list_with_commands, parameter[name[commands], constant[service_notification_commands]]] call[name[self].linkify_command_list_with_commands, parameter[name[commands], constant[host_notification_commands]]]
keyword[def] identifier[linkify] ( identifier[self] , identifier[commands] , identifier[notificationways] ): literal[string] identifier[self] . identifier[linkify_with_notificationways] ( identifier[notificationways] ) identifier[self] . identifier[linkify_command_list_with_commands] ( identifier[commands] , literal[string] ) identifier[self] . identifier[linkify_command_list_with_commands] ( identifier[commands] , literal[string] )
def linkify(self, commands, notificationways): """Create link between objects:: * contacts -> notificationways :param notificationways: notificationways to link :type notificationways: alignak.objects.notificationway.Notificationways :return: None TODO: Clean this function """ self.linkify_with_notificationways(notificationways) self.linkify_command_list_with_commands(commands, 'service_notification_commands') self.linkify_command_list_with_commands(commands, 'host_notification_commands')
def dumps(cls, obj, protocol=0): """ Equivalent to pickle.dumps except that the HoloViews option tree is saved appropriately. """ cls.save_option_state = True val = pickle.dumps(obj, protocol=protocol) cls.save_option_state = False return val
def function[dumps, parameter[cls, obj, protocol]]: constant[ Equivalent to pickle.dumps except that the HoloViews option tree is saved appropriately. ] name[cls].save_option_state assign[=] constant[True] variable[val] assign[=] call[name[pickle].dumps, parameter[name[obj]]] name[cls].save_option_state assign[=] constant[False] return[name[val]]
keyword[def] identifier[dumps] ( identifier[cls] , identifier[obj] , identifier[protocol] = literal[int] ): literal[string] identifier[cls] . identifier[save_option_state] = keyword[True] identifier[val] = identifier[pickle] . identifier[dumps] ( identifier[obj] , identifier[protocol] = identifier[protocol] ) identifier[cls] . identifier[save_option_state] = keyword[False] keyword[return] identifier[val]
def dumps(cls, obj, protocol=0): """ Equivalent to pickle.dumps except that the HoloViews option tree is saved appropriately. """ cls.save_option_state = True val = pickle.dumps(obj, protocol=protocol) cls.save_option_state = False return val
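The flag-flipping pattern above is worth guarding with try/finally, since an exception during pickling would otherwise leave save_option_state set. A generic sketch (OptionAwarePickler is illustrative, not the original class):

import pickle

class OptionAwarePickler(object):
    save_option_state = False

    @classmethod
    def dumps(cls, obj, protocol=0):
        # __getstate__/__reduce__ hooks can consult this class-level flag
        # while the object graph is being serialized.
        cls.save_option_state = True
        try:
            return pickle.dumps(obj, protocol=protocol)
        finally:
            cls.save_option_state = False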
def check_link_status(): '''check status of master links''' tnow = time.time() if mpstate.status.last_message != 0 and tnow > mpstate.status.last_message + 5: say("no link") mpstate.status.heartbeat_error = True for master in mpstate.mav_master: if not master.linkerror and (tnow > master.last_message + 5 or master.portdead): say("link %s down" % (mp_module.MPModule.link_label(master))) master.linkerror = True
def function[check_link_status, parameter[]]: constant[check status of master links] variable[tnow] assign[=] call[name[time].time, parameter[]] if <ast.BoolOp object at 0x7da1b16012a0> begin[:] call[name[say], parameter[constant[no link]]] name[mpstate].status.heartbeat_error assign[=] constant[True] for taget[name[master]] in starred[name[mpstate].mav_master] begin[:] if <ast.BoolOp object at 0x7da1b16002b0> begin[:] call[name[say], parameter[binary_operation[constant[link %s down] <ast.Mod object at 0x7da2590d6920> call[name[mp_module].MPModule.link_label, parameter[name[master]]]]]] name[master].linkerror assign[=] constant[True]
keyword[def] identifier[check_link_status] (): literal[string] identifier[tnow] = identifier[time] . identifier[time] () keyword[if] identifier[mpstate] . identifier[status] . identifier[last_message] != literal[int] keyword[and] identifier[tnow] > identifier[mpstate] . identifier[status] . identifier[last_message] + literal[int] : identifier[say] ( literal[string] ) identifier[mpstate] . identifier[status] . identifier[heartbeat_error] = keyword[True] keyword[for] identifier[master] keyword[in] identifier[mpstate] . identifier[mav_master] : keyword[if] keyword[not] identifier[master] . identifier[linkerror] keyword[and] ( identifier[tnow] > identifier[master] . identifier[last_message] + literal[int] keyword[or] identifier[master] . identifier[portdead] ): identifier[say] ( literal[string] %( identifier[mp_module] . identifier[MPModule] . identifier[link_label] ( identifier[master] ))) identifier[master] . identifier[linkerror] = keyword[True]
def check_link_status(): """check status of master links""" tnow = time.time() if mpstate.status.last_message != 0 and tnow > mpstate.status.last_message + 5: say('no link') mpstate.status.heartbeat_error = True # depends on [control=['if'], data=[]] for master in mpstate.mav_master: if not master.linkerror and (tnow > master.last_message + 5 or master.portdead): say('link %s down' % mp_module.MPModule.link_label(master)) master.linkerror = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['master']]
def tridiag(below=None, diag=None, above=None, name=None): """Creates a matrix with values set above, below, and on the diagonal. Example: ```python tridiag(below=[1., 2., 3.], diag=[4., 5., 6., 7.], above=[8., 9., 10.]) # ==> array([[ 4., 8., 0., 0.], # [ 1., 5., 9., 0.], # [ 0., 2., 6., 10.], # [ 0., 0., 3., 7.]], dtype=float32) ``` Warning: This Op is intended for convenience, not efficiency. Args: below: `Tensor` of shape `[B1, ..., Bb, d-1]` corresponding to the below diagonal part. `None` is logically equivalent to `below = 0`. diag: `Tensor` of shape `[B1, ..., Bb, d]` corresponding to the diagonal part. `None` is logically equivalent to `diag = 0`. above: `Tensor` of shape `[B1, ..., Bb, d-1]` corresponding to the above diagonal part. `None` is logically equivalent to `above = 0`. name: Python `str`. The name to give this op. Returns: tridiag: `Tensor` with values set above, below and on the diagonal. Raises: ValueError: if all inputs are `None`. """ def _pad(x): """Prepends and appends a zero to every vector in a batch of vectors.""" shape = tf.concat([tf.shape(input=x)[:-1], [1]], axis=0) z = tf.zeros(shape, dtype=x.dtype) return tf.concat([z, x, z], axis=-1) def _add(*x): """Adds list of Tensors, ignoring `None`.""" s = None for y in x: if y is None: continue elif s is None: s = y else: s += y if s is None: raise ValueError("Must specify at least one of `below`, `diag`, `above`.") return s with tf.name_scope(name or "tridiag"): if below is not None: below = tf.convert_to_tensor(value=below, name="below") below = tf.linalg.diag(_pad(below))[..., :-1, 1:] if diag is not None: diag = tf.convert_to_tensor(value=diag, name="diag") diag = tf.linalg.diag(diag) if above is not None: above = tf.convert_to_tensor(value=above, name="above") above = tf.linalg.diag(_pad(above))[..., 1:, :-1] # TODO(jvdillon): Consider using scatter_nd instead of creating three full # matrices. return _add(below, diag, above)
def function[tridiag, parameter[below, diag, above, name]]: constant[Creates a matrix with values set above, below, and on the diagonal. Example: ```python tridiag(below=[1., 2., 3.], diag=[4., 5., 6., 7.], above=[8., 9., 10.]) # ==> array([[ 4., 8., 0., 0.], # [ 1., 5., 9., 0.], # [ 0., 2., 6., 10.], # [ 0., 0., 3., 7.]], dtype=float32) ``` Warning: This Op is intended for convenience, not efficiency. Args: below: `Tensor` of shape `[B1, ..., Bb, d-1]` corresponding to the below diagonal part. `None` is logically equivalent to `below = 0`. diag: `Tensor` of shape `[B1, ..., Bb, d]` corresponding to the diagonal part. `None` is logically equivalent to `diag = 0`. above: `Tensor` of shape `[B1, ..., Bb, d-1]` corresponding to the above diagonal part. `None` is logically equivalent to `above = 0`. name: Python `str`. The name to give this op. Returns: tridiag: `Tensor` with values set above, below and on the diagonal. Raises: ValueError: if all inputs are `None`. ] def function[_pad, parameter[x]]: constant[Prepends and appends a zero to every vector in a batch of vectors.] variable[shape] assign[=] call[name[tf].concat, parameter[list[[<ast.Subscript object at 0x7da1b03573a0>, <ast.List object at 0x7da1b0356710>]]]] variable[z] assign[=] call[name[tf].zeros, parameter[name[shape]]] return[call[name[tf].concat, parameter[list[[<ast.Name object at 0x7da1b0356620>, <ast.Name object at 0x7da1b03561a0>, <ast.Name object at 0x7da1b0356350>]]]]] def function[_add, parameter[]]: constant[Adds list of Tensors, ignoring `None`.] variable[s] assign[=] constant[None] for taget[name[y]] in starred[name[x]] begin[:] if compare[name[y] is constant[None]] begin[:] continue if compare[name[s] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0373b80> return[name[s]] with call[name[tf].name_scope, parameter[<ast.BoolOp object at 0x7da1b02fe4d0>]] begin[:] if compare[name[below] is_not constant[None]] begin[:] variable[below] assign[=] call[name[tf].convert_to_tensor, parameter[]] variable[below] assign[=] call[call[name[tf].linalg.diag, parameter[call[name[_pad], parameter[name[below]]]]]][tuple[[<ast.Constant object at 0x7da1b02fc580>, <ast.Slice object at 0x7da1b02fd5a0>, <ast.Slice object at 0x7da1b02fdf90>]]] if compare[name[diag] is_not constant[None]] begin[:] variable[diag] assign[=] call[name[tf].convert_to_tensor, parameter[]] variable[diag] assign[=] call[name[tf].linalg.diag, parameter[name[diag]]] if compare[name[above] is_not constant[None]] begin[:] variable[above] assign[=] call[name[tf].convert_to_tensor, parameter[]] variable[above] assign[=] call[call[name[tf].linalg.diag, parameter[call[name[_pad], parameter[name[above]]]]]][tuple[[<ast.Constant object at 0x7da1b02fe260>, <ast.Slice object at 0x7da1b02fe590>, <ast.Slice object at 0x7da1b02fe710>]]] return[call[name[_add], parameter[name[below], name[diag], name[above]]]]
keyword[def] identifier[tridiag] ( identifier[below] = keyword[None] , identifier[diag] = keyword[None] , identifier[above] = keyword[None] , identifier[name] = keyword[None] ): literal[string] keyword[def] identifier[_pad] ( identifier[x] ): literal[string] identifier[shape] = identifier[tf] . identifier[concat] ([ identifier[tf] . identifier[shape] ( identifier[input] = identifier[x] )[:- literal[int] ],[ literal[int] ]], identifier[axis] = literal[int] ) identifier[z] = identifier[tf] . identifier[zeros] ( identifier[shape] , identifier[dtype] = identifier[x] . identifier[dtype] ) keyword[return] identifier[tf] . identifier[concat] ([ identifier[z] , identifier[x] , identifier[z] ], identifier[axis] =- literal[int] ) keyword[def] identifier[_add] (* identifier[x] ): literal[string] identifier[s] = keyword[None] keyword[for] identifier[y] keyword[in] identifier[x] : keyword[if] identifier[y] keyword[is] keyword[None] : keyword[continue] keyword[elif] identifier[s] keyword[is] keyword[None] : identifier[s] = identifier[y] keyword[else] : identifier[s] += identifier[y] keyword[if] identifier[s] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[s] keyword[with] identifier[tf] . identifier[name_scope] ( identifier[name] keyword[or] literal[string] ): keyword[if] identifier[below] keyword[is] keyword[not] keyword[None] : identifier[below] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[below] , identifier[name] = literal[string] ) identifier[below] = identifier[tf] . identifier[linalg] . identifier[diag] ( identifier[_pad] ( identifier[below] ))[...,:- literal[int] , literal[int] :] keyword[if] identifier[diag] keyword[is] keyword[not] keyword[None] : identifier[diag] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[diag] , identifier[name] = literal[string] ) identifier[diag] = identifier[tf] . identifier[linalg] . identifier[diag] ( identifier[diag] ) keyword[if] identifier[above] keyword[is] keyword[not] keyword[None] : identifier[above] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[above] , identifier[name] = literal[string] ) identifier[above] = identifier[tf] . identifier[linalg] . identifier[diag] ( identifier[_pad] ( identifier[above] ))[..., literal[int] :,:- literal[int] ] keyword[return] identifier[_add] ( identifier[below] , identifier[diag] , identifier[above] )
def tridiag(below=None, diag=None, above=None, name=None): """Creates a matrix with values set above, below, and on the diagonal. Example: ```python tridiag(below=[1., 2., 3.], diag=[4., 5., 6., 7.], above=[8., 9., 10.]) # ==> array([[ 4., 8., 0., 0.], # [ 1., 5., 9., 0.], # [ 0., 2., 6., 10.], # [ 0., 0., 3., 7.]], dtype=float32) ``` Warning: This Op is intended for convenience, not efficiency. Args: below: `Tensor` of shape `[B1, ..., Bb, d-1]` corresponding to the below diagonal part. `None` is logically equivalent to `below = 0`. diag: `Tensor` of shape `[B1, ..., Bb, d]` corresponding to the diagonal part. `None` is logically equivalent to `diag = 0`. above: `Tensor` of shape `[B1, ..., Bb, d-1]` corresponding to the above diagonal part. `None` is logically equivalent to `above = 0`. name: Python `str`. The name to give this op. Returns: tridiag: `Tensor` with values set above, below and on the diagonal. Raises: ValueError: if all inputs are `None`. """ def _pad(x): """Prepends and appends a zero to every vector in a batch of vectors.""" shape = tf.concat([tf.shape(input=x)[:-1], [1]], axis=0) z = tf.zeros(shape, dtype=x.dtype) return tf.concat([z, x, z], axis=-1) def _add(*x): """Adds list of Tensors, ignoring `None`.""" s = None for y in x: if y is None: continue # depends on [control=['if'], data=[]] elif s is None: s = y # depends on [control=['if'], data=['s']] else: s += y # depends on [control=['for'], data=['y']] if s is None: raise ValueError('Must specify at least one of `below`, `diag`, `above`.') # depends on [control=['if'], data=[]] return s with tf.name_scope(name or 'tridiag'): if below is not None: below = tf.convert_to_tensor(value=below, name='below') below = tf.linalg.diag(_pad(below))[..., :-1, 1:] # depends on [control=['if'], data=['below']] if diag is not None: diag = tf.convert_to_tensor(value=diag, name='diag') diag = tf.linalg.diag(diag) # depends on [control=['if'], data=['diag']] if above is not None: above = tf.convert_to_tensor(value=above, name='above') above = tf.linalg.diag(_pad(above))[..., 1:, :-1] # depends on [control=['if'], data=['above']] # TODO(jvdillon): Consider using scatter_nd instead of creating three full # matrices. return _add(below, diag, above) # depends on [control=['with'], data=[]]
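For the unbatched case, NumPy builds the same matrix directly with offset diagonals, which makes the layout shown in the docstring easy to verify:

import numpy as np

below, diag, above = [1., 2., 3.], [4., 5., 6., 7.], [8., 9., 10.]
m = np.diag(below, k=-1) + np.diag(diag) + np.diag(above, k=1)
print(m)
# [[ 4.  8.  0.  0.]
#  [ 1.  5.  9.  0.]
#  [ 0.  2.  6. 10.]
#  [ 0.  0.  3.  7.]]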
def _get_colors(self): """Returns a tuple of two integers representing current colors: (foreground, background).""" try: csbi = _WindowsCSBI.get_info(self.win32_stream_handle) return csbi['fg_color'], csbi['bg_color'] except IOError: return 7, 0
def function[_get_colors, parameter[self]]: constant[Returns a tuple of two integers representing current colors: (foreground, background).] <ast.Try object at 0x7da18eb57df0>
keyword[def] identifier[_get_colors] ( identifier[self] ): literal[string] keyword[try] : identifier[csbi] = identifier[_WindowsCSBI] . identifier[get_info] ( identifier[self] . identifier[win32_stream_handle] ) keyword[return] identifier[csbi] [ literal[string] ], identifier[csbi] [ literal[string] ] keyword[except] identifier[IOError] : keyword[return] literal[int] , literal[int]
def _get_colors(self): """Returns a tuple of two integers representing current colors: (foreground, background).""" try: csbi = _WindowsCSBI.get_info(self.win32_stream_handle) return (csbi['fg_color'], csbi['bg_color']) # depends on [control=['try'], data=[]] except IOError: return (7, 0) # depends on [control=['except'], data=[]]
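The `_WindowsCSBI.get_info` helper is not shown here; presumably it decodes the Win32 `wAttributes` word of the console screen buffer, whose low nibble is the foreground color and next nibble the background. A hedged sketch of that split (`split_attrs` is a hypothetical name, not part of the real helper):

```python
def split_attrs(w_attributes):
    """Split a Win32 console attribute word into (foreground, background)."""
    fg = w_attributes & 0x0F         # low 4 bits: foreground color index
    bg = (w_attributes >> 4) & 0x0F  # next 4 bits: background color index
    return fg, bg

# the (7, 0) fallback above is light gray on black in this encoding
assert split_attrs(0x07) == (7, 0)
```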
def is_import(self): """Whether the stage file was created with `dvc import`.""" return not self.cmd and len(self.deps) == 1 and len(self.outs) == 1
def function[is_import, parameter[self]]: constant[Whether the stage file was created with `dvc import`.] return[<ast.BoolOp object at 0x7da1b1f1ba60>]
keyword[def] identifier[is_import] ( identifier[self] ): literal[string] keyword[return] keyword[not] identifier[self] . identifier[cmd] keyword[and] identifier[len] ( identifier[self] . identifier[deps] )== literal[int] keyword[and] identifier[len] ( identifier[self] . identifier[outs] )== literal[int]
def is_import(self): """Whether the stage file was created with `dvc import`.""" return not self.cmd and len(self.deps) == 1 and (len(self.outs) == 1)
def install_metaboard( replace_existing=False, ): """install metaboard. http://metalab.at/wiki/Metaboard """ metaboard = AutoBunch() metaboard.name = 'Metaboard' metaboard.upload.protocol = 'usbasp' metaboard.upload.maximum_size = '14336' metaboard.upload.speed = '19200' metaboard.build.mcu = 'atmega168' metaboard.build.f_cpu = '16000000L' metaboard.build.core = 'arduino' metaboard.upload.disable_flushing = 'true' board_id = 'metaboard' install_board(board_id, metaboard, replace_existing=replace_existing)
def function[install_metaboard, parameter[replace_existing]]: constant[install metaboard. http://metalab.at/wiki/Metaboard ] variable[metaboard] assign[=] call[name[AutoBunch], parameter[]] name[metaboard].name assign[=] constant[Metaboard] name[metaboard].upload.protocol assign[=] constant[usbasp] name[metaboard].upload.maximum_size assign[=] constant[14336] name[metaboard].upload.speed assign[=] constant[19200] name[metaboard].build.mcu assign[=] constant[atmega168] name[metaboard].build.f_cpu assign[=] constant[16000000L] name[metaboard].build.core assign[=] constant[arduino] name[metaboard].upload.disable_flushing assign[=] constant[true] variable[board_id] assign[=] constant[metaboard] call[name[install_board], parameter[name[board_id], name[metaboard]]]
keyword[def] identifier[install_metaboard] ( identifier[replace_existing] = keyword[False] , ): literal[string] identifier[metaboard] = identifier[AutoBunch] () identifier[metaboard] . identifier[name] = literal[string] identifier[metaboard] . identifier[upload] . identifier[protocol] = literal[string] identifier[metaboard] . identifier[upload] . identifier[maximum_size] = literal[string] identifier[metaboard] . identifier[upload] . identifier[speed] = literal[string] identifier[metaboard] . identifier[build] . identifier[mcu] = literal[string] identifier[metaboard] . identifier[build] . identifier[f_cpu] = literal[string] identifier[metaboard] . identifier[build] . identifier[core] = literal[string] identifier[metaboard] . identifier[upload] . identifier[disable_flushing] = literal[string] identifier[board_id] = literal[string] identifier[install_board] ( identifier[board_id] , identifier[metaboard] , identifier[replace_existing] = identifier[replace_existing] )
def install_metaboard(replace_existing=False): """install metaboard. http://metalab.at/wiki/Metaboard """ metaboard = AutoBunch() metaboard.name = 'Metaboard' metaboard.upload.protocol = 'usbasp' metaboard.upload.maximum_size = '14336' metaboard.upload.speed = '19200' metaboard.build.mcu = 'atmega168' metaboard.build.f_cpu = '16000000L' metaboard.build.core = 'arduino' metaboard.upload.disable_flushing = 'true' board_id = 'metaboard' install_board(board_id, metaboard, replace_existing=replace_existing)
def get_active_title(): '''returns the window title of the active window''' if os.name == 'posix': cmd = ['xdotool','getactivewindow','getwindowname'] proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) title = proc.communicate()[0].decode('utf-8') else: raise NotImplementedError return title
def function[get_active_title, parameter[]]: constant[returns the window title of the active window] if compare[name[os].name equal[==] constant[posix]] begin[:] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da204564c70>, <ast.Constant object at 0x7da204565330>, <ast.Constant object at 0x7da2045645b0>]] variable[proc] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]] variable[title] assign[=] call[call[call[name[proc].communicate, parameter[]]][constant[0]].decode, parameter[constant[utf-8]]] return[name[title]]
keyword[def] identifier[get_active_title] (): literal[string] keyword[if] identifier[os] . identifier[name] == literal[string] : identifier[cmd] =[ literal[string] , literal[string] , literal[string] ] identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[PIPE] ) identifier[title] = identifier[proc] . identifier[communicate] ()[ literal[int] ]. identifier[decode] ( literal[string] ) keyword[else] : keyword[raise] identifier[NotImplementedError] keyword[return] identifier[title]
def get_active_title(): """returns the window title of the active window""" if os.name == 'posix': cmd = ['xdotool', 'getactivewindow', 'getwindowname'] proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) title = proc.communicate()[0].decode('utf-8') # depends on [control=['if'], data=[]] else: raise NotImplementedError return title
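Usage sketch; note the implicit external dependency: on POSIX this shells out to `xdotool`, which must be installed, and on any other platform the call raises:

```python
try:
    print(get_active_title())
except NotImplementedError:
    print("active-window lookup is only implemented for POSIX + xdotool")
```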
def main(self):
    """
    A compulsory function that gets the output of the cmus-remote -Q
    command and converts it to unicode in order for it to be processed and
    finally output.
    """
    try:
        # Setting stderr to subprocess.STDOUT seems to stop the error
        # message returned by the process from being output to STDOUT.
        cmus_output = subprocess.check_output(['cmus-remote', '-Q'],
                                              stderr=subprocess.STDOUT).decode('utf-8')
    except subprocess.CalledProcessError:
        return self.output(None, None)
    if 'duration' in cmus_output:
        status = self.convert_cmus_output(cmus_output)
        out_string = self.options['format']
        for k, v in status.items():
            out_string = out_string.replace(k, v)
    else:
        out_string = None
    return self.output(out_string, out_string)
def function[main, parameter[self]]: constant[ A compulsory function that gets the output of the cmus-remote -Q command and converts it to unicode in order for it to be processed and finally output. ] <ast.Try object at 0x7da18bc73130> if compare[constant[duration] in name[cmus_output]] begin[:] variable[status] assign[=] call[name[self].convert_cmus_output, parameter[name[cmus_output]]] variable[out_string] assign[=] call[name[self].options][constant[format]] for taget[tuple[[<ast.Name object at 0x7da18bc72e00>, <ast.Name object at 0x7da18bc70730>]]] in starred[call[name[status].items, parameter[]]] begin[:] variable[out_string] assign[=] call[name[out_string].replace, parameter[name[k], name[v]]] return[call[name[self].output, parameter[name[out_string], name[out_string]]]]
keyword[def] identifier[main] ( identifier[self] ): literal[string] keyword[try] : identifier[cmus_output] = identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] ], identifier[stderr] = identifier[subprocess] . identifier[STDOUT] ). identifier[decode] ( literal[string] ) keyword[except] identifier[subprocess] . identifier[CalledProcessError] : keyword[return] identifier[self] . identifier[output] ( keyword[None] , keyword[None] ) keyword[if] literal[string] keyword[in] identifier[cmus_output] : identifier[status] = identifier[self] . identifier[convert_cmus_output] ( identifier[cmus_output] ) identifier[out_string] = identifier[self] . identifier[options] [ literal[string] ] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[status] . identifier[items] (): identifier[out_string] = identifier[out_string] . identifier[replace] ( identifier[k] , identifier[v] ) keyword[else] : identifier[out_string] = keyword[None] keyword[return] identifier[self] . identifier[output] ( identifier[out_string] , identifier[out_string] )
def main(self): """ A compulsory function that gets the output of the cmus-remote -Q command and converts it to unicode in order for it to be processed and finally output. """ try: # Setting stderr to subprocess.STDOUT seems to stop the error # message returned by the process from being output to STDOUT. cmus_output = subprocess.check_output(['cmus-remote', '-Q'], stderr=subprocess.STDOUT).decode('utf-8') # depends on [control=['try'], data=[]] except subprocess.CalledProcessError: return self.output(None, None) # depends on [control=['except'], data=[]] if 'duration' in cmus_output: status = self.convert_cmus_output(cmus_output) out_string = self.options['format'] for (k, v) in status.items(): out_string = out_string.replace(k, v) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['cmus_output']] else: out_string = None return self.output(out_string, out_string)
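For orientation, `cmus-remote -Q` emits line-oriented key/value pairs. A hypothetical, abridged sample of the string `main` works with (real output carries more fields, and `convert_cmus_output` is defined elsewhere on the class):

```python
# hypothetical, abridged `cmus-remote -Q` output
cmus_output = (
    "status playing\n"
    "file /music/track.flac\n"
    "duration 254\n"
    "position 31\n"
    "tag artist Example Artist\n"
)
# 'duration' is present, so main() would build the status dict and then
# substitute its keys into self.options['format']; otherwise it emits None.
```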
def _handle_actiondefinefunction(self, _): """Handle the ActionDefineFunction action.""" obj = _make_object("ActionDefineFunction") obj.FunctionName = self._get_struct_string() obj.NumParams = unpack_ui16(self._src) for i in range(1, obj.NumParams + 1): setattr(obj, "param" + str(i), self._get_struct_string()) obj.CodeSize = unpack_ui16(self._src) yield obj
def function[_handle_actiondefinefunction, parameter[self, _]]: constant[Handle the ActionDefineFunction action.] variable[obj] assign[=] call[name[_make_object], parameter[constant[ActionDefineFunction]]] name[obj].FunctionName assign[=] call[name[self]._get_struct_string, parameter[]] name[obj].NumParams assign[=] call[name[unpack_ui16], parameter[name[self]._src]] for taget[name[i]] in starred[call[name[range], parameter[constant[1], binary_operation[name[obj].NumParams + constant[1]]]]] begin[:] call[name[setattr], parameter[name[obj], binary_operation[constant[param] + call[name[str], parameter[name[i]]]], call[name[self]._get_struct_string, parameter[]]]] name[obj].CodeSize assign[=] call[name[unpack_ui16], parameter[name[self]._src]] <ast.Yield object at 0x7da18dc9b040>
keyword[def] identifier[_handle_actiondefinefunction] ( identifier[self] , identifier[_] ): literal[string] identifier[obj] = identifier[_make_object] ( literal[string] ) identifier[obj] . identifier[FunctionName] = identifier[self] . identifier[_get_struct_string] () identifier[obj] . identifier[NumParams] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[obj] . identifier[NumParams] + literal[int] ): identifier[setattr] ( identifier[obj] , literal[string] + identifier[str] ( identifier[i] ), identifier[self] . identifier[_get_struct_string] ()) identifier[obj] . identifier[CodeSize] = identifier[unpack_ui16] ( identifier[self] . identifier[_src] ) keyword[yield] identifier[obj]
def _handle_actiondefinefunction(self, _): """Handle the ActionDefineFunction action.""" obj = _make_object('ActionDefineFunction') obj.FunctionName = self._get_struct_string() obj.NumParams = unpack_ui16(self._src) for i in range(1, obj.NumParams + 1): setattr(obj, 'param' + str(i), self._get_struct_string()) # depends on [control=['for'], data=['i']] obj.CodeSize = unpack_ui16(self._src) yield obj
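`unpack_ui16` is used but not defined here; since the SWF format is little-endian and `self._src` behaves like a binary stream, a plausible sketch is a one-line `struct` call (an assumption about the real helper, not a quote of it):

```python
import struct

def unpack_ui16(src):
    """Read one little-endian unsigned 16-bit integer from a binary stream."""
    return struct.unpack("<H", src.read(2))[0]
```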
def project_interval_backward(self, c_interval): """ project c_interval on the destination transcript to the source transcript :param c_interval: an :class:`hgvs.interval.Interval` object on the destination transcript :returns: c_interval: an :class:`hgvs.interval.Interval` object on the source transcript """ return self.src_tm.g_to_c(self.dst_tm.c_to_g(c_interval))
def function[project_interval_backward, parameter[self, c_interval]]: constant[ project c_interval on the destination transcript to the source transcript :param c_interval: an :class:`hgvs.interval.Interval` object on the destination transcript :returns: c_interval: an :class:`hgvs.interval.Interval` object on the source transcript ] return[call[name[self].src_tm.g_to_c, parameter[call[name[self].dst_tm.c_to_g, parameter[name[c_interval]]]]]]
keyword[def] identifier[project_interval_backward] ( identifier[self] , identifier[c_interval] ): literal[string] keyword[return] identifier[self] . identifier[src_tm] . identifier[g_to_c] ( identifier[self] . identifier[dst_tm] . identifier[c_to_g] ( identifier[c_interval] ))
def project_interval_backward(self, c_interval): """ project c_interval on the destination transcript to the source transcript :param c_interval: an :class:`hgvs.interval.Interval` object on the destination transcript :returns: c_interval: an :class:`hgvs.interval.Interval` object on the source transcript """ return self.src_tm.g_to_c(self.dst_tm.c_to_g(c_interval))
def recv(self, bufsiz, flags=None): """ Receive data on the connection. :param bufsiz: The maximum number of bytes to read :param flags: (optional) The only supported flag is ``MSG_PEEK``, all other flags are ignored. :return: The string read from the Connection """ buf = _no_zero_allocator("char[]", bufsiz) if flags is not None and flags & socket.MSG_PEEK: result = _lib.SSL_peek(self._ssl, buf, bufsiz) else: result = _lib.SSL_read(self._ssl, buf, bufsiz) self._raise_ssl_error(self._ssl, result) return _ffi.buffer(buf, result)[:]
def function[recv, parameter[self, bufsiz, flags]]: constant[ Receive data on the connection. :param bufsiz: The maximum number of bytes to read :param flags: (optional) The only supported flag is ``MSG_PEEK``, all other flags are ignored. :return: The string read from the Connection ] variable[buf] assign[=] call[name[_no_zero_allocator], parameter[constant[char[]], name[bufsiz]]] if <ast.BoolOp object at 0x7da1b0316470> begin[:] variable[result] assign[=] call[name[_lib].SSL_peek, parameter[name[self]._ssl, name[buf], name[bufsiz]]] call[name[self]._raise_ssl_error, parameter[name[self]._ssl, name[result]]] return[call[call[name[_ffi].buffer, parameter[name[buf], name[result]]]][<ast.Slice object at 0x7da1b024f1c0>]]
keyword[def] identifier[recv] ( identifier[self] , identifier[bufsiz] , identifier[flags] = keyword[None] ): literal[string] identifier[buf] = identifier[_no_zero_allocator] ( literal[string] , identifier[bufsiz] ) keyword[if] identifier[flags] keyword[is] keyword[not] keyword[None] keyword[and] identifier[flags] & identifier[socket] . identifier[MSG_PEEK] : identifier[result] = identifier[_lib] . identifier[SSL_peek] ( identifier[self] . identifier[_ssl] , identifier[buf] , identifier[bufsiz] ) keyword[else] : identifier[result] = identifier[_lib] . identifier[SSL_read] ( identifier[self] . identifier[_ssl] , identifier[buf] , identifier[bufsiz] ) identifier[self] . identifier[_raise_ssl_error] ( identifier[self] . identifier[_ssl] , identifier[result] ) keyword[return] identifier[_ffi] . identifier[buffer] ( identifier[buf] , identifier[result] )[:]
def recv(self, bufsiz, flags=None): """ Receive data on the connection. :param bufsiz: The maximum number of bytes to read :param flags: (optional) The only supported flag is ``MSG_PEEK``, all other flags are ignored. :return: The string read from the Connection """ buf = _no_zero_allocator('char[]', bufsiz) if flags is not None and flags & socket.MSG_PEEK: result = _lib.SSL_peek(self._ssl, buf, bufsiz) # depends on [control=['if'], data=[]] else: result = _lib.SSL_read(self._ssl, buf, bufsiz) self._raise_ssl_error(self._ssl, result) return _ffi.buffer(buf, result)[:]
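Usage sketch for the two read modes, assuming `conn` is an already-handshaken `OpenSSL.SSL.Connection` (setup omitted):

```python
import socket

header = conn.recv(5, socket.MSG_PEEK)  # SSL_peek: bytes stay buffered
payload = conn.recv(4096)               # SSL_read: bytes are consumed
```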
def _add_token_span_to_document(self, span_element): """ adds an <intro>, <act> or <conclu> token span to the document. """ for token in span_element.text.split(): token_id = self._add_token_to_document(token) if span_element.tag == 'act': # doc can have 0+ acts self._add_spanning_relation('act_{}'.format(self.act_count), token_id) else: # <intro> or <conclu> self._add_spanning_relation(span_element.tag, token_id) if span_element.tag == 'act': self.act_count += 1
def function[_add_token_span_to_document, parameter[self, span_element]]: constant[ adds an <intro>, <act> or <conclu> token span to the document. ] for taget[name[token]] in starred[call[name[span_element].text.split, parameter[]]] begin[:] variable[token_id] assign[=] call[name[self]._add_token_to_document, parameter[name[token]]] if compare[name[span_element].tag equal[==] constant[act]] begin[:] call[name[self]._add_spanning_relation, parameter[call[constant[act_{}].format, parameter[name[self].act_count]], name[token_id]]] if compare[name[span_element].tag equal[==] constant[act]] begin[:] <ast.AugAssign object at 0x7da20c794a00>
keyword[def] identifier[_add_token_span_to_document] ( identifier[self] , identifier[span_element] ): literal[string] keyword[for] identifier[token] keyword[in] identifier[span_element] . identifier[text] . identifier[split] (): identifier[token_id] = identifier[self] . identifier[_add_token_to_document] ( identifier[token] ) keyword[if] identifier[span_element] . identifier[tag] == literal[string] : identifier[self] . identifier[_add_spanning_relation] ( literal[string] . identifier[format] ( identifier[self] . identifier[act_count] ), identifier[token_id] ) keyword[else] : identifier[self] . identifier[_add_spanning_relation] ( identifier[span_element] . identifier[tag] , identifier[token_id] ) keyword[if] identifier[span_element] . identifier[tag] == literal[string] : identifier[self] . identifier[act_count] += literal[int]
def _add_token_span_to_document(self, span_element): """ adds an <intro>, <act> or <conclu> token span to the document. """ for token in span_element.text.split(): token_id = self._add_token_to_document(token) if span_element.tag == 'act': # doc can have 0+ acts self._add_spanning_relation('act_{}'.format(self.act_count), token_id) # depends on [control=['if'], data=[]] else: # <intro> or <conclu> self._add_spanning_relation(span_element.tag, token_id) # depends on [control=['for'], data=['token']] if span_element.tag == 'act': self.act_count += 1 # depends on [control=['if'], data=[]]
def can_claim_fifty_moves(self) -> bool:
    """
    Draw by the fifty-move rule can be claimed once the clock of halfmoves
    since the last capture or pawn move becomes greater than or equal to 100
    and the side to move still has a legal move they can make.
    """
    # Fifty-move rule.
    if self.halfmove_clock >= 100:
        if any(self.generate_legal_moves()):
            return True
    return False
def function[can_claim_fifty_moves, parameter[self]]: constant[ Draw by the fifty-move rule can be claimed once the clock of halfmoves since the last capture or pawn move becomes greater than or equal to 100 and the side to move still has a legal move they can make. ] if compare[name[self].halfmove_clock greater_or_equal[>=] constant[100]] begin[:] if call[name[any], parameter[call[name[self].generate_legal_moves, parameter[]]]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[can_claim_fifty_moves] ( identifier[self] )-> identifier[bool] : literal[string] keyword[if] identifier[self] . identifier[halfmove_clock] >= literal[int] : keyword[if] identifier[any] ( identifier[self] . identifier[generate_legal_moves] ()): keyword[return] keyword[True] keyword[return] keyword[False]
def can_claim_fifty_moves(self) -> bool: """ Draw by the fifty-move rule can be claimed once the clock of halfmoves since the last capture or pawn move becomes greater than or equal to 100 and the side to move still has a legal move they can make. """ # Fifty-move rule. if self.halfmove_clock >= 100: if any(self.generate_legal_moves()): return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return False
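A quick check, assuming this is python-chess's `Board` (which the attribute names suggest):

```python
import chess

board = chess.Board()
print(board.can_claim_fifty_moves())  # False: the halfmove clock starts at 0
```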
def buscar_healthchecks(self, id_ambiente_vip):
    """Search healthcheck by environmentvip_id

    :return: Dictionary with the following structure:

    ::

        {'healthcheck_opt': [{'name': <name>, 'id': <id>},...]}

    :raise InvalidParameterError: Environment VIP identifier is null and invalid.
    :raise EnvironmentVipNotFoundError: Environment VIP not registered.
    :raise InvalidParameterError: id_ambiente_vip is null and invalid.
    :raise DataBaseError: Networkapi failed to access the database.
    :raise XMLError: Networkapi failed to generate the XML response.
    """

    url = 'environment-vip/get/healthcheck/' + str(id_ambiente_vip)

    code, xml = self.submit(None, 'GET', url)

    return self.response(code, xml, ['healthcheck_opt'])
def function[buscar_healthchecks, parameter[self, id_ambiente_vip]]: constant[Search healthcheck by environmentvip_id :return: Dictionary with the following structure: :: {'healthcheck_opt': [{'name': <name>, 'id': <id>},...]} :raise InvalidParameterError: Environment VIP identifier is null and invalid. :raise EnvironmentVipNotFoundError: Environment VIP not registered. :raise InvalidParameterError: id_ambiente_vip is null and invalid. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. ] variable[url] assign[=] binary_operation[constant[environment-vip/get/healthcheck/] + call[name[str], parameter[name[id_ambiente_vip]]]] <ast.Tuple object at 0x7da1b2346770> assign[=] call[name[self].submit, parameter[constant[None], constant[GET], name[url]]] return[call[name[self].response, parameter[name[code], name[xml], list[[<ast.Constant object at 0x7da1b2346200>]]]]]
keyword[def] identifier[buscar_healthchecks] ( identifier[self] , identifier[id_ambiente_vip] ): literal[string] identifier[url] = literal[string] + identifier[str] ( identifier[id_ambiente_vip] ) identifier[code] , identifier[xml] = identifier[self] . identifier[submit] ( keyword[None] , literal[string] , identifier[url] ) keyword[return] identifier[self] . identifier[response] ( identifier[code] , identifier[xml] ,[ literal[string] ])
def buscar_healthchecks(self, id_ambiente_vip): """Search healthcheck by environmentvip_id :return: Dictionary with the following structure: :: {'healthcheck_opt': [{'name': <name>, 'id': <id>},...]} :raise InvalidParameterError: Environment VIP identifier is null and invalid. :raise EnvironmentVipNotFoundError: Environment VIP not registered. :raise InvalidParameterError: id_ambiente_vip is null and invalid. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response. """ url = 'environment-vip/get/healthcheck/' + str(id_ambiente_vip) (code, xml) = self.submit(None, 'GET', url) return self.response(code, xml, ['healthcheck_opt'])
def _handle_exception(ignore_callback_errors, print_callback_errors, obj,
                      cb_event=None, node=None):
    """Helper for printing errors in callbacks

    See EventEmitter._invoke_callback for a use example.
    """
    if not hasattr(obj, '_vispy_err_registry'):
        obj._vispy_err_registry = {}
    registry = obj._vispy_err_registry

    if cb_event is not None:
        cb, event = cb_event
        exp_type = 'callback'
    else:
        exp_type = 'node'
    type_, value, tb = sys.exc_info()
    tb = tb.tb_next  # Skip *this* frame
    sys.last_type = type_
    sys.last_value = value
    sys.last_traceback = tb
    del tb  # Get rid of it in this namespace

    # Handle
    if not ignore_callback_errors:
        raise
    if print_callback_errors != "never":
        this_print = 'full'
        if print_callback_errors in ('first', 'reminders'):
            # need to check to see if we've hit this yet
            if exp_type == 'callback':
                key = repr(cb) + repr(event)
            else:
                key = repr(node)
            if key in registry:
                registry[key] += 1
                if print_callback_errors == 'first':
                    this_print = None
                else:  # reminders
                    ii = registry[key]
                    # Use logarithmic selection
                    # (1, 2, ..., 10, 20, ..., 100, 200, ...)
                    if ii == (2 ** int(np.log2(ii))):
                        this_print = ii
                    else:
                        this_print = None
            else:
                registry[key] = 1
        if this_print == 'full':
            logger.log_exception()
            if exp_type == 'callback':
                logger.error("Invoking %s for %s" % (cb, event))
            else:  # == 'node':
                logger.error("Drawing node %s" % node)
        elif this_print is not None:
            if exp_type == 'callback':
                logger.error("Invoking %s repeat %s" % (cb, this_print))
            else:  # == 'node':
                logger.error("Drawing node %s repeat %s" % (node, this_print))
def function[_handle_exception, parameter[ignore_callback_errors, print_callback_errors, obj, cb_event, node]]: constant[Helper for printing errors in callbacks See EventEmitter._invoke_callback for a use example. ] if <ast.UnaryOp object at 0x7da1b0e7a4a0> begin[:] name[obj]._vispy_err_registry assign[=] dictionary[[], []] variable[registry] assign[=] name[obj]._vispy_err_registry if compare[name[cb_event] is_not constant[None]] begin[:] <ast.Tuple object at 0x7da1b0e7a0b0> assign[=] name[cb_event] variable[exp_type] assign[=] constant[callback] <ast.Tuple object at 0x7da1b0e79f60> assign[=] call[name[sys].exc_info, parameter[]] variable[tb] assign[=] name[tb].tb_next name[sys].last_type assign[=] name[type_] name[sys].last_value assign[=] name[value] name[sys].last_traceback assign[=] name[tb] <ast.Delete object at 0x7da1b0e7b5b0> if <ast.UnaryOp object at 0x7da1b0e7ad40> begin[:] <ast.Raise object at 0x7da1b0e7bee0> if compare[name[print_callback_errors] not_equal[!=] constant[never]] begin[:] variable[this_print] assign[=] constant[full] if compare[name[print_callback_errors] in tuple[[<ast.Constant object at 0x7da1b0e783a0>, <ast.Constant object at 0x7da1b0e78130>]]] begin[:] if compare[name[exp_type] equal[==] constant[callback]] begin[:] variable[key] assign[=] binary_operation[call[name[repr], parameter[name[cb]]] + call[name[repr], parameter[name[event]]]] if compare[name[key] in name[registry]] begin[:] <ast.AugAssign object at 0x7da1b0e7ae00> if compare[name[print_callback_errors] equal[==] constant[first]] begin[:] variable[this_print] assign[=] constant[None] if compare[name[this_print] equal[==] constant[full]] begin[:] call[name[logger].log_exception, parameter[]] if compare[name[exp_type] equal[==] constant[callback]] begin[:] call[name[logger].error, parameter[binary_operation[constant[Invoking %s for %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0e7a7a0>, <ast.Name object at 0x7da1b0e784c0>]]]]]
keyword[def] identifier[_handle_exception] ( identifier[ignore_callback_errors] , identifier[print_callback_errors] , identifier[obj] , identifier[cb_event] = keyword[None] , identifier[node] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[obj] , literal[string] ): identifier[obj] . identifier[_vispy_err_registry] ={} identifier[registry] = identifier[obj] . identifier[_vispy_err_registry] keyword[if] identifier[cb_event] keyword[is] keyword[not] keyword[None] : identifier[cb] , identifier[event] = identifier[cb_event] identifier[exp_type] = literal[string] keyword[else] : identifier[exp_type] = literal[string] identifier[type_] , identifier[value] , identifier[tb] = identifier[sys] . identifier[exc_info] () identifier[tb] = identifier[tb] . identifier[tb_next] identifier[sys] . identifier[last_type] = identifier[type_] identifier[sys] . identifier[last_value] = identifier[value] identifier[sys] . identifier[last_traceback] = identifier[tb] keyword[del] identifier[tb] keyword[if] keyword[not] identifier[ignore_callback_errors] : keyword[raise] keyword[if] identifier[print_callback_errors] != literal[string] : identifier[this_print] = literal[string] keyword[if] identifier[print_callback_errors] keyword[in] ( literal[string] , literal[string] ): keyword[if] identifier[exp_type] == literal[string] : identifier[key] = identifier[repr] ( identifier[cb] )+ identifier[repr] ( identifier[event] ) keyword[else] : identifier[key] = identifier[repr] ( identifier[node] ) keyword[if] identifier[key] keyword[in] identifier[registry] : identifier[registry] [ identifier[key] ]+= literal[int] keyword[if] identifier[print_callback_errors] == literal[string] : identifier[this_print] = keyword[None] keyword[else] : identifier[ii] = identifier[registry] [ identifier[key] ] keyword[if] identifier[ii] ==( literal[int] ** identifier[int] ( identifier[np] . identifier[log2] ( identifier[ii] ))): identifier[this_print] = identifier[ii] keyword[else] : identifier[this_print] = keyword[None] keyword[else] : identifier[registry] [ identifier[key] ]= literal[int] keyword[if] identifier[this_print] == literal[string] : identifier[logger] . identifier[log_exception] () keyword[if] identifier[exp_type] == literal[string] : identifier[logger] . identifier[error] ( literal[string] %( identifier[cb] , identifier[event] )) keyword[else] : identifier[logger] . identifier[error] ( literal[string] % identifier[node] ) keyword[elif] identifier[this_print] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[exp_type] == literal[string] : identifier[logger] . identifier[error] ( literal[string] %( identifier[cb] , identifier[this_print] )) keyword[else] : identifier[logger] . identifier[error] ( literal[string] %( identifier[node] , identifier[this_print] ))
def _handle_exception(ignore_callback_errors, print_callback_errors, obj, cb_event=None, node=None): """Helper for printing errors in callbacks See EventEmitter._invoke_callback for a use example. """ if not hasattr(obj, '_vispy_err_registry'): obj._vispy_err_registry = {} # depends on [control=['if'], data=[]] registry = obj._vispy_err_registry if cb_event is not None: (cb, event) = cb_event exp_type = 'callback' # depends on [control=['if'], data=['cb_event']] else: exp_type = 'node' (type_, value, tb) = sys.exc_info() tb = tb.tb_next # Skip *this* frame sys.last_type = type_ sys.last_value = value sys.last_traceback = tb del tb # Get rid of it in this namespace # Handle if not ignore_callback_errors: raise # depends on [control=['if'], data=[]] if print_callback_errors != 'never': this_print = 'full' if print_callback_errors in ('first', 'reminders'): # need to check to see if we've hit this yet if exp_type == 'callback': key = repr(cb) + repr(event) # depends on [control=['if'], data=[]] else: key = repr(node) if key in registry: registry[key] += 1 if print_callback_errors == 'first': this_print = None # depends on [control=['if'], data=[]] else: # reminders ii = registry[key] # Use logarithmic selection # (1, 2, ..., 10, 20, ..., 100, 200, ...) if ii == 2 ** int(np.log2(ii)): this_print = ii # depends on [control=['if'], data=['ii']] else: this_print = None # depends on [control=['if'], data=['key', 'registry']] else: registry[key] = 1 # depends on [control=['if'], data=['print_callback_errors']] if this_print == 'full': logger.log_exception() if exp_type == 'callback': logger.error('Invoking %s for %s' % (cb, event)) # depends on [control=['if'], data=[]] else: # == 'node': logger.error('Drawing node %s' % node) # depends on [control=['if'], data=[]] elif this_print is not None: if exp_type == 'callback': logger.error('Invoking %s repeat %s' % (cb, this_print)) # depends on [control=['if'], data=[]] else: # == 'node': logger.error('Drawing node %s repeat %s' % (node, this_print)) # depends on [control=['if'], data=['this_print']] # depends on [control=['if'], data=['print_callback_errors']]
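One detail worth pinning down: the in-code comment sketches a 1, 2, ..., 10, 20, ... schedule, but `ii == 2 ** int(np.log2(ii))` actually fires at powers of two. A tiny check makes the real reminder schedule explicit:

```python
import numpy as np

# repeat counts that pass the reminder test are exactly the powers of two
print([ii for ii in range(2, 70) if ii == 2 ** int(np.log2(ii))])
# -> [2, 4, 8, 16, 32, 64]
```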
def retrieve_order(self, order_id): """Retrieve details on a single order.""" response = self.request(E.retrieveOrderSslCertRequest( E.id(order_id) )) return response.as_model(SSLOrder)
def function[retrieve_order, parameter[self, order_id]]: constant[Retrieve details on a single order.] variable[response] assign[=] call[name[self].request, parameter[call[name[E].retrieveOrderSslCertRequest, parameter[call[name[E].id, parameter[name[order_id]]]]]]] return[call[name[response].as_model, parameter[name[SSLOrder]]]]
keyword[def] identifier[retrieve_order] ( identifier[self] , identifier[order_id] ): literal[string] identifier[response] = identifier[self] . identifier[request] ( identifier[E] . identifier[retrieveOrderSslCertRequest] ( identifier[E] . identifier[id] ( identifier[order_id] ) )) keyword[return] identifier[response] . identifier[as_model] ( identifier[SSLOrder] )
def retrieve_order(self, order_id): """Retrieve details on a single order.""" response = self.request(E.retrieveOrderSslCertRequest(E.id(order_id))) return response.as_model(SSLOrder)
def acquire_code(args, session, session3): """returns the user's token serial number, MFA token code, and an error code.""" serial_number = find_mfa_for_user(args.serial_number, session, session3) if not serial_number: print("There are no MFA devices associated with this user.", file=sys.stderr) return None, None, USER_RECOVERABLE_ERROR token_code = args.token_code if token_code is None: while token_code is None or len(token_code) != 6: token_code = getpass.getpass("MFA Token Code: ") return serial_number, token_code, OK
def function[acquire_code, parameter[args, session, session3]]: constant[returns the user's token serial number, MFA token code, and an error code.] variable[serial_number] assign[=] call[name[find_mfa_for_user], parameter[name[args].serial_number, name[session], name[session3]]] if <ast.UnaryOp object at 0x7da1b0465bd0> begin[:] call[name[print], parameter[constant[There are no MFA devices associated with this user.]]] return[tuple[[<ast.Constant object at 0x7da1b0466830>, <ast.Constant object at 0x7da1b0464160>, <ast.Name object at 0x7da1b04678b0>]]] variable[token_code] assign[=] name[args].token_code if compare[name[token_code] is constant[None]] begin[:] while <ast.BoolOp object at 0x7da1b0464130> begin[:] variable[token_code] assign[=] call[name[getpass].getpass, parameter[constant[MFA Token Code: ]]] return[tuple[[<ast.Name object at 0x7da1b0467280>, <ast.Name object at 0x7da1b0467370>, <ast.Name object at 0x7da1b04647f0>]]]
keyword[def] identifier[acquire_code] ( identifier[args] , identifier[session] , identifier[session3] ): literal[string] identifier[serial_number] = identifier[find_mfa_for_user] ( identifier[args] . identifier[serial_number] , identifier[session] , identifier[session3] ) keyword[if] keyword[not] identifier[serial_number] : identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] ) keyword[return] keyword[None] , keyword[None] , identifier[USER_RECOVERABLE_ERROR] identifier[token_code] = identifier[args] . identifier[token_code] keyword[if] identifier[token_code] keyword[is] keyword[None] : keyword[while] identifier[token_code] keyword[is] keyword[None] keyword[or] identifier[len] ( identifier[token_code] )!= literal[int] : identifier[token_code] = identifier[getpass] . identifier[getpass] ( literal[string] ) keyword[return] identifier[serial_number] , identifier[token_code] , identifier[OK]
def acquire_code(args, session, session3): """returns the user's token serial number, MFA token code, and an error code.""" serial_number = find_mfa_for_user(args.serial_number, session, session3) if not serial_number: print('There are no MFA devices associated with this user.', file=sys.stderr) return (None, None, USER_RECOVERABLE_ERROR) # depends on [control=['if'], data=[]] token_code = args.token_code if token_code is None: while token_code is None or len(token_code) != 6: token_code = getpass.getpass('MFA Token Code: ') # depends on [control=['while'], data=[]] # depends on [control=['if'], data=['token_code']] return (serial_number, token_code, OK)
def find(self, path, all=False): """ Work out the uncached name of the file and look that up instead """ try: start, _, extn = path.rsplit('.', 2) except ValueError: return [] path = '.'.join((start, extn)) return find(path, all=all) or []
def function[find, parameter[self, path, all]]: constant[ Work out the uncached name of the file and look that up instead ] <ast.Try object at 0x7da1b07941f0> variable[path] assign[=] call[constant[.].join, parameter[tuple[[<ast.Name object at 0x7da1b0845750>, <ast.Name object at 0x7da1b0847850>]]]] return[<ast.BoolOp object at 0x7da1b0847160>]
keyword[def] identifier[find] ( identifier[self] , identifier[path] , identifier[all] = keyword[False] ): literal[string] keyword[try] : identifier[start] , identifier[_] , identifier[extn] = identifier[path] . identifier[rsplit] ( literal[string] , literal[int] ) keyword[except] identifier[ValueError] : keyword[return] [] identifier[path] = literal[string] . identifier[join] (( identifier[start] , identifier[extn] )) keyword[return] identifier[find] ( identifier[path] , identifier[all] = identifier[all] ) keyword[or] []
def find(self, path, all=False): """ Work out the uncached name of the file and look that up instead """ try: (start, _, extn) = path.rsplit('.', 2) # depends on [control=['try'], data=[]] except ValueError: return [] # depends on [control=['except'], data=[]] path = '.'.join((start, extn)) return find(path, all=all) or []
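A worked example of the `rsplit('.', 2)` trick, with a hypothetical content-hash segment, shows how the finder maps a hashed static-file name back to its on-disk original:

```python
path = 'css/app.55e7cbb9ba48.css'      # hypothetical hashed static file
start, _, extn = path.rsplit('.', 2)   # -> 'css/app', '55e7cbb9ba48', 'css'
print('.'.join((start, extn)))         # -> 'css/app.css', the uncached name
```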
def parse_requirements(requirements_filename): """read in the dependencies from the requirements files """ dependencies, dependency_links = [], [] requirements_dir = os.path.dirname(requirements_filename) with open(requirements_filename, 'r') as stream: for line in stream: line = line.strip() if line.startswith("-r"): filename = os.path.join(requirements_dir, line[2:].strip()) _dependencies, _dependency_links = parse_requirements(filename) dependencies.extend(_dependencies) dependency_links.extend(_dependency_links) elif line.startswith("http"): dependency_links.append(line) else: package = line.split('#')[0] if package: dependencies.append(package) return dependencies, dependency_links
def function[parse_requirements, parameter[requirements_filename]]: constant[read in the dependencies from the requirements files ] <ast.Tuple object at 0x7da1b18bd240> assign[=] tuple[[<ast.List object at 0x7da1b18bfd00>, <ast.List object at 0x7da1b18bfac0>]] variable[requirements_dir] assign[=] call[name[os].path.dirname, parameter[name[requirements_filename]]] with call[name[open], parameter[name[requirements_filename], constant[r]]] begin[:] for taget[name[line]] in starred[name[stream]] begin[:] variable[line] assign[=] call[name[line].strip, parameter[]] if call[name[line].startswith, parameter[constant[-r]]] begin[:] variable[filename] assign[=] call[name[os].path.join, parameter[name[requirements_dir], call[call[name[line]][<ast.Slice object at 0x7da1b1790160>].strip, parameter[]]]] <ast.Tuple object at 0x7da1b1790310> assign[=] call[name[parse_requirements], parameter[name[filename]]] call[name[dependencies].extend, parameter[name[_dependencies]]] call[name[dependency_links].extend, parameter[name[_dependency_links]]] return[tuple[[<ast.Name object at 0x7da1b17d6740>, <ast.Name object at 0x7da1b17d6a10>]]]
keyword[def] identifier[parse_requirements] ( identifier[requirements_filename] ): literal[string] identifier[dependencies] , identifier[dependency_links] =[],[] identifier[requirements_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[requirements_filename] ) keyword[with] identifier[open] ( identifier[requirements_filename] , literal[string] ) keyword[as] identifier[stream] : keyword[for] identifier[line] keyword[in] identifier[stream] : identifier[line] = identifier[line] . identifier[strip] () keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[requirements_dir] , identifier[line] [ literal[int] :]. identifier[strip] ()) identifier[_dependencies] , identifier[_dependency_links] = identifier[parse_requirements] ( identifier[filename] ) identifier[dependencies] . identifier[extend] ( identifier[_dependencies] ) identifier[dependency_links] . identifier[extend] ( identifier[_dependency_links] ) keyword[elif] identifier[line] . identifier[startswith] ( literal[string] ): identifier[dependency_links] . identifier[append] ( identifier[line] ) keyword[else] : identifier[package] = identifier[line] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] identifier[package] : identifier[dependencies] . identifier[append] ( identifier[package] ) keyword[return] identifier[dependencies] , identifier[dependency_links]
def parse_requirements(requirements_filename): """read in the dependencies from the requirements files """ (dependencies, dependency_links) = ([], []) requirements_dir = os.path.dirname(requirements_filename) with open(requirements_filename, 'r') as stream: for line in stream: line = line.strip() if line.startswith('-r'): filename = os.path.join(requirements_dir, line[2:].strip()) (_dependencies, _dependency_links) = parse_requirements(filename) dependencies.extend(_dependencies) dependency_links.extend(_dependency_links) # depends on [control=['if'], data=[]] elif line.startswith('http'): dependency_links.append(line) # depends on [control=['if'], data=[]] else: package = line.split('#')[0] if package: dependencies.append(package) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['stream']] return (dependencies, dependency_links)
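Usage sketch with a hypothetical requirements file: `-r` lines recurse relative to the file's directory, bare URLs collect as dependency links, and `#` comments are stripped:

```python
# hypothetical requirements.txt, sitting next to a base.txt:
#     -r base.txt
#     requests>=2.20  # http client
#     https://example.com/pkg-1.0.tar.gz
deps, links = parse_requirements('requirements.txt')
# deps  -> entries from base.txt plus 'requests>=2.20'
# links -> ['https://example.com/pkg-1.0.tar.gz']
```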
def explain_prediction_linear_regressor(reg, doc, vec=None, top=None, top_targets=None, target_names=None, targets=None, feature_names=None, feature_re=None, feature_filter=None, vectorized=False): """ Explain prediction of a linear regressor. See :func:`eli5.explain_prediction` for description of ``top``, ``top_targets``, ``target_names``, ``targets``, ``feature_names``, ``feature_re`` and ``feature_filter`` parameters. ``vec`` is a vectorizer instance used to transform raw features to the input of the classifier ``clf``; you can pass it instead of ``feature_names``. ``vectorized`` is a flag which tells eli5 if ``doc`` should be passed through ``vec`` or not. By default it is False, meaning that if ``vec`` is not None, ``vec.transform([doc])`` is passed to the regressor ``reg``. Set it to True if you're passing ``vec``, but ``doc`` is already vectorized. """ if isinstance(reg, (SVR, NuSVR)) and reg.kernel != 'linear': return explain_prediction_sklearn_not_supported(reg, doc) vec, feature_names = handle_vec(reg, doc, vec, vectorized, feature_names) X = get_X(doc, vec=vec, vectorized=vectorized, to_dense=True) score, = reg.predict(X) if has_intercept(reg): X = add_intercept(X) x = get_X0(X) feature_names, flt_indices = feature_names.handle_filter( feature_filter, feature_re, x) res = Explanation( estimator=repr(reg), method='linear model', targets=[], is_regression=True, ) assert res.targets is not None _weights = _linear_weights(reg, x, top, feature_names, flt_indices) names = get_default_target_names(reg) display_names = get_target_display_names(names, target_names, targets, top_targets, score) if is_multitarget_regressor(reg): for label_id, label in display_names: target_expl = TargetExplanation( target=label, feature_weights=_weights(label_id), score=score[label_id], ) add_weighted_spans(doc, vec, vectorized, target_expl) res.targets.append(target_expl) else: target_expl = TargetExplanation( target=display_names[0][1], feature_weights=_weights(0), score=score, ) add_weighted_spans(doc, vec, vectorized, target_expl) res.targets.append(target_expl) return res
def function[explain_prediction_linear_regressor, parameter[reg, doc, vec, top, top_targets, target_names, targets, feature_names, feature_re, feature_filter, vectorized]]: constant[ Explain prediction of a linear regressor. See :func:`eli5.explain_prediction` for description of ``top``, ``top_targets``, ``target_names``, ``targets``, ``feature_names``, ``feature_re`` and ``feature_filter`` parameters. ``vec`` is a vectorizer instance used to transform raw features to the input of the classifier ``clf``; you can pass it instead of ``feature_names``. ``vectorized`` is a flag which tells eli5 if ``doc`` should be passed through ``vec`` or not. By default it is False, meaning that if ``vec`` is not None, ``vec.transform([doc])`` is passed to the regressor ``reg``. Set it to True if you're passing ``vec``, but ``doc`` is already vectorized. ] if <ast.BoolOp object at 0x7da18eb57040> begin[:] return[call[name[explain_prediction_sklearn_not_supported], parameter[name[reg], name[doc]]]] <ast.Tuple object at 0x7da18eb55f60> assign[=] call[name[handle_vec], parameter[name[reg], name[doc], name[vec], name[vectorized], name[feature_names]]] variable[X] assign[=] call[name[get_X], parameter[name[doc]]] <ast.Tuple object at 0x7da18eb55840> assign[=] call[name[reg].predict, parameter[name[X]]] if call[name[has_intercept], parameter[name[reg]]] begin[:] variable[X] assign[=] call[name[add_intercept], parameter[name[X]]] variable[x] assign[=] call[name[get_X0], parameter[name[X]]] <ast.Tuple object at 0x7da18eb546d0> assign[=] call[name[feature_names].handle_filter, parameter[name[feature_filter], name[feature_re], name[x]]] variable[res] assign[=] call[name[Explanation], parameter[]] assert[compare[name[res].targets is_not constant[None]]] variable[_weights] assign[=] call[name[_linear_weights], parameter[name[reg], name[x], name[top], name[feature_names], name[flt_indices]]] variable[names] assign[=] call[name[get_default_target_names], parameter[name[reg]]] variable[display_names] assign[=] call[name[get_target_display_names], parameter[name[names], name[target_names], name[targets], name[top_targets], name[score]]] if call[name[is_multitarget_regressor], parameter[name[reg]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18eb559f0>, <ast.Name object at 0x7da18eb54af0>]]] in starred[name[display_names]] begin[:] variable[target_expl] assign[=] call[name[TargetExplanation], parameter[]] call[name[add_weighted_spans], parameter[name[doc], name[vec], name[vectorized], name[target_expl]]] call[name[res].targets.append, parameter[name[target_expl]]] return[name[res]]
keyword[def] identifier[explain_prediction_linear_regressor] ( identifier[reg] , identifier[doc] , identifier[vec] = keyword[None] , identifier[top] = keyword[None] , identifier[top_targets] = keyword[None] , identifier[target_names] = keyword[None] , identifier[targets] = keyword[None] , identifier[feature_names] = keyword[None] , identifier[feature_re] = keyword[None] , identifier[feature_filter] = keyword[None] , identifier[vectorized] = keyword[False] ): literal[string] keyword[if] identifier[isinstance] ( identifier[reg] ,( identifier[SVR] , identifier[NuSVR] )) keyword[and] identifier[reg] . identifier[kernel] != literal[string] : keyword[return] identifier[explain_prediction_sklearn_not_supported] ( identifier[reg] , identifier[doc] ) identifier[vec] , identifier[feature_names] = identifier[handle_vec] ( identifier[reg] , identifier[doc] , identifier[vec] , identifier[vectorized] , identifier[feature_names] ) identifier[X] = identifier[get_X] ( identifier[doc] , identifier[vec] = identifier[vec] , identifier[vectorized] = identifier[vectorized] , identifier[to_dense] = keyword[True] ) identifier[score] ,= identifier[reg] . identifier[predict] ( identifier[X] ) keyword[if] identifier[has_intercept] ( identifier[reg] ): identifier[X] = identifier[add_intercept] ( identifier[X] ) identifier[x] = identifier[get_X0] ( identifier[X] ) identifier[feature_names] , identifier[flt_indices] = identifier[feature_names] . identifier[handle_filter] ( identifier[feature_filter] , identifier[feature_re] , identifier[x] ) identifier[res] = identifier[Explanation] ( identifier[estimator] = identifier[repr] ( identifier[reg] ), identifier[method] = literal[string] , identifier[targets] =[], identifier[is_regression] = keyword[True] , ) keyword[assert] identifier[res] . identifier[targets] keyword[is] keyword[not] keyword[None] identifier[_weights] = identifier[_linear_weights] ( identifier[reg] , identifier[x] , identifier[top] , identifier[feature_names] , identifier[flt_indices] ) identifier[names] = identifier[get_default_target_names] ( identifier[reg] ) identifier[display_names] = identifier[get_target_display_names] ( identifier[names] , identifier[target_names] , identifier[targets] , identifier[top_targets] , identifier[score] ) keyword[if] identifier[is_multitarget_regressor] ( identifier[reg] ): keyword[for] identifier[label_id] , identifier[label] keyword[in] identifier[display_names] : identifier[target_expl] = identifier[TargetExplanation] ( identifier[target] = identifier[label] , identifier[feature_weights] = identifier[_weights] ( identifier[label_id] ), identifier[score] = identifier[score] [ identifier[label_id] ], ) identifier[add_weighted_spans] ( identifier[doc] , identifier[vec] , identifier[vectorized] , identifier[target_expl] ) identifier[res] . identifier[targets] . identifier[append] ( identifier[target_expl] ) keyword[else] : identifier[target_expl] = identifier[TargetExplanation] ( identifier[target] = identifier[display_names] [ literal[int] ][ literal[int] ], identifier[feature_weights] = identifier[_weights] ( literal[int] ), identifier[score] = identifier[score] , ) identifier[add_weighted_spans] ( identifier[doc] , identifier[vec] , identifier[vectorized] , identifier[target_expl] ) identifier[res] . identifier[targets] . identifier[append] ( identifier[target_expl] ) keyword[return] identifier[res]
def explain_prediction_linear_regressor(reg, doc, vec=None, top=None, top_targets=None, target_names=None, targets=None, feature_names=None, feature_re=None, feature_filter=None, vectorized=False): """ Explain prediction of a linear regressor. See :func:`eli5.explain_prediction` for description of ``top``, ``top_targets``, ``target_names``, ``targets``, ``feature_names``, ``feature_re`` and ``feature_filter`` parameters. ``vec`` is a vectorizer instance used to transform raw features to the input of the classifier ``clf``; you can pass it instead of ``feature_names``. ``vectorized`` is a flag which tells eli5 if ``doc`` should be passed through ``vec`` or not. By default it is False, meaning that if ``vec`` is not None, ``vec.transform([doc])`` is passed to the regressor ``reg``. Set it to True if you're passing ``vec``, but ``doc`` is already vectorized. """ if isinstance(reg, (SVR, NuSVR)) and reg.kernel != 'linear': return explain_prediction_sklearn_not_supported(reg, doc) # depends on [control=['if'], data=[]] (vec, feature_names) = handle_vec(reg, doc, vec, vectorized, feature_names) X = get_X(doc, vec=vec, vectorized=vectorized, to_dense=True) (score,) = reg.predict(X) if has_intercept(reg): X = add_intercept(X) # depends on [control=['if'], data=[]] x = get_X0(X) (feature_names, flt_indices) = feature_names.handle_filter(feature_filter, feature_re, x) res = Explanation(estimator=repr(reg), method='linear model', targets=[], is_regression=True) assert res.targets is not None _weights = _linear_weights(reg, x, top, feature_names, flt_indices) names = get_default_target_names(reg) display_names = get_target_display_names(names, target_names, targets, top_targets, score) if is_multitarget_regressor(reg): for (label_id, label) in display_names: target_expl = TargetExplanation(target=label, feature_weights=_weights(label_id), score=score[label_id]) add_weighted_spans(doc, vec, vectorized, target_expl) res.targets.append(target_expl) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: target_expl = TargetExplanation(target=display_names[0][1], feature_weights=_weights(0), score=score) add_weighted_spans(doc, vec, vectorized, target_expl) res.targets.append(target_expl) return res
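An end-to-end sketch through eli5's public entry point, which dispatches to this handler for sklearn linear regressors (the dataset choice is arbitrary):

```python
from sklearn.datasets import load_diabetes
from sklearn.linear_model import Ridge
import eli5

X, y = load_diabetes(return_X_y=True)
reg = Ridge().fit(X, y)
expl = eli5.explain_prediction(reg, X[0])  # Explanation with feature weights
```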
def POST_AUTH(self, courseid): # pylint: disable=arguments-differ """ POST request """ course, __ = self.get_course_and_check_rights(courseid, allow_all_staff=False) msg = "" error = False data = web.input() if not data.get("token", "") == self.user_manager.session_token(): msg = _("Operation aborted due to invalid token.") error = True elif "wipeall" in data: if not data.get("courseid", "") == courseid: msg = _("Wrong course id.") error = True else: try: self.dump_course(courseid) msg = _("All course data have been deleted.") except: msg = _("An error occurred while dumping course from database.") error = True elif "restore" in data: if "backupdate" not in data: msg = "No backup date selected." error = True else: try: dt = datetime.datetime.strptime(data["backupdate"], "%Y%m%d.%H%M%S") self.restore_course(courseid, data["backupdate"]) msg = _("Course restored to date : {}.").format(dt.strftime("%Y-%m-%d %H:%M:%S")) except: msg = _("An error occurred while restoring backup.") error = True elif "deleteall" in data: if not data.get("courseid", "") == courseid: msg = _("Wrong course id.") error = True else: try: self.delete_course(courseid) web.seeother(self.app.get_homepath() + '/index') except: msg = _("An error occurred while deleting the course data.") error = True return self.page(course, msg, error)
def function[POST_AUTH, parameter[self, courseid]]: constant[ POST request ] <ast.Tuple object at 0x7da1b1737ee0> assign[=] call[name[self].get_course_and_check_rights, parameter[name[courseid]]] variable[msg] assign[=] constant[] variable[error] assign[=] constant[False] variable[data] assign[=] call[name[web].input, parameter[]] if <ast.UnaryOp object at 0x7da18f721a80> begin[:] variable[msg] assign[=] call[name[_], parameter[constant[Operation aborted due to invalid token.]]] variable[error] assign[=] constant[True] return[call[name[self].page, parameter[name[course], name[msg], name[error]]]]
keyword[def] identifier[POST_AUTH] ( identifier[self] , identifier[courseid] ): literal[string] identifier[course] , identifier[__] = identifier[self] . identifier[get_course_and_check_rights] ( identifier[courseid] , identifier[allow_all_staff] = keyword[False] ) identifier[msg] = literal[string] identifier[error] = keyword[False] identifier[data] = identifier[web] . identifier[input] () keyword[if] keyword[not] identifier[data] . identifier[get] ( literal[string] , literal[string] )== identifier[self] . identifier[user_manager] . identifier[session_token] (): identifier[msg] = identifier[_] ( literal[string] ) identifier[error] = keyword[True] keyword[elif] literal[string] keyword[in] identifier[data] : keyword[if] keyword[not] identifier[data] . identifier[get] ( literal[string] , literal[string] )== identifier[courseid] : identifier[msg] = identifier[_] ( literal[string] ) identifier[error] = keyword[True] keyword[else] : keyword[try] : identifier[self] . identifier[dump_course] ( identifier[courseid] ) identifier[msg] = identifier[_] ( literal[string] ) keyword[except] : identifier[msg] = identifier[_] ( literal[string] ) identifier[error] = keyword[True] keyword[elif] literal[string] keyword[in] identifier[data] : keyword[if] literal[string] keyword[not] keyword[in] identifier[data] : identifier[msg] = literal[string] identifier[error] = keyword[True] keyword[else] : keyword[try] : identifier[dt] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[data] [ literal[string] ], literal[string] ) identifier[self] . identifier[restore_course] ( identifier[courseid] , identifier[data] [ literal[string] ]) identifier[msg] = identifier[_] ( literal[string] ). identifier[format] ( identifier[dt] . identifier[strftime] ( literal[string] )) keyword[except] : identifier[msg] = identifier[_] ( literal[string] ) identifier[error] = keyword[True] keyword[elif] literal[string] keyword[in] identifier[data] : keyword[if] keyword[not] identifier[data] . identifier[get] ( literal[string] , literal[string] )== identifier[courseid] : identifier[msg] = identifier[_] ( literal[string] ) identifier[error] = keyword[True] keyword[else] : keyword[try] : identifier[self] . identifier[delete_course] ( identifier[courseid] ) identifier[web] . identifier[seeother] ( identifier[self] . identifier[app] . identifier[get_homepath] ()+ literal[string] ) keyword[except] : identifier[msg] = identifier[_] ( literal[string] ) identifier[error] = keyword[True] keyword[return] identifier[self] . identifier[page] ( identifier[course] , identifier[msg] , identifier[error] )
def POST_AUTH(self, courseid): # pylint: disable=arguments-differ ' POST request ' (course, __) = self.get_course_and_check_rights(courseid, allow_all_staff=False) msg = '' error = False data = web.input() if not data.get('token', '') == self.user_manager.session_token(): msg = _('Operation aborted due to invalid token.') error = True # depends on [control=['if'], data=[]] elif 'wipeall' in data: if not data.get('courseid', '') == courseid: msg = _('Wrong course id.') error = True # depends on [control=['if'], data=[]] else: try: self.dump_course(courseid) msg = _('All course data have been deleted.') # depends on [control=['try'], data=[]] except: msg = _('An error occurred while dumping course from database.') error = True # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['data']] elif 'restore' in data: if 'backupdate' not in data: msg = 'No backup date selected.' error = True # depends on [control=['if'], data=[]] else: try: dt = datetime.datetime.strptime(data['backupdate'], '%Y%m%d.%H%M%S') self.restore_course(courseid, data['backupdate']) msg = _('Course restored to date : {}.').format(dt.strftime('%Y-%m-%d %H:%M:%S')) # depends on [control=['try'], data=[]] except: msg = _('An error occurred while restoring backup.') error = True # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['data']] elif 'deleteall' in data: if not data.get('courseid', '') == courseid: msg = _('Wrong course id.') error = True # depends on [control=['if'], data=[]] else: try: self.delete_course(courseid) web.seeother(self.app.get_homepath() + '/index') # depends on [control=['try'], data=[]] except: msg = _('An error occurred while deleting the course data.') error = True # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['data']] return self.page(course, msg, error)
def lin_sim_calc(goid1, goid2, sim_r, termcnts):
    '''
    Computes Lin's similarity measure using pre-calculated Resnik's similarities.
    '''
    if sim_r is not None:
        info = get_info_content(goid1, termcnts) + get_info_content(goid2, termcnts)
        if info != 0:
            return (2*sim_r)/info
def function[lin_sim_calc, parameter[goid1, goid2, sim_r, termcnts]]: constant[ Computes Lin's similarity measure using pre-calculated Resnik's similarities. ] if compare[name[sim_r] is_not constant[None]] begin[:] variable[info] assign[=] binary_operation[call[name[get_info_content], parameter[name[goid1], name[termcnts]]] + call[name[get_info_content], parameter[name[goid2], name[termcnts]]]] if compare[name[info] not_equal[!=] constant[0]] begin[:] return[binary_operation[binary_operation[constant[2] * name[sim_r]] / name[info]]]
keyword[def] identifier[lin_sim_calc] ( identifier[goid1] , identifier[goid2] , identifier[sim_r] , identifier[termcnts] ): literal[string] keyword[if] identifier[sim_r] keyword[is] keyword[not] keyword[None] : identifier[info] = identifier[get_info_content] ( identifier[goid1] , identifier[termcnts] )+ identifier[get_info_content] ( identifier[goid2] , identifier[termcnts] ) keyword[if] identifier[info] != literal[int] : keyword[return] ( literal[int] * identifier[sim_r] )/ identifier[info]
def lin_sim_calc(goid1, goid2, sim_r, termcnts):
    """
    Computes Lin's similarity measure using pre-calculated Resnik's similarities.
    """
    if sim_r is not None:
        info = get_info_content(goid1, termcnts) + get_info_content(goid2, termcnts)
        if info != 0:
            return 2 * sim_r / info # depends on [control=['if'], data=['info']] # depends on [control=['if'], data=['sim_r']]
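A minimal worked example of the Lin formula above, with made-up information-content values (get_info_content and termcnts are assumed to come from the surrounding GO-similarity module):

# Hypothetical values: IC(goid1) = 4.0, IC(goid2) = 6.0, and a pre-computed
# Resnik similarity (IC of the most informative common ancestor) of 3.5.
def lin_from_ic(ic1, ic2, sim_r):
    # Same arithmetic as lin_sim_calc, restated over plain floats.
    if sim_r is not None and ic1 + ic2 != 0:
        return (2 * sim_r) / (ic1 + ic2)

assert lin_from_ic(4.0, 6.0, 3.5) == 0.7  # 2 * 3.5 / (4.0 + 6.0)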
def guarded(self, call, *args):
    """ Catch exceptions thrown by filesystem calls, and don't really execute them in dry-run mode.
    """
    self.LOG.debug('%s(%s)' % (
        call.__name__, ', '.join([pretty_path(i) for i in args]),
    ))
    if not self.options.dry_run:
        try:
            call(*args)
        except (EnvironmentError, UnicodeError) as exc:
            self.fatal('%s(%s) failed [%s]' % (
                call.__name__, ', '.join([pretty_path(i) for i in args]), exc,
            ))
def function[guarded, parameter[self, call]]: constant[ Catch exceptions thrown by filesystem calls, and don't really execute them in dry-run mode. ] call[name[self].LOG.debug, parameter[binary_operation[constant[%s(%s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18ede6f80>, <ast.Call object at 0x7da18ede51e0>]]]]] if <ast.UnaryOp object at 0x7da18ede7220> begin[:] <ast.Try object at 0x7da18ede76a0>
keyword[def] identifier[guarded] ( identifier[self] , identifier[call] ,* identifier[args] ): literal[string] identifier[self] . identifier[LOG] . identifier[debug] ( literal[string] %( identifier[call] . identifier[__name__] , literal[string] . identifier[join] ([ identifier[pretty_path] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[args] ]), )) keyword[if] keyword[not] identifier[self] . identifier[options] . identifier[dry_run] : keyword[try] : identifier[call] (* identifier[args] ) keyword[except] ( identifier[EnvironmentError] , identifier[UnicodeError] ) keyword[as] identifier[exc] : identifier[self] . identifier[fatal] ( literal[string] %( identifier[call] . identifier[__name__] , literal[string] . identifier[join] ([ identifier[pretty_path] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[args] ]), identifier[exc] , ))
def guarded(self, call, *args):
    """ Catch exceptions thrown by filesystem calls, and don't really execute them in dry-run mode.
    """
    self.LOG.debug('%s(%s)' % (call.__name__, ', '.join([pretty_path(i) for i in args])))
    if not self.options.dry_run:
        try:
            call(*args) # depends on [control=['try'], data=[]]
        except (EnvironmentError, UnicodeError) as exc:
            self.fatal('%s(%s) failed [%s]' % (call.__name__, ', '.join([pretty_path(i) for i in args]), exc)) # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=[]]
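Usage sketch (illustrative; self here stands for the surrounding tool object, which supplies LOG, options.dry_run, and fatal):

import os

# Logged always; executed only when dry-run mode is off. An EnvironmentError
# from os.rename becomes a fatal() message instead of a raw traceback.
self.guarded(os.rename, '/tmp/old_name', '/tmp/new_name')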
def _check_roSet(orb,kwargs,funcName):
    """Function to check whether ro is set, because it's required for funcName"""
    if not orb._roSet and kwargs.get('ro',None) is None:
        warnings.warn("Method %s(.) requires ro to be given at Orbit initialization or at method evaluation; using default ro which is %f kpc" % (funcName,orb._ro),
                      galpyWarning)
def function[_check_roSet, parameter[orb, kwargs, funcName]]: constant[Function to check whether ro is set, because it's required for funcName] if <ast.BoolOp object at 0x7da1b0ec3f10> begin[:] call[name[warnings].warn, parameter[binary_operation[constant[Method %s(.) requires ro to be given at Orbit initialization or at method evaluation; using default ro which is %f kpc] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0c93d60>, <ast.Attribute object at 0x7da1b0c93d90>]]], name[galpyWarning]]]
keyword[def] identifier[_check_roSet] ( identifier[orb] , identifier[kwargs] , identifier[funcName] ): literal[string] keyword[if] keyword[not] identifier[orb] . identifier[_roSet] keyword[and] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ) keyword[is] keyword[None] : identifier[warnings] . identifier[warn] ( literal[string] %( identifier[funcName] , identifier[orb] . identifier[_ro] ), identifier[galpyWarning] )
def _check_roSet(orb, kwargs, funcName):
    """Function to check whether ro is set, because it's required for funcName"""
    if not orb._roSet and kwargs.get('ro', None) is None:
        warnings.warn('Method %s(.) requires ro to be given at Orbit initialization or at method evaluation; using default ro which is %f kpc' % (funcName, orb._ro), galpyWarning) # depends on [control=['if'], data=[]]
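A hypothetical call site, sketching how an Orbit method might invoke the check (the method name and the energy helper are illustrative, not taken from the source):

def E(self, *args, **kwargs):
    # Warn via galpyWarning if ro was neither set at Orbit init nor passed here.
    _check_roSet(self, kwargs, 'E')
    return self._evaluate_energy(*args, **kwargs)  # hypothetical helper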
def __SoInit(self):
    '''The fast_cut function requires thulac.so; the .so file is imported here'''
    if(not self.__user_specified_dict_name):
        self.__user_specified_dict_name = ''
    return SoExtention(self.__prefix, self.__user_specified_dict_name, self.__useT2S, self.__seg_only)
def function[__SoInit, parameter[self]]: constant[The fast_cut function requires thulac.so; the .so file is imported here] if <ast.UnaryOp object at 0x7da18dc99660> begin[:] name[self].__user_specified_dict_name assign[=] constant[] return[call[name[SoExtention], parameter[name[self].__prefix, name[self].__user_specified_dict_name, name[self].__useT2S, name[self].__seg_only]]]
keyword[def] identifier[__SoInit] ( identifier[self] ): literal[string] keyword[if] ( keyword[not] identifier[self] . identifier[__user_specified_dict_name] ): identifier[self] . identifier[__user_specified_dict_name] = literal[string] keyword[return] identifier[SoExtention] ( identifier[self] . identifier[__prefix] , identifier[self] . identifier[__user_specified_dict_name] , identifier[self] . identifier[__useT2S] , identifier[self] . identifier[__seg_only] )
def __SoInit(self):
    """The fast_cut function requires thulac.so; the .so file is imported here"""
    if not self.__user_specified_dict_name:
        self.__user_specified_dict_name = '' # depends on [control=['if'], data=[]]
    return SoExtention(self.__prefix, self.__user_specified_dict_name, self.__useT2S, self.__seg_only)
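Illustrative internal use (the method is name-mangled, so it is only callable from inside the same class; the downstream cut call is hypothetical, inferred from the docstring's mention of fast_cut):

# so_handle = self.__SoInit()      # lazily builds the SoExtention wrapper
# segments = so_handle.cut(text)   # hypothetical downstream call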
def GetClientStates(self, client_list, client_chunk=50):
    """Take in a client list and return dicts with their age and hostname."""
    for client_group in collection.Batch(client_list, client_chunk):
        for fd in aff4.FACTORY.MultiOpen(
                client_group,
                mode="r",
                aff4_type=aff4_grr.VFSGRRClient,
                token=self.token):
            result = {}
            result["age"] = fd.Get(fd.Schema.PING)
            result["hostname"] = fd.Get(fd.Schema.HOSTNAME)
            yield (fd.urn, result)
def function[GetClientStates, parameter[self, client_list, client_chunk]]: constant[Take in a client list and return dicts with their age and hostname.] for taget[name[client_group]] in starred[call[name[collection].Batch, parameter[name[client_list], name[client_chunk]]]] begin[:] for taget[name[fd]] in starred[call[name[aff4].FACTORY.MultiOpen, parameter[name[client_group]]]] begin[:] variable[result] assign[=] dictionary[[], []] call[name[result]][constant[age]] assign[=] call[name[fd].Get, parameter[name[fd].Schema.PING]] call[name[result]][constant[hostname]] assign[=] call[name[fd].Get, parameter[name[fd].Schema.HOSTNAME]] <ast.Yield object at 0x7da1b1b44250>
keyword[def] identifier[GetClientStates] ( identifier[self] , identifier[client_list] , identifier[client_chunk] = literal[int] ): literal[string] keyword[for] identifier[client_group] keyword[in] identifier[collection] . identifier[Batch] ( identifier[client_list] , identifier[client_chunk] ): keyword[for] identifier[fd] keyword[in] identifier[aff4] . identifier[FACTORY] . identifier[MultiOpen] ( identifier[client_group] , identifier[mode] = literal[string] , identifier[aff4_type] = identifier[aff4_grr] . identifier[VFSGRRClient] , identifier[token] = identifier[self] . identifier[token] ): identifier[result] ={} identifier[result] [ literal[string] ]= identifier[fd] . identifier[Get] ( identifier[fd] . identifier[Schema] . identifier[PING] ) identifier[result] [ literal[string] ]= identifier[fd] . identifier[Get] ( identifier[fd] . identifier[Schema] . identifier[HOSTNAME] ) keyword[yield] ( identifier[fd] . identifier[urn] , identifier[result] )
def GetClientStates(self, client_list, client_chunk=50):
    """Take in a client list and return dicts with their age and hostname."""
    for client_group in collection.Batch(client_list, client_chunk):
        for fd in aff4.FACTORY.MultiOpen(client_group, mode='r', aff4_type=aff4_grr.VFSGRRClient, token=self.token):
            result = {}
            result['age'] = fd.Get(fd.Schema.PING)
            result['hostname'] = fd.Get(fd.Schema.HOSTNAME)
            yield (fd.urn, result) # depends on [control=['for'], data=['fd']] # depends on [control=['for'], data=['client_group']]
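Consumption sketch (client_urns is a hypothetical list of client URNs; clients are opened in batches of 50 by default):

for urn, info in self.GetClientStates(client_urns):
    # Each item pairs the client URN with its last-ping time and hostname.
    print(urn, info['age'], info['hostname'])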
def result(i):
    """ Returns which 8-bit registers are used by an asm
    instruction to return a result.
    """
    ins = inst(i)
    op = oper(i)
    if ins in ('or', 'and') and op == ['a']:
        return ['f']
    if ins in {'xor', 'or', 'and', 'neg', 'cpl', 'daa', 'rld', 'rrd', 'rra', 'rla', 'rrca', 'rlca'}:
        return ['a', 'f']
    if ins in {'bit', 'cp', 'scf', 'ccf'}:
        return ['f']
    if ins in {'sub', 'add', 'sbc', 'adc'}:
        if len(op) == 1:
            return ['a', 'f']
        else:
            return single_registers(op[0]) + ['f']
    if ins == 'djnz':
        return ['b', 'f']
    if ins in {'ldir', 'ldi', 'lddr', 'ldd'}:
        return ['f', 'b', 'c', 'd', 'e', 'h', 'l']
    if ins in {'cpi', 'cpir', 'cpd', 'cpdr'}:
        return ['f', 'b', 'c', 'h', 'l']
    if ins in ('pop', 'ld'):
        return single_registers(op[0])
    if ins in {'inc', 'dec', 'sbc', 'rr', 'rl', 'rrc', 'rlc'}:
        return ['f'] + single_registers(op[0])
    if ins in ('set', 'res'):
        return single_registers(op[1])
    return []
def function[result, parameter[i]]: constant[ Returns which 8-bit registers are used by an asm instruction to return a result. ] variable[ins] assign[=] call[name[inst], parameter[name[i]]] variable[op] assign[=] call[name[oper], parameter[name[i]]] if <ast.BoolOp object at 0x7da20c6c74c0> begin[:] return[list[[<ast.Constant object at 0x7da20c6c4790>]]] if compare[name[ins] in <ast.Set object at 0x7da20c6c7eb0>] begin[:] return[list[[<ast.Constant object at 0x7da20c6c4700>, <ast.Constant object at 0x7da20c6c4490>]]] if compare[name[ins] in <ast.Set object at 0x7da18f09c2b0>] begin[:] return[list[[<ast.Constant object at 0x7da18f09c490>]]] if compare[name[ins] in <ast.Set object at 0x7da18f09f730>] begin[:] if compare[call[name[len], parameter[name[op]]] equal[==] constant[1]] begin[:] return[list[[<ast.Constant object at 0x7da18f09fd60>, <ast.Constant object at 0x7da18f09f070>]]] if compare[name[ins] equal[==] constant[djnz]] begin[:] return[list[[<ast.Constant object at 0x7da18f09ebf0>, <ast.Constant object at 0x7da18f09f310>]]] if compare[name[ins] in <ast.Set object at 0x7da18f09da20>] begin[:] return[list[[<ast.Constant object at 0x7da18f09c250>, <ast.Constant object at 0x7da18f09e230>, <ast.Constant object at 0x7da18f09cb20>, <ast.Constant object at 0x7da18f09f1f0>, <ast.Constant object at 0x7da18f09c6a0>, <ast.Constant object at 0x7da18f09e6e0>, <ast.Constant object at 0x7da18f09d4e0>]]] if compare[name[ins] in <ast.Set object at 0x7da18f09d060>] begin[:] return[list[[<ast.Constant object at 0x7da18f09ee60>, <ast.Constant object at 0x7da18f09cbb0>, <ast.Constant object at 0x7da18f09cbe0>, <ast.Constant object at 0x7da18f09f610>, <ast.Constant object at 0x7da18f09d690>]]] if compare[name[ins] in tuple[[<ast.Constant object at 0x7da18f09fd90>, <ast.Constant object at 0x7da18f09f100>]]] begin[:] return[call[name[single_registers], parameter[call[name[op]][constant[0]]]]] if compare[name[ins] in <ast.Set object at 0x7da18f09dc30>] begin[:] return[binary_operation[list[[<ast.Constant object at 0x7da18f09e080>]] + call[name[single_registers], parameter[call[name[op]][constant[0]]]]]] if compare[name[ins] in tuple[[<ast.Constant object at 0x7da18f09fbe0>, <ast.Constant object at 0x7da18f09c3a0>]]] begin[:] return[call[name[single_registers], parameter[call[name[op]][constant[1]]]]] return[list[[]]]
keyword[def] identifier[result] ( identifier[i] ): literal[string] identifier[ins] = identifier[inst] ( identifier[i] ) identifier[op] = identifier[oper] ( identifier[i] ) keyword[if] identifier[ins] keyword[in] ( literal[string] , literal[string] ) keyword[and] identifier[op] ==[ literal[string] ]: keyword[return] [ literal[string] ] keyword[if] identifier[ins] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] }: keyword[return] [ literal[string] , literal[string] ] keyword[if] identifier[ins] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] }: keyword[return] [ literal[string] ] keyword[if] identifier[ins] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] }: keyword[if] identifier[len] ( identifier[op] )== literal[int] : keyword[return] [ literal[string] , literal[string] ] keyword[else] : keyword[return] identifier[single_registers] ( identifier[op] [ literal[int] ])+[ literal[string] ] keyword[if] identifier[ins] == literal[string] : keyword[return] [ literal[string] , literal[string] ] keyword[if] identifier[ins] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] }: keyword[return] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] identifier[ins] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] }: keyword[return] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] identifier[ins] keyword[in] ( literal[string] , literal[string] ): keyword[return] identifier[single_registers] ( identifier[op] [ literal[int] ]) keyword[if] identifier[ins] keyword[in] { literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] }: keyword[return] [ literal[string] ]+ identifier[single_registers] ( identifier[op] [ literal[int] ]) keyword[if] identifier[ins] keyword[in] ( literal[string] , literal[string] ): keyword[return] identifier[single_registers] ( identifier[op] [ literal[int] ]) keyword[return] []
def result(i):
    """ Returns which 8-bit registers are used by an asm
    instruction to return a result.
    """
    ins = inst(i)
    op = oper(i)
    if ins in ('or', 'and') and op == ['a']:
        return ['f'] # depends on [control=['if'], data=[]]
    if ins in {'xor', 'or', 'and', 'neg', 'cpl', 'daa', 'rld', 'rrd', 'rra', 'rla', 'rrca', 'rlca'}:
        return ['a', 'f'] # depends on [control=['if'], data=[]]
    if ins in {'bit', 'cp', 'scf', 'ccf'}:
        return ['f'] # depends on [control=['if'], data=[]]
    if ins in {'sub', 'add', 'sbc', 'adc'}:
        if len(op) == 1:
            return ['a', 'f'] # depends on [control=['if'], data=[]]
        else:
            return single_registers(op[0]) + ['f'] # depends on [control=['if'], data=[]]
    if ins == 'djnz':
        return ['b', 'f'] # depends on [control=['if'], data=[]]
    if ins in {'ldir', 'ldi', 'lddr', 'ldd'}:
        return ['f', 'b', 'c', 'd', 'e', 'h', 'l'] # depends on [control=['if'], data=[]]
    if ins in {'cpi', 'cpir', 'cpd', 'cpdr'}:
        return ['f', 'b', 'c', 'h', 'l'] # depends on [control=['if'], data=[]]
    if ins in ('pop', 'ld'):
        return single_registers(op[0]) # depends on [control=['if'], data=[]]
    if ins in {'inc', 'dec', 'sbc', 'rr', 'rl', 'rrc', 'rlc'}:
        return ['f'] + single_registers(op[0]) # depends on [control=['if'], data=[]]
    if ins in ('set', 'res'):
        return single_registers(op[1]) # depends on [control=['if'], data=[]]
    return []
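Illustrative inputs and outputs, assuming inst() extracts the mnemonic and oper() the operand list from an asm line, and that single_registers('bc') == ['b', 'c']:

# result('pop bc')     -> ['b', 'c']   (pop: registers of the operand)
# result('add a, b')   -> ['a', 'f']   (two-operand add: op[0] plus flags)
# result('djnz label') -> ['b', 'f']
# result('nop')        -> []           (falls through to the default)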
def rfc2425encode(name,value,parameters=None,charset="utf-8"):
    """Encodes a vCard field into an RFC2425 line.

    :Parameters:
        - `name`: field type name
        - `value`: field value
        - `parameters`: optional parameters
        - `charset`: encoding of the output and of the `value` (if not `unicode`)
    :Types:
        - `name`: `str`
        - `value`: `unicode` or `str`
        - `parameters`: `dict` of `str` -> `str`
        - `charset`: `str`

    :return: the encoded RFC2425 line (possibly folded)
    :returntype: `str`"""
    if not parameters:
        parameters={}
    if type(value) is unicode:
        value=value.replace(u"\r\n",u"\\n")
        value=value.replace(u"\n",u"\\n")
        value=value.replace(u"\r",u"\\n")
        value=value.encode(charset,"replace")
    elif type(value) is not str:
        raise TypeError("Bad type for rfc2425 value")
    elif not valid_string_re.match(value):
        parameters["encoding"]="b"
        value=binascii.b2a_base64(value)
    ret=str(name).lower()
    for k,v in parameters.items():
        ret+=";%s=%s" % (str(k),str(v))
    ret+=":"
    while(len(value)>70):
        ret+=value[:70]+"\r\n "
        value=value[70:]
    ret+=value+"\r\n"
    return ret
def function[rfc2425encode, parameter[name, value, parameters, charset]]: constant[Encodes a vCard field into an RFC2425 line. :Parameters: - `name`: field type name - `value`: field value - `parameters`: optional parameters - `charset`: encoding of the output and of the `value` (if not `unicode`) :Types: - `name`: `str` - `value`: `unicode` or `str` - `parameters`: `dict` of `str` -> `str` - `charset`: `str` :return: the encoded RFC2425 line (possibly folded) :returntype: `str`] if <ast.UnaryOp object at 0x7da1b004f370> begin[:] variable[parameters] assign[=] dictionary[[], []] if compare[call[name[type], parameter[name[value]]] is name[unicode]] begin[:] variable[value] assign[=] call[name[value].replace, parameter[constant[ ], constant[\n]]] variable[value] assign[=] call[name[value].replace, parameter[constant[ ], constant[\n]]] variable[value] assign[=] call[name[value].replace, parameter[constant[ ], constant[\n]]] variable[value] assign[=] call[name[value].encode, parameter[name[charset], constant[replace]]] variable[ret] assign[=] call[call[name[str], parameter[name[name]]].lower, parameter[]] for taget[tuple[[<ast.Name object at 0x7da20c991750>, <ast.Name object at 0x7da20c992140>]]] in starred[call[name[parameters].items, parameter[]]] begin[:] <ast.AugAssign object at 0x7da18eb57c40> <ast.AugAssign object at 0x7da18ede4310> while compare[call[name[len], parameter[name[value]]] greater[>] constant[70]] begin[:] <ast.AugAssign object at 0x7da18ede4820> variable[value] assign[=] call[name[value]][<ast.Slice object at 0x7da18ede73a0>] <ast.AugAssign object at 0x7da18ede5840> return[name[ret]]
keyword[def] identifier[rfc2425encode] ( identifier[name] , identifier[value] , identifier[parameters] = keyword[None] , identifier[charset] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[parameters] : identifier[parameters] ={} keyword[if] identifier[type] ( identifier[value] ) keyword[is] identifier[unicode] : identifier[value] = identifier[value] . identifier[replace] ( literal[string] , literal[string] ) identifier[value] = identifier[value] . identifier[replace] ( literal[string] , literal[string] ) identifier[value] = identifier[value] . identifier[replace] ( literal[string] , literal[string] ) identifier[value] = identifier[value] . identifier[encode] ( identifier[charset] , literal[string] ) keyword[elif] identifier[type] ( identifier[value] ) keyword[is] keyword[not] identifier[str] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[elif] keyword[not] identifier[valid_string_re] . identifier[match] ( identifier[value] ): identifier[parameters] [ literal[string] ]= literal[string] identifier[value] = identifier[binascii] . identifier[b2a_base64] ( identifier[value] ) identifier[ret] = identifier[str] ( identifier[name] ). identifier[lower] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[parameters] . identifier[items] (): identifier[ret] += literal[string] %( identifier[str] ( identifier[k] ), identifier[str] ( identifier[v] )) identifier[ret] += literal[string] keyword[while] ( identifier[len] ( identifier[value] )> literal[int] ): identifier[ret] += identifier[value] [: literal[int] ]+ literal[string] identifier[value] = identifier[value] [ literal[int] :] identifier[ret] += identifier[value] + literal[string] keyword[return] identifier[ret]
def rfc2425encode(name, value, parameters=None, charset='utf-8'):
    """Encodes a vCard field into an RFC2425 line.

    :Parameters:
        - `name`: field type name
        - `value`: field value
        - `parameters`: optional parameters
        - `charset`: encoding of the output and of the `value` (if not `unicode`)
    :Types:
        - `name`: `str`
        - `value`: `unicode` or `str`
        - `parameters`: `dict` of `str` -> `str`
        - `charset`: `str`

    :return: the encoded RFC2425 line (possibly folded)
    :returntype: `str`"""
    if not parameters:
        parameters = {} # depends on [control=['if'], data=[]]
    if type(value) is unicode:
        value = value.replace(u'\r\n', u'\\n')
        value = value.replace(u'\n', u'\\n')
        value = value.replace(u'\r', u'\\n')
        value = value.encode(charset, 'replace') # depends on [control=['if'], data=[]]
    elif type(value) is not str:
        raise TypeError('Bad type for rfc2425 value') # depends on [control=['if'], data=[]]
    elif not valid_string_re.match(value):
        parameters['encoding'] = 'b'
        value = binascii.b2a_base64(value) # depends on [control=['if'], data=[]]
    ret = str(name).lower()
    for (k, v) in parameters.items():
        ret += ';%s=%s' % (str(k), str(v)) # depends on [control=['for'], data=[]]
    ret += ':'
    while len(value) > 70:
        ret += value[:70] + '\r\n '
        value = value[70:] # depends on [control=['while'], data=[]]
    ret += value + '\r\n'
    return ret
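Two short worked examples, hand-derived from the code above (outputs assume the values match valid_string_re, so the base64 branch does not trigger):

# rfc2425encode('FN', u'John Doe')
#   -> 'fn:John Doe\r\n'                 (type name lowercased, CRLF-terminated)
# rfc2425encode('TEL', u'+1 555 0100', {'type': 'home'})
#   -> 'tel;type=home:+1 555 0100\r\n'
# Values longer than 70 octets are folded: each 70-octet chunk ends with
# '\r\n' followed by a single space, per the while-loop above.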
def merge(self, sample):
    """
    Use bbmerge to merge paired FASTQ files for use in metagenomics pipelines. Create a report with the
    total number of reads, and the number of reads that could be paired
    :param sample: metadata sample object flagged as a metagenome
    """
    # Set the assembly file to 'NA' as assembly is not desirable for metagenomes
    sample.general.assemblyfile = 'NA'
    # Can only merge paired-end
    if len(sample.general.fastqfiles) == 2:
        outpath = os.path.join(sample.general.outputdirectory, 'merged_reads')
        make_path(outpath)
        # Merge path - keep all the merged FASTQ files in one directory
        merge_path = os.path.join(self.path, 'merged_reads')
        make_path(merge_path)
        # Set the name of the merged, and unmerged files
        sample.general.mergedreads = \
            os.path.join(merge_path, '{}_paired.fastq.gz'.format(sample.name))
        log = os.path.join(outpath, 'log')
        error = os.path.join(outpath, 'err')
        try:
            if not os.path.isfile(sample.general.mergedreads):
                # Run the merging command
                out, err, cmd = bbtools.bbmerge(forward_in=sorted(sample.general.trimmedcorrectedfastqfiles)[0],
                                                merged_reads=sample.general.mergedreads,
                                                mix=True,
                                                returncmd=True,
                                                threads=self.cpus)
                write_to_logfile(out, err, self.logfile, sample.general.logout, sample.general.logerr, None, None)
                with open(log, 'w') as log_file:
                    log_file.write(out)
                with open(error, 'w') as error_file:
                    error_file.write(err)
        except (CalledProcessError, IndexError):
            delattr(sample.general, 'mergedreads')
        # Set the name of the report to store the metagenome file merging results
        report = os.path.join(self.reportpath, 'merged_metagenomes.csv')
        # Extract the total number of reads, and the number of reads that could be paired from the bbmerge
        # err stream
        num_reads, num_pairs = self.reads(error)
        # If the report doesn't exist, create it with the header and the results from the first sample
        if not os.path.isfile(report):
            with open(report, 'w') as report_file:
                report_file.write('Sample,TotalReads,PairedReads\n{sample},{total},{paired}\n'
                                  .format(sample=sample.name, total=num_reads, paired=num_pairs))
        # If the report exists, open it to determine which samples have already been added - useful if re-running
        # the analysis
        else:
            lines = list()
            with open(report, 'r') as report_file:
                for line in report_file:
                    lines.append(line.split(',')[0])
            # Add the results to the report
            if sample.name not in lines:
                with open(report, 'a+') as report_file:
                    report_file.write('{sample},{total},{paired}\n'
                                      .format(sample=sample.name, total=num_reads, paired=num_pairs))
def function[merge, parameter[self, sample]]: constant[ Use bbmerge to merge paired FASTQ files for use in metagenomics pipelines. Create a report with the total number of reads, and the number of reads that could be paired :param sample: metadata sample object flagged as a metagenome ] name[sample].general.assemblyfile assign[=] constant[NA] if compare[call[name[len], parameter[name[sample].general.fastqfiles]] equal[==] constant[2]] begin[:] variable[outpath] assign[=] call[name[os].path.join, parameter[name[sample].general.outputdirectory, constant[merged_reads]]] call[name[make_path], parameter[name[outpath]]] variable[merge_path] assign[=] call[name[os].path.join, parameter[name[self].path, constant[merged_reads]]] call[name[make_path], parameter[name[merge_path]]] name[sample].general.mergedreads assign[=] call[name[os].path.join, parameter[name[merge_path], call[constant[{}_paired.fastq.gz].format, parameter[name[sample].name]]]] variable[log] assign[=] call[name[os].path.join, parameter[name[outpath], constant[log]]] variable[error] assign[=] call[name[os].path.join, parameter[name[outpath], constant[err]]] <ast.Try object at 0x7da204962e00> variable[report] assign[=] call[name[os].path.join, parameter[name[self].reportpath, constant[merged_metagenomes.csv]]] <ast.Tuple object at 0x7da20c76ec50> assign[=] call[name[self].reads, parameter[name[error]]] if <ast.UnaryOp object at 0x7da20c76fa30> begin[:] with call[name[open], parameter[name[report], constant[w]]] begin[:] call[name[report_file].write, parameter[call[constant[Sample,TotalReads,PairedReads {sample},{total},{paired} ].format, parameter[]]]]
keyword[def] identifier[merge] ( identifier[self] , identifier[sample] ): literal[string] identifier[sample] . identifier[general] . identifier[assemblyfile] = literal[string] keyword[if] identifier[len] ( identifier[sample] . identifier[general] . identifier[fastqfiles] )== literal[int] : identifier[outpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[sample] . identifier[general] . identifier[outputdirectory] , literal[string] ) identifier[make_path] ( identifier[outpath] ) identifier[merge_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , literal[string] ) identifier[make_path] ( identifier[merge_path] ) identifier[sample] . identifier[general] . identifier[mergedreads] = identifier[os] . identifier[path] . identifier[join] ( identifier[merge_path] , literal[string] . identifier[format] ( identifier[sample] . identifier[name] )) identifier[log] = identifier[os] . identifier[path] . identifier[join] ( identifier[outpath] , literal[string] ) identifier[error] = identifier[os] . identifier[path] . identifier[join] ( identifier[outpath] , literal[string] ) keyword[try] : keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[sample] . identifier[general] . identifier[mergedreads] ): identifier[out] , identifier[err] , identifier[cmd] = identifier[bbtools] . identifier[bbmerge] ( identifier[forward_in] = identifier[sorted] ( identifier[sample] . identifier[general] . identifier[trimmedcorrectedfastqfiles] )[ literal[int] ], identifier[merged_reads] = identifier[sample] . identifier[general] . identifier[mergedreads] , identifier[mix] = keyword[True] , identifier[returncmd] = keyword[True] , identifier[threads] = identifier[self] . identifier[cpus] ) identifier[write_to_logfile] ( identifier[out] , identifier[err] , identifier[self] . identifier[logfile] , identifier[sample] . identifier[general] . identifier[logout] , identifier[sample] . identifier[general] . identifier[logerr] , keyword[None] , keyword[None] ) keyword[with] identifier[open] ( identifier[log] , literal[string] ) keyword[as] identifier[log_file] : identifier[log_file] . identifier[write] ( identifier[out] ) keyword[with] identifier[open] ( identifier[error] , literal[string] ) keyword[as] identifier[error_file] : identifier[error_file] . identifier[write] ( identifier[err] ) keyword[except] ( identifier[CalledProcessError] , identifier[IndexError] ): identifier[delattr] ( identifier[sample] . identifier[general] , literal[string] ) identifier[report] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[reportpath] , literal[string] ) identifier[num_reads] , identifier[num_pairs] = identifier[self] . identifier[reads] ( identifier[error] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[report] ): keyword[with] identifier[open] ( identifier[report] , literal[string] ) keyword[as] identifier[report_file] : identifier[report_file] . identifier[write] ( literal[string] . identifier[format] ( identifier[sample] = identifier[sample] . identifier[name] , identifier[total] = identifier[num_reads] , identifier[paired] = identifier[num_pairs] )) keyword[else] : identifier[lines] = identifier[list] () keyword[with] identifier[open] ( identifier[report] , literal[string] ) keyword[as] identifier[report_file] : keyword[for] identifier[line] keyword[in] identifier[report_file] : identifier[lines] . identifier[append] ( identifier[line] . identifier[split] ( literal[string] )[ literal[int] ]) keyword[if] identifier[sample] . identifier[name] keyword[not] keyword[in] identifier[lines] : keyword[with] identifier[open] ( identifier[report] , literal[string] ) keyword[as] identifier[report_file] : identifier[report_file] . identifier[write] ( literal[string] . identifier[format] ( identifier[sample] = identifier[sample] . identifier[name] , identifier[total] = identifier[num_reads] , identifier[paired] = identifier[num_pairs] ))
def merge(self, sample):
    """
    Use bbmerge to merge paired FASTQ files for use in metagenomics pipelines. Create a report with the
    total number of reads, and the number of reads that could be paired
    :param sample: metadata sample object flagged as a metagenome
    """
    # Set the assembly file to 'NA' as assembly is not desirable for metagenomes
    sample.general.assemblyfile = 'NA'
    # Can only merge paired-end
    if len(sample.general.fastqfiles) == 2:
        outpath = os.path.join(sample.general.outputdirectory, 'merged_reads')
        make_path(outpath)
        # Merge path - keep all the merged FASTQ files in one directory
        merge_path = os.path.join(self.path, 'merged_reads')
        make_path(merge_path)
        # Set the name of the merged, and unmerged files
        sample.general.mergedreads = os.path.join(merge_path, '{}_paired.fastq.gz'.format(sample.name))
        log = os.path.join(outpath, 'log')
        error = os.path.join(outpath, 'err')
        try:
            if not os.path.isfile(sample.general.mergedreads):
                # Run the merging command
                (out, err, cmd) = bbtools.bbmerge(forward_in=sorted(sample.general.trimmedcorrectedfastqfiles)[0], merged_reads=sample.general.mergedreads, mix=True, returncmd=True, threads=self.cpus)
                write_to_logfile(out, err, self.logfile, sample.general.logout, sample.general.logerr, None, None)
                with open(log, 'w') as log_file:
                    log_file.write(out) # depends on [control=['with'], data=['log_file']]
                with open(error, 'w') as error_file:
                    error_file.write(err) # depends on [control=['with'], data=['error_file']] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
        except (CalledProcessError, IndexError):
            delattr(sample.general, 'mergedreads') # depends on [control=['except'], data=[]]
        # Set the name of the report to store the metagenome file merging results
        report = os.path.join(self.reportpath, 'merged_metagenomes.csv')
        # Extract the total number of reads, and the number of reads that could be paired from the bbmerge
        # err stream
        (num_reads, num_pairs) = self.reads(error)
        # If the report doesn't exist, create it with the header and the results from the first sample
        if not os.path.isfile(report):
            with open(report, 'w') as report_file:
                report_file.write('Sample,TotalReads,PairedReads\n{sample},{total},{paired}\n'.format(sample=sample.name, total=num_reads, paired=num_pairs)) # depends on [control=['with'], data=['report_file']] # depends on [control=['if'], data=[]]
        else:
            # If the report exists, open it to determine which samples have already been added - useful if re-running
            # the analysis
            lines = list()
            with open(report, 'r') as report_file:
                for line in report_file:
                    lines.append(line.split(',')[0]) # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['report_file']]
            # Add the results to the report
            if sample.name not in lines:
                with open(report, 'a+') as report_file:
                    report_file.write('{sample},{total},{paired}\n'.format(sample=sample.name, total=num_reads, paired=num_pairs)) # depends on [control=['with'], data=['report_file']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
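Sketch of the resulting merged_metagenomes.csv after two samples have been processed (numbers are invented; in practice they come from self.reads() parsing the bbmerge err stream):

# Sample,TotalReads,PairedReads
# sample_A,1000000,880000
# sample_B,750000,700000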