Dataset columns: code, code_sememe, token_type, code_dependency (string lengths: code 75–104k, code_sememe 47–309k, token_type 215–214k, code_dependency 75–155k).
def kmeans_segmentation(image, k, kmask=None, mrf=0.1):
    """
    K-means image segmentation that is a wrapper around `ants.atropos`

    ANTsR function: `kmeansSegmentation`

    Arguments
    ---------
    image : ANTsImage
        input image

    k : integer
        integer number of classes

    kmask : ANTsImage (optional)
        segment inside this mask

    mrf : scalar
        smoothness, higher is smoother

    Returns
    -------
    ANTsImage

    Example
    -------
    >>> import ants
    >>> fi = ants.image_read(ants.get_ants_data('r16'), 'float')
    >>> fi = ants.n3_bias_field_correction(fi, 2)
    >>> seg = ants.kmeans_segmentation(fi, 3)
    """
    dim = image.dimension
    kmimage = utils.iMath(image, 'Normalize')
    if kmask is None:
        kmask = utils.get_mask(kmimage, 0.01, 1, cleanup=2)
    kmask = utils.iMath(kmask, 'FillHoles').threshold_image(1, 2)
    nhood = 'x'.join(['1'] * dim)
    mrf = '[%s,%s]' % (str(mrf), nhood)
    kmimage = atropos(a=kmimage, m=mrf, c='[5,0]',
                      i='kmeans[%s]' % (str(k)), x=kmask)
    kmimage['segmentation'] = kmimage['segmentation'].clone(image.pixeltype)
    return kmimage
def function[kmeans_segmentation, parameter[image, k, kmask, mrf]]: constant[ K-means image segmentation that is a wrapper around `ants.atropos` ANTsR function: `kmeansSegmentation` Arguments --------- image : ANTsImage input image k : integer integer number of classes kmask : ANTsImage (optional) segment inside this mask mrf : scalar smoothness, higher is smoother Returns ------- ANTsImage Example ------- >>> import ants >>> fi = ants.image_read(ants.get_ants_data('r16'), 'float') >>> fi = ants.n3_bias_field_correction(fi, 2) >>> seg = ants.kmeans_segmentation(fi, 3) ] variable[dim] assign[=] name[image].dimension variable[kmimage] assign[=] call[name[utils].iMath, parameter[name[image], constant[Normalize]]] if compare[name[kmask] is constant[None]] begin[:] variable[kmask] assign[=] call[name[utils].get_mask, parameter[name[kmimage], constant[0.01], constant[1]]] variable[kmask] assign[=] call[call[name[utils].iMath, parameter[name[kmask], constant[FillHoles]]].threshold_image, parameter[constant[1], constant[2]]] variable[nhood] assign[=] call[constant[x].join, parameter[binary_operation[list[[<ast.Constant object at 0x7da1b2345570>]] * name[dim]]]] variable[mrf] assign[=] binary_operation[constant[[%s,%s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b2345930>, <ast.Name object at 0x7da1b2344490>]]] variable[kmimage] assign[=] call[name[atropos], parameter[]] call[name[kmimage]][constant[segmentation]] assign[=] call[call[name[kmimage]][constant[segmentation]].clone, parameter[name[image].pixeltype]] return[name[kmimage]]
keyword[def] identifier[kmeans_segmentation] ( identifier[image] , identifier[k] , identifier[kmask] = keyword[None] , identifier[mrf] = literal[int] ): literal[string] identifier[dim] = identifier[image] . identifier[dimension] identifier[kmimage] = identifier[utils] . identifier[iMath] ( identifier[image] , literal[string] ) keyword[if] identifier[kmask] keyword[is] keyword[None] : identifier[kmask] = identifier[utils] . identifier[get_mask] ( identifier[kmimage] , literal[int] , literal[int] , identifier[cleanup] = literal[int] ) identifier[kmask] = identifier[utils] . identifier[iMath] ( identifier[kmask] , literal[string] ). identifier[threshold_image] ( literal[int] , literal[int] ) identifier[nhood] = literal[string] . identifier[join] ([ literal[string] ]* identifier[dim] ) identifier[mrf] = literal[string] %( identifier[str] ( identifier[mrf] ), identifier[nhood] ) identifier[kmimage] = identifier[atropos] ( identifier[a] = identifier[kmimage] , identifier[m] = identifier[mrf] , identifier[c] = literal[string] , identifier[i] = literal[string] %( identifier[str] ( identifier[k] )), identifier[x] = identifier[kmask] ) identifier[kmimage] [ literal[string] ]= identifier[kmimage] [ literal[string] ]. identifier[clone] ( identifier[image] . identifier[pixeltype] ) keyword[return] identifier[kmimage]
def kmeans_segmentation(image, k, kmask=None, mrf=0.1): """ K-means image segmentation that is a wrapper around `ants.atropos` ANTsR function: `kmeansSegmentation` Arguments --------- image : ANTsImage input image k : integer integer number of classes kmask : ANTsImage (optional) segment inside this mask mrf : scalar smoothness, higher is smoother Returns ------- ANTsImage Example ------- >>> import ants >>> fi = ants.image_read(ants.get_ants_data('r16'), 'float') >>> fi = ants.n3_bias_field_correction(fi, 2) >>> seg = ants.kmeans_segmentation(fi, 3) """ dim = image.dimension kmimage = utils.iMath(image, 'Normalize') if kmask is None: kmask = utils.get_mask(kmimage, 0.01, 1, cleanup=2) # depends on [control=['if'], data=['kmask']] kmask = utils.iMath(kmask, 'FillHoles').threshold_image(1, 2) nhood = 'x'.join(['1'] * dim) mrf = '[%s,%s]' % (str(mrf), nhood) kmimage = atropos(a=kmimage, m=mrf, c='[5,0]', i='kmeans[%s]' % str(k), x=kmask) kmimage['segmentation'] = kmimage['segmentation'].clone(image.pixeltype) return kmimage
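The least obvious step in kmeans_segmentation above is the MRF argument string it hands to atropos; a minimal, self-contained sketch of that assembly (values illustrative):

# For a 2-D image, the smoothness and a 1-voxel neighborhood per axis
# are packed into a single bracketed string.
dim = 2
mrf = 0.1
nhood = 'x'.join(['1'] * dim)            # '1x1'
mrf_arg = '[%s,%s]' % (str(mrf), nhood)
print(mrf_arg)                           # -> '[0.1,1x1]'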
def with_proxies(proxy_map, get_key):
    """Class decorator factory; adds proxy class variables to target class.

    :param dict proxy_map: Mapping between class variable labels and
        proxied classes
    :param function get_key: Extension-specific key function; may return e.g.
        the current Flask request
    """
    def wrapper(cls):
        for label, ProxiedClass in six.iteritems(proxy_map):
            proxy = proxy_factory(cls, label, ProxiedClass, get_key)
            setattr(cls, label, proxy)
        return cls
    return wrapper
def function[with_proxies, parameter[proxy_map, get_key]]: constant[Class decorator factory; adds proxy class variables to target class. :param dict proxy_map: Mapping between class variable labels and proxied classes :param function get_key: Extension-specific key function; may return e.g. the current Flask request ] def function[wrapper, parameter[cls]]: for taget[tuple[[<ast.Name object at 0x7da204564100>, <ast.Name object at 0x7da204564ca0>]]] in starred[call[name[six].iteritems, parameter[name[proxy_map]]]] begin[:] variable[proxy] assign[=] call[name[proxy_factory], parameter[name[cls], name[label], name[ProxiedClass], name[get_key]]] call[name[setattr], parameter[name[cls], name[label], name[proxy]]] return[name[cls]] return[name[wrapper]]
keyword[def] identifier[with_proxies] ( identifier[proxy_map] , identifier[get_key] ): literal[string] keyword[def] identifier[wrapper] ( identifier[cls] ): keyword[for] identifier[label] , identifier[ProxiedClass] keyword[in] identifier[six] . identifier[iteritems] ( identifier[proxy_map] ): identifier[proxy] = identifier[proxy_factory] ( identifier[cls] , identifier[label] , identifier[ProxiedClass] , identifier[get_key] ) identifier[setattr] ( identifier[cls] , identifier[label] , identifier[proxy] ) keyword[return] identifier[cls] keyword[return] identifier[wrapper]
def with_proxies(proxy_map, get_key): """Class decorator factory; adds proxy class variables to target class. :param dict proxy_map: Mapping between class variable labels and proxied classes :param function get_key: Extension-specific key function; may return e.g. the current Flask request """ def wrapper(cls): for (label, ProxiedClass) in six.iteritems(proxy_map): proxy = proxy_factory(cls, label, ProxiedClass, get_key) setattr(cls, label, proxy) # depends on [control=['for'], data=[]] return cls return wrapper
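A hedged usage sketch for with_proxies; `proxy_factory` is external in the original, so a trivial stand-in is defined here purely for illustration:

import six  # with_proxies above iterates the map via six.iteritems

def proxy_factory(cls, label, ProxiedClass, get_key):  # illustrative stub
    return ProxiedClass()

class Session(object):
    pass

@with_proxies({'session': Session}, get_key=lambda: None)
class Extension(object):
    pass

print(isinstance(Extension.session, Session))  # -> True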
def remove_location(self, location):
    # type: (str) -> bool
    """Remove a location. If the location is already added, it is ignored.

    Args:
        location (str): Location to remove

    Returns:
        bool: True if location removed or False if not
    """
    res = self._remove_hdxobject(self.data.get('groups'), location, matchon='name')
    if not res:
        res = self._remove_hdxobject(self.data.get('groups'), location.upper(), matchon='name')
    if not res:
        res = self._remove_hdxobject(self.data.get('groups'), location.lower(), matchon='name')
    return res
def function[remove_location, parameter[self, location]]: constant[Remove a location. If the location is already added, it is ignored. Args: location (str): Location to remove Returns: bool: True if location removed or False if not ] variable[res] assign[=] call[name[self]._remove_hdxobject, parameter[call[name[self].data.get, parameter[constant[groups]]], name[location]]] if <ast.UnaryOp object at 0x7da18eb54190> begin[:] variable[res] assign[=] call[name[self]._remove_hdxobject, parameter[call[name[self].data.get, parameter[constant[groups]]], call[name[location].upper, parameter[]]]] if <ast.UnaryOp object at 0x7da1b0e33670> begin[:] variable[res] assign[=] call[name[self]._remove_hdxobject, parameter[call[name[self].data.get, parameter[constant[groups]]], call[name[location].lower, parameter[]]]] return[name[res]]
keyword[def] identifier[remove_location] ( identifier[self] , identifier[location] ): literal[string] identifier[res] = identifier[self] . identifier[_remove_hdxobject] ( identifier[self] . identifier[data] . identifier[get] ( literal[string] ), identifier[location] , identifier[matchon] = literal[string] ) keyword[if] keyword[not] identifier[res] : identifier[res] = identifier[self] . identifier[_remove_hdxobject] ( identifier[self] . identifier[data] . identifier[get] ( literal[string] ), identifier[location] . identifier[upper] (), identifier[matchon] = literal[string] ) keyword[if] keyword[not] identifier[res] : identifier[res] = identifier[self] . identifier[_remove_hdxobject] ( identifier[self] . identifier[data] . identifier[get] ( literal[string] ), identifier[location] . identifier[lower] (), identifier[matchon] = literal[string] ) keyword[return] identifier[res]
def remove_location(self, location): # type: (str) -> bool 'Remove a location. If the location is already added, it is ignored.\n\n Args:\n location (str): Location to remove\n\n Returns:\n bool: True if location removed or False if not\n ' res = self._remove_hdxobject(self.data.get('groups'), location, matchon='name') if not res: res = self._remove_hdxobject(self.data.get('groups'), location.upper(), matchon='name') # depends on [control=['if'], data=[]] if not res: res = self._remove_hdxobject(self.data.get('groups'), location.lower(), matchon='name') # depends on [control=['if'], data=[]] return res
def prefix_dirs(path):
    """
    Return an iterable of all prefix directories of path, descending from root.
    """
    _dirname = posixpath.dirname
    path = path.strip('/')
    out = []
    while path != '':
        path = _dirname(path)
        out.append(path)
    return reversed(out)
def function[prefix_dirs, parameter[path]]: constant[ Return an iterable of all prefix directories of path, descending from root. ] variable[_dirname] assign[=] name[posixpath].dirname variable[path] assign[=] call[name[path].strip, parameter[constant[/]]] variable[out] assign[=] list[[]] while compare[name[path] not_equal[!=] constant[]] begin[:] variable[path] assign[=] call[name[_dirname], parameter[name[path]]] call[name[out].append, parameter[name[path]]] return[call[name[reversed], parameter[name[out]]]]
keyword[def] identifier[prefix_dirs] ( identifier[path] ): literal[string] identifier[_dirname] = identifier[posixpath] . identifier[dirname] identifier[path] = identifier[path] . identifier[strip] ( literal[string] ) identifier[out] =[] keyword[while] identifier[path] != literal[string] : identifier[path] = identifier[_dirname] ( identifier[path] ) identifier[out] . identifier[append] ( identifier[path] ) keyword[return] identifier[reversed] ( identifier[out] )
def prefix_dirs(path): """ Return an iterable of all prefix directories of path, descending from root. """ _dirname = posixpath.dirname path = path.strip('/') out = [] while path != '': path = _dirname(path) out.append(path) # depends on [control=['while'], data=['path']] return reversed(out)
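A worked example of prefix_dirs: it yields every ancestor directory of the path, root first.

import posixpath  # prefix_dirs above relies on posixpath.dirname

print(list(prefix_dirs('a/b/c.txt')))  # -> ['', 'a', 'a/b']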
def send_document(self, *args, **kwargs):
    """See :func:`send_document`"""
    return send_document(*args, **self._merge_overrides(**kwargs)).run()
def function[send_document, parameter[self]]: constant[See :func:`send_document`] return[call[call[name[send_document], parameter[<ast.Starred object at 0x7da1b0e9c310>]].run, parameter[]]]
keyword[def] identifier[send_document] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[send_document] (* identifier[args] ,** identifier[self] . identifier[_merge_overrides] (** identifier[kwargs] )). identifier[run] ()
def send_document(self, *args, **kwargs): """See :func:`send_document`""" return send_document(*args, **self._merge_overrides(**kwargs)).run()
def filter(self, func):
    """Returns a packet list filtered by a truth function. This truth
    function has to take a packet as the only argument and return a
    boolean value."""  # noqa: E501
    return self.__class__([x for x in self.res if func(x)],
                          name="filtered %s" % self.listname)
def function[filter, parameter[self, func]]: constant[Returns a packet list filtered by a truth function. This truth function has to take a packet as the only argument and return a boolean value.] return[call[name[self].__class__, parameter[<ast.ListComp object at 0x7da2044c3070>]]]
keyword[def] identifier[filter] ( identifier[self] , identifier[func] ): literal[string] keyword[return] identifier[self] . identifier[__class__] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[res] keyword[if] identifier[func] ( identifier[x] )], identifier[name] = literal[string] % identifier[self] . identifier[listname] )
def filter(self, func): """Returns a packet list filtered by a truth function. This truth function has to take a packet as the only argument and return a boolean value.""" # noqa: E501 return self.__class__([x for x in self.res if func(x)], name='filtered %s' % self.listname)
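A hedged usage sketch in scapy style, since filter reads like a PacketList method; 'trace.pcap' is a placeholder capture file.

from scapy.all import rdpcap, TCP

pkts = rdpcap('trace.pcap')
tcp_only = pkts.filter(lambda p: TCP in p)  # truth function takes one packet
print(len(tcp_only), 'TCP packets')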
def consume_asset(event, agreement_id, did, service_agreement, consumer_account, consume_callback):
    """
    Consume an asset after receiving the agreement event.

    :param event: AttributeDict with the event data.
    :param agreement_id: id of the agreement, hex str
    :param did: DID, str
    :param service_agreement: ServiceAgreement instance
    :param consumer_account: Account instance of the consumer
    :param consume_callback: callback invoked to perform the actual download
    """
    logger.debug(f"consuming asset after event {event}.")
    if consume_callback:
        config = ConfigProvider.get_config()
        secret_store = SecretStoreProvider.get_secret_store(
            config.secret_store_url, config.parity_url, consumer_account
        )
        brizo = BrizoProvider.get_brizo()

        consume_callback(
            agreement_id,
            service_agreement.service_definition_id,
            DIDResolver(Keeper.get_instance().did_registry).resolve(did),
            consumer_account,
            ConfigProvider.get_config().downloads_path,
            brizo,
            secret_store
        )
def function[consume_asset, parameter[event, agreement_id, did, service_agreement, consumer_account, consume_callback]]: constant[ Consumption of an asset after get the event call. :param event: AttributeDict with the event data. :param agreement_id: id of the agreement, hex str :param did: DID, str :param service_agreement: ServiceAgreement instance :param consumer_account: Account instance of the consumer :param consume_callback: ] call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da204623970>]] if name[consume_callback] begin[:] variable[config] assign[=] call[name[ConfigProvider].get_config, parameter[]] variable[secret_store] assign[=] call[name[SecretStoreProvider].get_secret_store, parameter[name[config].secret_store_url, name[config].parity_url, name[consumer_account]]] variable[brizo] assign[=] call[name[BrizoProvider].get_brizo, parameter[]] call[name[consume_callback], parameter[name[agreement_id], name[service_agreement].service_definition_id, call[call[name[DIDResolver], parameter[call[name[Keeper].get_instance, parameter[]].did_registry]].resolve, parameter[name[did]]], name[consumer_account], call[name[ConfigProvider].get_config, parameter[]].downloads_path, name[brizo], name[secret_store]]]
keyword[def] identifier[consume_asset] ( identifier[event] , identifier[agreement_id] , identifier[did] , identifier[service_agreement] , identifier[consumer_account] , identifier[consume_callback] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] ) keyword[if] identifier[consume_callback] : identifier[config] = identifier[ConfigProvider] . identifier[get_config] () identifier[secret_store] = identifier[SecretStoreProvider] . identifier[get_secret_store] ( identifier[config] . identifier[secret_store_url] , identifier[config] . identifier[parity_url] , identifier[consumer_account] ) identifier[brizo] = identifier[BrizoProvider] . identifier[get_brizo] () identifier[consume_callback] ( identifier[agreement_id] , identifier[service_agreement] . identifier[service_definition_id] , identifier[DIDResolver] ( identifier[Keeper] . identifier[get_instance] (). identifier[did_registry] ). identifier[resolve] ( identifier[did] ), identifier[consumer_account] , identifier[ConfigProvider] . identifier[get_config] (). identifier[downloads_path] , identifier[brizo] , identifier[secret_store] )
def consume_asset(event, agreement_id, did, service_agreement, consumer_account, consume_callback): """ Consumption of an asset after get the event call. :param event: AttributeDict with the event data. :param agreement_id: id of the agreement, hex str :param did: DID, str :param service_agreement: ServiceAgreement instance :param consumer_account: Account instance of the consumer :param consume_callback: """ logger.debug(f'consuming asset after event {event}.') if consume_callback: config = ConfigProvider.get_config() secret_store = SecretStoreProvider.get_secret_store(config.secret_store_url, config.parity_url, consumer_account) brizo = BrizoProvider.get_brizo() consume_callback(agreement_id, service_agreement.service_definition_id, DIDResolver(Keeper.get_instance().did_registry).resolve(did), consumer_account, ConfigProvider.get_config().downloads_path, brizo, secret_store) # depends on [control=['if'], data=[]]
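A hedged sketch of a callback matching the seven positional arguments that consume_asset passes through; the names below are illustrative only.

def my_consume_callback(agreement_id, service_definition_id, ddo,
                        consumer_account, downloads_path, brizo, secret_store):
    # illustrative body: a real callback would fetch the asset files here
    print('agreement %s: downloading into %s' % (agreement_id, downloads_path))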
def as_stream(self):
    """
    Return a zipped package as a readable stream
    """
    stream = io.BytesIO()
    self._store(stream)
    stream.seek(0)
    return stream
def function[as_stream, parameter[self]]: constant[ Return a zipped package as a readable stream ] variable[stream] assign[=] call[name[io].BytesIO, parameter[]] call[name[self]._store, parameter[name[stream]]] call[name[stream].seek, parameter[constant[0]]] return[name[stream]]
keyword[def] identifier[as_stream] ( identifier[self] ): literal[string] identifier[stream] = identifier[io] . identifier[BytesIO] () identifier[self] . identifier[_store] ( identifier[stream] ) identifier[stream] . identifier[seek] ( literal[int] ) keyword[return] identifier[stream]
def as_stream(self): """ Return a zipped package as a readable stream """ stream = io.BytesIO() self._store(stream) stream.seek(0) return stream
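A hedged usage sketch for as_stream: the returned BytesIO is already rewound, so it can be handed straight to zipfile (`package` stands in for an instance of the owning class).

import zipfile

stream = package.as_stream()
with zipfile.ZipFile(stream) as zf:
    print(zf.namelist())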
def share_file_with_user(self, path, user, **kwargs):
    """Shares a remote file with specified user

    :param path: path to the remote file to share
    :param user: name of the user with whom we want to share a file/folder
    :param perms (optional): permissions of the shared object,
        defaults to read only (1)
        http://doc.owncloud.org/server/6.0/admin_manual/sharing_api/index.html
    :param remote_user (optional): True if it is a federated user,
        defaults to False for a local user
    :returns: instance of :class:`ShareInfo` with the share info
        or False if the operation failed
    :raises: HTTPResponseError in case an HTTP error status was returned
    """
    remote_user = kwargs.get('remote_user', False)
    perms = kwargs.get('perms', self.OCS_PERMISSION_READ)
    if (((not isinstance(perms, int)) or (perms > self.OCS_PERMISSION_ALL))
            or ((not isinstance(user, six.string_types)) or (user == ''))):
        return False

    if remote_user and (not user.endswith('/')):
        user = user + '/'
    path = self._normalize_path(path)
    post_data = {
        'shareType': self.OCS_SHARE_TYPE_REMOTE if remote_user
                     else self.OCS_SHARE_TYPE_USER,
        'shareWith': user,
        'path': self._encode_string(path),
        'permissions': perms
    }

    res = self._make_ocs_request(
        'POST',
        self.OCS_SERVICE_SHARE,
        'shares',
        data=post_data
    )
    if self._debug:
        print('OCS share_file request for file %s with permissions %i '
              'returned: %i' % (path, perms, res.status_code))
    if res.status_code == 200:
        tree = ET.fromstring(res.content)
        self._check_ocs_status(tree)
        data_el = tree.find('data')
        return ShareInfo(
            {
                'id': data_el.find('id').text,
                'path': path,
                'permissions': perms
            }
        )
    raise HTTPResponseError(res)
def function[share_file_with_user, parameter[self, path, user]]: constant[Shares a remote file with specified user :param path: path to the remote file to share :param user: name of the user whom we want to share a file/folder :param perms (optional): permissions of the shared object defaults to read only (1) http://doc.owncloud.org/server/6.0/admin_manual/sharing_api/index.html :param remote_user (optional): True if it is a federated users defaults to False if it is a local user :returns: instance of :class:`ShareInfo` with the share info or False if the operation failed :raises: HTTPResponseError in case an HTTP error status was returned ] variable[remote_user] assign[=] call[name[kwargs].get, parameter[constant[remote_user], constant[False]]] variable[perms] assign[=] call[name[kwargs].get, parameter[constant[perms], name[self].OCS_PERMISSION_READ]] if <ast.BoolOp object at 0x7da20c6c7c40> begin[:] return[constant[False]] if <ast.BoolOp object at 0x7da20c6c6ec0> begin[:] variable[user] assign[=] binary_operation[name[user] + constant[/]] variable[path] assign[=] call[name[self]._normalize_path, parameter[name[path]]] variable[post_data] assign[=] dictionary[[<ast.Constant object at 0x7da20c6c5330>, <ast.Constant object at 0x7da20c6c6260>, <ast.Constant object at 0x7da20c6c65c0>, <ast.Constant object at 0x7da20c6c73a0>], [<ast.IfExp object at 0x7da20c6c7010>, <ast.Name object at 0x7da20c6c65f0>, <ast.Call object at 0x7da20c6c7670>, <ast.Name object at 0x7da20c6c60e0>]] variable[res] assign[=] call[name[self]._make_ocs_request, parameter[constant[POST], name[self].OCS_SERVICE_SHARE, constant[shares]]] if name[self]._debug begin[:] call[name[print], parameter[binary_operation[constant[OCS share_file request for file %s with permissions %i returned: %i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c6c7b80>, <ast.Name object at 0x7da20c6c4af0>, <ast.Attribute object at 0x7da20c6c5db0>]]]]] if compare[name[res].status_code equal[==] constant[200]] begin[:] variable[tree] assign[=] call[name[ET].fromstring, parameter[name[res].content]] call[name[self]._check_ocs_status, parameter[name[tree]]] variable[data_el] assign[=] call[name[tree].find, parameter[constant[data]]] return[call[name[ShareInfo], parameter[dictionary[[<ast.Constant object at 0x7da20c6c7280>, <ast.Constant object at 0x7da20c6c6230>, <ast.Constant object at 0x7da20c6c7970>], [<ast.Attribute object at 0x7da20c6c6a70>, <ast.Name object at 0x7da20c6c7cd0>, <ast.Name object at 0x7da20c6c4c70>]]]]] <ast.Raise object at 0x7da20c6c5420>
keyword[def] identifier[share_file_with_user] ( identifier[self] , identifier[path] , identifier[user] ,** identifier[kwargs] ): literal[string] identifier[remote_user] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ) identifier[perms] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[OCS_PERMISSION_READ] ) keyword[if] ((( keyword[not] identifier[isinstance] ( identifier[perms] , identifier[int] )) keyword[or] ( identifier[perms] > identifier[self] . identifier[OCS_PERMISSION_ALL] )) keyword[or] (( keyword[not] identifier[isinstance] ( identifier[user] , identifier[six] . identifier[string_types] )) keyword[or] ( identifier[user] == literal[string] ))): keyword[return] keyword[False] keyword[if] identifier[remote_user] keyword[and] ( keyword[not] identifier[user] . identifier[endswith] ( literal[string] )): identifier[user] = identifier[user] + literal[string] identifier[path] = identifier[self] . identifier[_normalize_path] ( identifier[path] ) identifier[post_data] ={ literal[string] : identifier[self] . identifier[OCS_SHARE_TYPE_REMOTE] keyword[if] identifier[remote_user] keyword[else] identifier[self] . identifier[OCS_SHARE_TYPE_USER] , literal[string] : identifier[user] , literal[string] : identifier[self] . identifier[_encode_string] ( identifier[path] ), literal[string] : identifier[perms] } identifier[res] = identifier[self] . identifier[_make_ocs_request] ( literal[string] , identifier[self] . identifier[OCS_SERVICE_SHARE] , literal[string] , identifier[data] = identifier[post_data] ) keyword[if] identifier[self] . identifier[_debug] : identifier[print] ( literal[string] literal[string] %( identifier[path] , identifier[perms] , identifier[res] . identifier[status_code] )) keyword[if] identifier[res] . identifier[status_code] == literal[int] : identifier[tree] = identifier[ET] . identifier[fromstring] ( identifier[res] . identifier[content] ) identifier[self] . identifier[_check_ocs_status] ( identifier[tree] ) identifier[data_el] = identifier[tree] . identifier[find] ( literal[string] ) keyword[return] identifier[ShareInfo] ( { literal[string] : identifier[data_el] . identifier[find] ( literal[string] ). identifier[text] , literal[string] : identifier[path] , literal[string] : identifier[perms] } ) keyword[raise] identifier[HTTPResponseError] ( identifier[res] )
def share_file_with_user(self, path, user, **kwargs): """Shares a remote file with specified user :param path: path to the remote file to share :param user: name of the user whom we want to share a file/folder :param perms (optional): permissions of the shared object defaults to read only (1) http://doc.owncloud.org/server/6.0/admin_manual/sharing_api/index.html :param remote_user (optional): True if it is a federated users defaults to False if it is a local user :returns: instance of :class:`ShareInfo` with the share info or False if the operation failed :raises: HTTPResponseError in case an HTTP error status was returned """ remote_user = kwargs.get('remote_user', False) perms = kwargs.get('perms', self.OCS_PERMISSION_READ) if (not isinstance(perms, int) or perms > self.OCS_PERMISSION_ALL) or (not isinstance(user, six.string_types) or user == ''): return False # depends on [control=['if'], data=[]] if remote_user and (not user.endswith('/')): user = user + '/' # depends on [control=['if'], data=[]] path = self._normalize_path(path) post_data = {'shareType': self.OCS_SHARE_TYPE_REMOTE if remote_user else self.OCS_SHARE_TYPE_USER, 'shareWith': user, 'path': self._encode_string(path), 'permissions': perms} res = self._make_ocs_request('POST', self.OCS_SERVICE_SHARE, 'shares', data=post_data) if self._debug: print('OCS share_file request for file %s with permissions %i returned: %i' % (path, perms, res.status_code)) # depends on [control=['if'], data=[]] if res.status_code == 200: tree = ET.fromstring(res.content) self._check_ocs_status(tree) data_el = tree.find('data') return ShareInfo({'id': data_el.find('id').text, 'path': path, 'permissions': perms}) # depends on [control=['if'], data=[]] raise HTTPResponseError(res)
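A hedged usage sketch in pyocclient style; the server URL and credentials are placeholders.

import owncloud

oc = owncloud.Client('https://cloud.example.com')
oc.login('user', 'password')
info = oc.share_file_with_user('/report.pdf', 'bob',
                               perms=oc.OCS_PERMISSION_READ)
print(info if info else 'share failed')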
def avail_locations(call=None):
    '''
    Return a dict of all available VM locations on the cloud provider with
    relevant data
    '''
    if call == 'action':
        raise SaltCloudSystemExit(
            'The avail_locations function must be called with '
            '-f or --function, or with the --list-locations option'
        )

    items = query(method='regions')
    ret = {}
    for region in items['regions']:
        ret[region['name']] = {}
        for item in six.iterkeys(region):
            ret[region['name']][item] = six.text_type(region[item])

    return ret
def function[avail_locations, parameter[call]]: constant[ Return a dict of all available VM locations on the cloud provider with relevant data ] if compare[name[call] equal[==] constant[action]] begin[:] <ast.Raise object at 0x7da1b1f4beb0> variable[items] assign[=] call[name[query], parameter[]] variable[ret] assign[=] dictionary[[], []] for taget[name[region]] in starred[call[name[items]][constant[regions]]] begin[:] call[name[ret]][call[name[region]][constant[name]]] assign[=] dictionary[[], []] for taget[name[item]] in starred[call[name[six].iterkeys, parameter[name[region]]]] begin[:] call[call[name[ret]][call[name[region]][constant[name]]]][name[item]] assign[=] call[name[six].text_type, parameter[call[name[region]][name[item]]]] return[name[ret]]
keyword[def] identifier[avail_locations] ( identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[call] == literal[string] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] literal[string] ) identifier[items] = identifier[query] ( identifier[method] = literal[string] ) identifier[ret] ={} keyword[for] identifier[region] keyword[in] identifier[items] [ literal[string] ]: identifier[ret] [ identifier[region] [ literal[string] ]]={} keyword[for] identifier[item] keyword[in] identifier[six] . identifier[iterkeys] ( identifier[region] ): identifier[ret] [ identifier[region] [ literal[string] ]][ identifier[item] ]= identifier[six] . identifier[text_type] ( identifier[region] [ identifier[item] ]) keyword[return] identifier[ret]
def avail_locations(call=None): """ Return a dict of all available VM locations on the cloud provider with relevant data """ if call == 'action': raise SaltCloudSystemExit('The avail_locations function must be called with -f or --function, or with the --list-locations option') # depends on [control=['if'], data=[]] items = query(method='regions') ret = {} for region in items['regions']: ret[region['name']] = {} for item in six.iterkeys(region): ret[region['name']][item] = six.text_type(region[item]) # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['region']] return ret
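A hedged usage sketch: with a cloud provider configured for salt-cloud this is normally reached via --list-locations, but the direct call is simply:

locations = avail_locations(call='function')
for name, info in locations.items():
    print(name, info.get('slug', ''))  # 'slug' is a typical region field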
def main(args=None):
    """Parse command-line arguments, tconvert inputs, and print
    """
    # define command line arguments
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-V", "--version", action="version",
                        version=__version__,
                        help="show version number and exit")
    parser.add_argument("-l", "--local", action="store_true", default=False,
                        help="print datetimes in local timezone")
    parser.add_argument("-f", "--format", type=str, action="store",
                        default=r"%Y-%m-%d %H:%M:%S.%f %Z",
                        help="output datetime format (default: %(default)r)")
    parser.add_argument("input", help="GPS or datetime string to convert",
                        nargs="*")

    # parse and convert
    args = parser.parse_args(args)
    input_ = " ".join(args.input)
    output = tconvert(input_)

    # print (now with timezones!)
    if isinstance(output, datetime.datetime):
        output = output.replace(tzinfo=tz.tzutc())
        if args.local:
            output = output.astimezone(tz.tzlocal())
        print(output.strftime(args.format))
    else:
        print(output)
def function[main, parameter[args]]: constant[Parse command-line arguments, tconvert inputs, and print ] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[-V], constant[--version]]] call[name[parser].add_argument, parameter[constant[-l], constant[--local]]] call[name[parser].add_argument, parameter[constant[-f], constant[--format]]] call[name[parser].add_argument, parameter[constant[input]]] variable[args] assign[=] call[name[parser].parse_args, parameter[name[args]]] variable[input_] assign[=] call[constant[ ].join, parameter[name[args].input]] variable[output] assign[=] call[name[tconvert], parameter[name[input_]]] if call[name[isinstance], parameter[name[output], name[datetime].datetime]] begin[:] variable[output] assign[=] call[name[output].replace, parameter[]] if name[args].local begin[:] variable[output] assign[=] call[name[output].astimezone, parameter[call[name[tz].tzlocal, parameter[]]]] call[name[print], parameter[call[name[output].strftime, parameter[name[args].format]]]]
keyword[def] identifier[main] ( identifier[args] = keyword[None] ): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = identifier[__doc__] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[version] = identifier[__version__] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[type] = identifier[str] , identifier[action] = literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] , identifier[nargs] = literal[string] ) identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[args] ) identifier[input_] = literal[string] . identifier[join] ( identifier[args] . identifier[input] ) identifier[output] = identifier[tconvert] ( identifier[input_] ) keyword[if] identifier[isinstance] ( identifier[output] , identifier[datetime] . identifier[datetime] ): identifier[output] = identifier[output] . identifier[replace] ( identifier[tzinfo] = identifier[tz] . identifier[tzutc] ()) keyword[if] identifier[args] . identifier[local] : identifier[output] = identifier[output] . identifier[astimezone] ( identifier[tz] . identifier[tzlocal] ()) identifier[print] ( identifier[output] . identifier[strftime] ( identifier[args] . identifier[format] )) keyword[else] : identifier[print] ( identifier[output] )
def main(args=None): """Parse command-line arguments, tconvert inputs, and print """ # define command line arguments parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('-V', '--version', action='version', version=__version__, help='show version number and exit') parser.add_argument('-l', '--local', action='store_true', default=False, help='print datetimes in local timezone') parser.add_argument('-f', '--format', type=str, action='store', default='%Y-%m-%d %H:%M:%S.%f %Z', help='output datetime format (default: %(default)r)') parser.add_argument('input', help='GPS or datetime string to convert', nargs='*') # parse and convert args = parser.parse_args(args) input_ = ' '.join(args.input) output = tconvert(input_) # print (now with timezones!) if isinstance(output, datetime.datetime): output = output.replace(tzinfo=tz.tzutc()) if args.local: output = output.astimezone(tz.tzlocal()) # depends on [control=['if'], data=[]] print(output.strftime(args.format)) # depends on [control=['if'], data=[]] else: print(output)
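A hedged usage sketch of the CLI entry point above; the GPS value is illustrative (the GW150914 event time) and assumes tconvert resolves it as expected.

main(['1126259462'])                    # -> 2015-09-14 09:50:45.000000 UTC
main(['-f', '%Y-%m-%d', '1126259462'])  # custom output format
main(['--local', '1126259462'])         # same instant in the local timezone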
def delete(self, id, product_id, session):
    '''taobao.product.img.delete: delete a non-primary product image.

    1. Pass in the ID of the non-primary image.
    2. Pass in the product ID.

    Deletes the given non-primary image of the product.'''
    request = TOPRequest('taobao.product.img.delete')
    request['id'] = id
    request['product_id'] = product_id
    self.create(self.execute(request, session)['product_img'])
    return self
def function[delete, parameter[self, id, product_id, session]]: constant[taobao.product.img.delete 删除产品非主图 1.传入非主图ID 2.传入产品ID 删除产品非主图] variable[request] assign[=] call[name[TOPRequest], parameter[constant[taobao.product.img.delete]]] call[name[request]][constant[id]] assign[=] name[id] call[name[request]][constant[product_id]] assign[=] name[product_id] call[name[self].create, parameter[call[call[name[self].execute, parameter[name[request], name[session]]]][constant[product_img]]]] return[name[self]]
keyword[def] identifier[delete] ( identifier[self] , identifier[id] , identifier[product_id] , identifier[session] ): literal[string] identifier[request] = identifier[TOPRequest] ( literal[string] ) identifier[request] [ literal[string] ]= identifier[id] identifier[request] [ literal[string] ]= identifier[product_id] identifier[self] . identifier[create] ( identifier[self] . identifier[execute] ( identifier[request] , identifier[session] )[ literal[string] ]) keyword[return] identifier[self]
def delete(self, id, product_id, session): """taobao.product.img.delete: delete a non-primary product image. 1. Pass in the ID of the non-primary image 2. Pass in the product ID to delete the non-primary product image""" request = TOPRequest('taobao.product.img.delete') request['id'] = id request['product_id'] = product_id self.create(self.execute(request, session)['product_img']) return self
def decode_packet(data):
    """decode the data, return some kind of PDU."""
    if _debug: decode_packet._debug("decode_packet %r", data)

    # empty strings are some other kind of pcap content
    if not data:
        return None

    # assume it is ethernet for now
    d = decode_ethernet(data)
    pduSource = Address(d['source_address'])
    pduDestination = Address(d['destination_address'])
    data = d['data']

    # there could be a VLAN header
    if (d['type'] == 0x8100):
        if _debug: decode_packet._debug(" - vlan found")
        d = decode_vlan(data)
        data = d['data']

    # look for IP packets
    if (d['type'] == 0x0800):
        if _debug: decode_packet._debug(" - IP found")
        d = decode_ip(data)
        pduSource, pduDestination = d['source_address'], d['destination_address']
        data = d['data']

        if (d['protocol'] == 'udp'):
            if _debug: decode_packet._debug(" - UDP found")
            d = decode_udp(data)
            data = d['data']

            pduSource = Address((pduSource, d['source_port']))
            pduDestination = Address((pduDestination, d['destination_port']))
            if _debug:
                decode_packet._debug(" - pduSource: %r", pduSource)
                decode_packet._debug(" - pduDestination: %r", pduDestination)
        else:
            if _debug: decode_packet._debug(" - not a UDP packet")
    else:
        if _debug: decode_packet._debug(" - not an IP packet")

    # check for empty
    if not data:
        if _debug: decode_packet._debug(" - empty packet")
        return None

    # build a PDU
    pdu = PDU(data, source=pduSource, destination=pduDestination)

    # check for a BVLL header
    if (pdu.pduData[0] == 0x81):
        if _debug: decode_packet._debug(" - BVLL header found")

        try:
            xpdu = BVLPDU()
            xpdu.decode(pdu)
            pdu = xpdu
        except Exception as err:
            if _debug: decode_packet._debug(" - BVLPDU decoding error: %r", err)
            return pdu

        # make a more focused interpretation
        atype = bvl_pdu_types.get(pdu.bvlciFunction)
        if not atype:
            if _debug: decode_packet._debug(" - unknown BVLL type: %r", pdu.bvlciFunction)
            return pdu

        # decode it as one of the basic types
        try:
            xpdu = pdu
            bpdu = atype()
            bpdu.decode(pdu)
            if _debug: decode_packet._debug(" - bpdu: %r", bpdu)

            pdu = bpdu

            # lift the address for forwarded NPDU's
            if atype is ForwardedNPDU:
                pdu.pduSource = bpdu.bvlciAddress
            # no deeper decoding for some
            elif atype not in (DistributeBroadcastToNetwork, OriginalUnicastNPDU, OriginalBroadcastNPDU):
                return pdu
        except Exception as err:
            if _debug: decode_packet._debug(" - decoding Error: %r", err)
            return xpdu

    # check for version number
    if (pdu.pduData[0] != 0x01):
        if _debug: decode_packet._debug(" - not a version 1 packet: %s...", btox(pdu.pduData[:30], '.'))
        return None

    # it's an NPDU
    try:
        npdu = NPDU()
        npdu.decode(pdu)
    except Exception as err:
        if _debug: decode_packet._debug(" - decoding Error: %r", err)
        return None

    # application or network layer message
    if npdu.npduNetMessage is None:
        if _debug: decode_packet._debug(" - not a network layer message, try as an APDU")

        # decode as a generic APDU
        try:
            xpdu = APDU()
            xpdu.decode(npdu)
            apdu = xpdu
        except Exception as err:
            if _debug: decode_packet._debug(" - decoding Error: %r", err)
            return npdu

        # "lift" the source and destination address
        if npdu.npduSADR:
            apdu.pduSource = npdu.npduSADR
        else:
            apdu.pduSource = npdu.pduSource
        if npdu.npduDADR:
            apdu.pduDestination = npdu.npduDADR
        else:
            apdu.pduDestination = npdu.pduDestination

        # make a more focused interpretation
        atype = apdu_types.get(apdu.apduType)
        if not atype:
            if _debug: decode_packet._debug(" - unknown APDU type: %r", apdu.apduType)
            return apdu

        # decode it as one of the basic types
        try:
            xpdu = apdu
            apdu = atype()
            apdu.decode(xpdu)
        except Exception as err:
            if _debug: decode_packet._debug(" - decoding Error: %r", err)
            return xpdu

        # decode it at the next level
        if isinstance(apdu, ConfirmedRequestPDU):
            atype = confirmed_request_types.get(apdu.apduService)
            if not atype:
                if _debug: decode_packet._debug(" - no confirmed request decoder: %r", apdu.apduService)
                return apdu
        elif isinstance(apdu, UnconfirmedRequestPDU):
            atype = unconfirmed_request_types.get(apdu.apduService)
            if not atype:
                if _debug: decode_packet._debug(" - no unconfirmed request decoder: %r", apdu.apduService)
                return apdu
        elif isinstance(apdu, SimpleAckPDU):
            atype = None
        elif isinstance(apdu, ComplexAckPDU):
            atype = complex_ack_types.get(apdu.apduService)
            if not atype:
                if _debug: decode_packet._debug(" - no complex ack decoder: %r", apdu.apduService)
                return apdu
        elif isinstance(apdu, SegmentAckPDU):
            atype = None
        elif isinstance(apdu, ErrorPDU):
            atype = error_types.get(apdu.apduService)
            if not atype:
                if _debug: decode_packet._debug(" - no error decoder: %r", apdu.apduService)
                return apdu
        elif isinstance(apdu, RejectPDU):
            atype = None
        elif isinstance(apdu, AbortPDU):
            atype = None
        if _debug: decode_packet._debug(" - atype: %r", atype)

        # deeper decoding
        try:
            if atype:
                xpdu = apdu
                apdu = atype()
                apdu.decode(xpdu)
        except Exception as err:
            if _debug: decode_packet._debug(" - decoding error: %r", err)
            return xpdu

        # success
        return apdu

    else:
        # make a more focused interpretation
        ntype = npdu_types.get(npdu.npduNetMessage)
        if not ntype:
            if _debug: decode_packet._debug(" - no network layer decoder: %r", npdu.npduNetMessage)
            return npdu
        if _debug: decode_packet._debug(" - ntype: %r", ntype)

        # deeper decoding
        try:
            xpdu = npdu
            npdu = ntype()
            npdu.decode(xpdu)
        except Exception as err:
            if _debug: decode_packet._debug(" - decoding error: %r", err)
            return xpdu

        # success
        return npdu
def function[decode_packet, parameter[data]]: constant[decode the data, return some kind of PDU.] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[decode_packet %r], name[data]]] if <ast.UnaryOp object at 0x7da2041d9f60> begin[:] return[constant[None]] variable[d] assign[=] call[name[decode_ethernet], parameter[name[data]]] variable[pduSource] assign[=] call[name[Address], parameter[call[name[d]][constant[source_address]]]] variable[pduDestination] assign[=] call[name[Address], parameter[call[name[d]][constant[destination_address]]]] variable[data] assign[=] call[name[d]][constant[data]] if compare[call[name[d]][constant[type]] equal[==] constant[33024]] begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - vlan found]]] variable[d] assign[=] call[name[decode_vlan], parameter[name[data]]] variable[data] assign[=] call[name[d]][constant[data]] if compare[call[name[d]][constant[type]] equal[==] constant[2048]] begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - IP found]]] variable[d] assign[=] call[name[decode_ip], parameter[name[data]]] <ast.Tuple object at 0x7da2041d9ff0> assign[=] tuple[[<ast.Subscript object at 0x7da2041db0d0>, <ast.Subscript object at 0x7da2041da2c0>]] variable[data] assign[=] call[name[d]][constant[data]] if compare[call[name[d]][constant[protocol]] equal[==] constant[udp]] begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - UDP found]]] variable[d] assign[=] call[name[decode_udp], parameter[name[data]]] variable[data] assign[=] call[name[d]][constant[data]] variable[pduSource] assign[=] call[name[Address], parameter[tuple[[<ast.Name object at 0x7da2041d97b0>, <ast.Subscript object at 0x7da2041dbb20>]]]] variable[pduDestination] assign[=] call[name[Address], parameter[tuple[[<ast.Name object at 0x7da2041db580>, <ast.Subscript object at 0x7da2041da830>]]]] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - pduSource: %r], name[pduSource]]] call[name[decode_packet]._debug, parameter[constant[ - pduDestination: %r], name[pduDestination]]] if <ast.UnaryOp object at 0x7da2041da590> begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - empty packet]]] return[constant[None]] variable[pdu] assign[=] call[name[PDU], parameter[name[data]]] if compare[call[name[pdu].pduData][constant[0]] equal[==] constant[129]] begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - BVLL header found]]] <ast.Try object at 0x7da2041d98d0> variable[atype] assign[=] call[name[bvl_pdu_types].get, parameter[name[pdu].bvlciFunction]] if <ast.UnaryOp object at 0x7da2041d88b0> begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - unknown BVLL type: %r], name[pdu].bvlciFunction]] return[name[pdu]] <ast.Try object at 0x7da2041db040> if compare[call[name[pdu].pduData][constant[0]] not_equal[!=] constant[1]] begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - not a version 1 packet: %s...], call[name[btox], parameter[call[name[pdu].pduData][<ast.Slice object at 0x7da1b084c7c0>], constant[.]]]]] return[constant[None]] <ast.Try object at 0x7da1b084cfa0> if compare[name[npdu].npduNetMessage is constant[None]] begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - not a network layer message, try as an APDU]]] <ast.Try object at 0x7da1b084fee0> if name[npdu].npduSADR begin[:] 
name[apdu].pduSource assign[=] name[npdu].npduSADR if name[npdu].npduDADR begin[:] name[apdu].pduDestination assign[=] name[npdu].npduDADR variable[atype] assign[=] call[name[apdu_types].get, parameter[name[apdu].apduType]] if <ast.UnaryOp object at 0x7da1b084d960> begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - unknown APDU type: %r], name[apdu].apduType]] return[name[apdu]] <ast.Try object at 0x7da1b084f520> if call[name[isinstance], parameter[name[apdu], name[ConfirmedRequestPDU]]] begin[:] variable[atype] assign[=] call[name[confirmed_request_types].get, parameter[name[apdu].apduService]] if <ast.UnaryOp object at 0x7da18ede7be0> begin[:] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - no confirmed request decoder: %r], name[apdu].apduService]] return[name[apdu]] if name[_debug] begin[:] call[name[decode_packet]._debug, parameter[constant[ - atype: %r], name[atype]]] <ast.Try object at 0x7da18ede7730> return[name[apdu]]
keyword[def] identifier[decode_packet] ( identifier[data] ): literal[string] keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[data] ) keyword[if] keyword[not] identifier[data] : keyword[return] keyword[None] identifier[d] = identifier[decode_ethernet] ( identifier[data] ) identifier[pduSource] = identifier[Address] ( identifier[d] [ literal[string] ]) identifier[pduDestination] = identifier[Address] ( identifier[d] [ literal[string] ]) identifier[data] = identifier[d] [ literal[string] ] keyword[if] ( identifier[d] [ literal[string] ]== literal[int] ): keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] ) identifier[d] = identifier[decode_vlan] ( identifier[data] ) identifier[data] = identifier[d] [ literal[string] ] keyword[if] ( identifier[d] [ literal[string] ]== literal[int] ): keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] ) identifier[d] = identifier[decode_ip] ( identifier[data] ) identifier[pduSource] , identifier[pduDestination] = identifier[d] [ literal[string] ], identifier[d] [ literal[string] ] identifier[data] = identifier[d] [ literal[string] ] keyword[if] ( identifier[d] [ literal[string] ]== literal[string] ): keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] ) identifier[d] = identifier[decode_udp] ( identifier[data] ) identifier[data] = identifier[d] [ literal[string] ] identifier[pduSource] = identifier[Address] (( identifier[pduSource] , identifier[d] [ literal[string] ])) identifier[pduDestination] = identifier[Address] (( identifier[pduDestination] , identifier[d] [ literal[string] ])) keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[pduSource] ) identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[pduDestination] ) keyword[else] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] ) keyword[else] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] ) keyword[if] keyword[not] identifier[data] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] ) keyword[return] keyword[None] identifier[pdu] = identifier[PDU] ( identifier[data] , identifier[source] = identifier[pduSource] , identifier[destination] = identifier[pduDestination] ) keyword[if] ( identifier[pdu] . identifier[pduData] [ literal[int] ]== literal[int] ): keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] ) keyword[try] : identifier[xpdu] = identifier[BVLPDU] () identifier[xpdu] . identifier[decode] ( identifier[pdu] ) identifier[pdu] = identifier[xpdu] keyword[except] identifier[Exception] keyword[as] identifier[err] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[err] ) keyword[return] identifier[pdu] identifier[atype] = identifier[bvl_pdu_types] . identifier[get] ( identifier[pdu] . identifier[bvlciFunction] ) keyword[if] keyword[not] identifier[atype] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[pdu] . identifier[bvlciFunction] ) keyword[return] identifier[pdu] keyword[try] : identifier[xpdu] = identifier[pdu] identifier[bpdu] = identifier[atype] () identifier[bpdu] . 
identifier[decode] ( identifier[pdu] ) keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[bpdu] ) identifier[pdu] = identifier[bpdu] keyword[if] identifier[atype] keyword[is] identifier[ForwardedNPDU] : identifier[pdu] . identifier[pduSource] = identifier[bpdu] . identifier[bvlciAddress] keyword[elif] identifier[atype] keyword[not] keyword[in] ( identifier[DistributeBroadcastToNetwork] , identifier[OriginalUnicastNPDU] , identifier[OriginalBroadcastNPDU] ): keyword[return] identifier[pdu] keyword[except] identifier[Exception] keyword[as] identifier[err] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[err] ) keyword[return] identifier[xpdu] keyword[if] ( identifier[pdu] . identifier[pduData] [ literal[int] ]!= literal[int] ): keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[btox] ( identifier[pdu] . identifier[pduData] [: literal[int] ], literal[string] )) keyword[return] keyword[None] keyword[try] : identifier[npdu] = identifier[NPDU] () identifier[npdu] . identifier[decode] ( identifier[pdu] ) keyword[except] identifier[Exception] keyword[as] identifier[err] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[err] ) keyword[return] keyword[None] keyword[if] identifier[npdu] . identifier[npduNetMessage] keyword[is] keyword[None] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] ) keyword[try] : identifier[xpdu] = identifier[APDU] () identifier[xpdu] . identifier[decode] ( identifier[npdu] ) identifier[apdu] = identifier[xpdu] keyword[except] identifier[Exception] keyword[as] identifier[err] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[err] ) keyword[return] identifier[npdu] keyword[if] identifier[npdu] . identifier[npduSADR] : identifier[apdu] . identifier[pduSource] = identifier[npdu] . identifier[npduSADR] keyword[else] : identifier[apdu] . identifier[pduSource] = identifier[npdu] . identifier[pduSource] keyword[if] identifier[npdu] . identifier[npduDADR] : identifier[apdu] . identifier[pduDestination] = identifier[npdu] . identifier[npduDADR] keyword[else] : identifier[apdu] . identifier[pduDestination] = identifier[npdu] . identifier[pduDestination] identifier[atype] = identifier[apdu_types] . identifier[get] ( identifier[apdu] . identifier[apduType] ) keyword[if] keyword[not] identifier[atype] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[apdu] . identifier[apduType] ) keyword[return] identifier[apdu] keyword[try] : identifier[xpdu] = identifier[apdu] identifier[apdu] = identifier[atype] () identifier[apdu] . identifier[decode] ( identifier[xpdu] ) keyword[except] identifier[Exception] keyword[as] identifier[err] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[err] ) keyword[return] identifier[xpdu] keyword[if] identifier[isinstance] ( identifier[apdu] , identifier[ConfirmedRequestPDU] ): identifier[atype] = identifier[confirmed_request_types] . identifier[get] ( identifier[apdu] . identifier[apduService] ) keyword[if] keyword[not] identifier[atype] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[apdu] . 
identifier[apduService] ) keyword[return] identifier[apdu] keyword[elif] identifier[isinstance] ( identifier[apdu] , identifier[UnconfirmedRequestPDU] ): identifier[atype] = identifier[unconfirmed_request_types] . identifier[get] ( identifier[apdu] . identifier[apduService] ) keyword[if] keyword[not] identifier[atype] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[apdu] . identifier[apduService] ) keyword[return] identifier[apdu] keyword[elif] identifier[isinstance] ( identifier[apdu] , identifier[SimpleAckPDU] ): identifier[atype] = keyword[None] keyword[elif] identifier[isinstance] ( identifier[apdu] , identifier[ComplexAckPDU] ): identifier[atype] = identifier[complex_ack_types] . identifier[get] ( identifier[apdu] . identifier[apduService] ) keyword[if] keyword[not] identifier[atype] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[apdu] . identifier[apduService] ) keyword[return] identifier[apdu] keyword[elif] identifier[isinstance] ( identifier[apdu] , identifier[SegmentAckPDU] ): identifier[atype] = keyword[None] keyword[elif] identifier[isinstance] ( identifier[apdu] , identifier[ErrorPDU] ): identifier[atype] = identifier[error_types] . identifier[get] ( identifier[apdu] . identifier[apduService] ) keyword[if] keyword[not] identifier[atype] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[apdu] . identifier[apduService] ) keyword[return] identifier[apdu] keyword[elif] identifier[isinstance] ( identifier[apdu] , identifier[RejectPDU] ): identifier[atype] = keyword[None] keyword[elif] identifier[isinstance] ( identifier[apdu] , identifier[AbortPDU] ): identifier[atype] = keyword[None] keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[atype] ) keyword[try] : keyword[if] identifier[atype] : identifier[xpdu] = identifier[apdu] identifier[apdu] = identifier[atype] () identifier[apdu] . identifier[decode] ( identifier[xpdu] ) keyword[except] identifier[Exception] keyword[as] identifier[err] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[err] ) keyword[return] identifier[xpdu] keyword[return] identifier[apdu] keyword[else] : identifier[ntype] = identifier[npdu_types] . identifier[get] ( identifier[npdu] . identifier[npduNetMessage] ) keyword[if] keyword[not] identifier[ntype] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[npdu] . identifier[npduNetMessage] ) keyword[return] identifier[npdu] keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[ntype] ) keyword[try] : identifier[xpdu] = identifier[npdu] identifier[npdu] = identifier[ntype] () identifier[npdu] . identifier[decode] ( identifier[xpdu] ) keyword[except] identifier[Exception] keyword[as] identifier[err] : keyword[if] identifier[_debug] : identifier[decode_packet] . identifier[_debug] ( literal[string] , identifier[err] ) keyword[return] identifier[xpdu] keyword[return] identifier[npdu]
def decode_packet(data): """decode the data, return some kind of PDU.""" if _debug: decode_packet._debug('decode_packet %r', data) # depends on [control=['if'], data=[]] # empty strings are some other kind of pcap content if not data: return None # depends on [control=['if'], data=[]] # assume it is ethernet for now d = decode_ethernet(data) pduSource = Address(d['source_address']) pduDestination = Address(d['destination_address']) data = d['data'] # there could be a VLAN header if d['type'] == 33024: if _debug: decode_packet._debug(' - vlan found') # depends on [control=['if'], data=[]] d = decode_vlan(data) data = d['data'] # depends on [control=['if'], data=[]] # look for IP packets if d['type'] == 2048: if _debug: decode_packet._debug(' - IP found') # depends on [control=['if'], data=[]] d = decode_ip(data) (pduSource, pduDestination) = (d['source_address'], d['destination_address']) data = d['data'] if d['protocol'] == 'udp': if _debug: decode_packet._debug(' - UDP found') # depends on [control=['if'], data=[]] d = decode_udp(data) data = d['data'] pduSource = Address((pduSource, d['source_port'])) pduDestination = Address((pduDestination, d['destination_port'])) if _debug: decode_packet._debug(' - pduSource: %r', pduSource) decode_packet._debug(' - pduDestination: %r', pduDestination) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif _debug: decode_packet._debug(' - not a UDP packet') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif _debug: decode_packet._debug(' - not an IP packet') # depends on [control=['if'], data=[]] # check for empty if not data: if _debug: decode_packet._debug(' - empty packet') # depends on [control=['if'], data=[]] return None # depends on [control=['if'], data=[]] # build a PDU pdu = PDU(data, source=pduSource, destination=pduDestination) # check for a BVLL header if pdu.pduData[0] == 129: if _debug: decode_packet._debug(' - BVLL header found') # depends on [control=['if'], data=[]] try: xpdu = BVLPDU() xpdu.decode(pdu) pdu = xpdu # depends on [control=['try'], data=[]] except Exception as err: if _debug: decode_packet._debug(' - BVLPDU decoding error: %r', err) # depends on [control=['if'], data=[]] return pdu # depends on [control=['except'], data=['err']] # make a more focused interpretation atype = bvl_pdu_types.get(pdu.bvlciFunction) if not atype: if _debug: decode_packet._debug(' - unknown BVLL type: %r', pdu.bvlciFunction) # depends on [control=['if'], data=[]] return pdu # depends on [control=['if'], data=[]] # decode it as one of the basic types try: xpdu = pdu bpdu = atype() bpdu.decode(pdu) if _debug: decode_packet._debug(' - bpdu: %r', bpdu) # depends on [control=['if'], data=[]] pdu = bpdu # lift the address for forwarded NPDU's if atype is ForwardedNPDU: pdu.pduSource = bpdu.bvlciAddress # depends on [control=['if'], data=[]] # no deeper decoding for some elif atype not in (DistributeBroadcastToNetwork, OriginalUnicastNPDU, OriginalBroadcastNPDU): return pdu # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception as err: if _debug: decode_packet._debug(' - decoding Error: %r', err) # depends on [control=['if'], data=[]] return xpdu # depends on [control=['except'], data=['err']] # depends on [control=['if'], data=[]] # check for version number if pdu.pduData[0] != 1: if _debug: decode_packet._debug(' - not a version 1 packet: %s...', btox(pdu.pduData[:30], '.')) # depends on [control=['if'], data=[]] return None # depends on 
[control=['if'], data=[]] # it's an NPDU try: npdu = NPDU() npdu.decode(pdu) # depends on [control=['try'], data=[]] except Exception as err: if _debug: decode_packet._debug(' - decoding Error: %r', err) # depends on [control=['if'], data=[]] return None # depends on [control=['except'], data=['err']] # application or network layer message if npdu.npduNetMessage is None: if _debug: decode_packet._debug(' - not a network layer message, try as an APDU') # depends on [control=['if'], data=[]] # decode as a generic APDU try: xpdu = APDU() xpdu.decode(npdu) apdu = xpdu # depends on [control=['try'], data=[]] except Exception as err: if _debug: decode_packet._debug(' - decoding Error: %r', err) # depends on [control=['if'], data=[]] return npdu # depends on [control=['except'], data=['err']] # "lift" the source and destination address if npdu.npduSADR: apdu.pduSource = npdu.npduSADR # depends on [control=['if'], data=[]] else: apdu.pduSource = npdu.pduSource if npdu.npduDADR: apdu.pduDestination = npdu.npduDADR # depends on [control=['if'], data=[]] else: apdu.pduDestination = npdu.pduDestination # make a more focused interpretation atype = apdu_types.get(apdu.apduType) if not atype: if _debug: decode_packet._debug(' - unknown APDU type: %r', apdu.apduType) # depends on [control=['if'], data=[]] return apdu # depends on [control=['if'], data=[]] # decode it as one of the basic types try: xpdu = apdu apdu = atype() apdu.decode(xpdu) # depends on [control=['try'], data=[]] except Exception as err: if _debug: decode_packet._debug(' - decoding Error: %r', err) # depends on [control=['if'], data=[]] return xpdu # depends on [control=['except'], data=['err']] # decode it at the next level if isinstance(apdu, ConfirmedRequestPDU): atype = confirmed_request_types.get(apdu.apduService) if not atype: if _debug: decode_packet._debug(' - no confirmed request decoder: %r', apdu.apduService) # depends on [control=['if'], data=[]] return apdu # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(apdu, UnconfirmedRequestPDU): atype = unconfirmed_request_types.get(apdu.apduService) if not atype: if _debug: decode_packet._debug(' - no unconfirmed request decoder: %r', apdu.apduService) # depends on [control=['if'], data=[]] return apdu # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(apdu, SimpleAckPDU): atype = None # depends on [control=['if'], data=[]] elif isinstance(apdu, ComplexAckPDU): atype = complex_ack_types.get(apdu.apduService) if not atype: if _debug: decode_packet._debug(' - no complex ack decoder: %r', apdu.apduService) # depends on [control=['if'], data=[]] return apdu # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(apdu, SegmentAckPDU): atype = None # depends on [control=['if'], data=[]] elif isinstance(apdu, ErrorPDU): atype = error_types.get(apdu.apduService) if not atype: if _debug: decode_packet._debug(' - no error decoder: %r', apdu.apduService) # depends on [control=['if'], data=[]] return apdu # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(apdu, RejectPDU): atype = None # depends on [control=['if'], data=[]] elif isinstance(apdu, AbortPDU): atype = None # depends on [control=['if'], data=[]] if _debug: decode_packet._debug(' - atype: %r', atype) # depends on [control=['if'], data=[]] # deeper decoding try: if atype: xpdu = apdu apdu = atype() apdu.decode(xpdu) # depends on [control=['if'], data=[]] # depends on 
[control=['try'], data=[]] except Exception as err: if _debug: decode_packet._debug(' - decoding error: %r', err) # depends on [control=['if'], data=[]] return xpdu # depends on [control=['except'], data=['err']] # success return apdu # depends on [control=['if'], data=[]] else: # make a more focused interpretation ntype = npdu_types.get(npdu.npduNetMessage) if not ntype: if _debug: decode_packet._debug(' - no network layer decoder: %r', npdu.npduNetMessage) # depends on [control=['if'], data=[]] return npdu # depends on [control=['if'], data=[]] if _debug: decode_packet._debug(' - ntype: %r', ntype) # depends on [control=['if'], data=[]] # deeper decoding try: xpdu = npdu npdu = ntype() npdu.decode(xpdu) # depends on [control=['try'], data=[]] except Exception as err: if _debug: decode_packet._debug(' - decoding error: %r', err) # depends on [control=['if'], data=[]] return xpdu # depends on [control=['except'], data=['err']] # success return npdu
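A minimal usage sketch for decode_packet, assuming the bacpypes-style helpers and classes referenced above are in scope; the capture file name is hypothetical.

# "capture.bin" is a hypothetical file holding a single raw Ethernet frame.
with open("capture.bin", "rb") as f:
    frame = f.read()

pdu = decode_packet(frame)  # BVLPDU/NPDU/APDU subclass, or None
if pdu is not None:
    print(type(pdu).__name__, pdu.pduSource, pdu.pduDestination)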
def aggregate_task_losses(hparams,
                          problem_hparams,
                          logits,
                          feature_name,
                          feature):
  """Multiproblem loss function."""
  # If no reweighting, we want the default loss to mimic the LM loss.
  if not hparams.multiproblem_reweight_label_loss:
    return aggregate_task_lm_losses(hparams=hparams,
                                    problem_hparams=problem_hparams,
                                    logits=logits,
                                    feature_name=feature_name,
                                    feature=feature)

  summaries = []
  main_task_id = hparams.problem.task_list[0].task_id
  vocab_size = problem_hparams.vocab_size[feature_name]
  if vocab_size is not None and hasattr(hparams, "vocab_divisor"):
    vocab_size += (-vocab_size) % hparams.vocab_divisor
  modality = problem_hparams.modality[feature_name]
  loss = hparams.loss.get(feature_name, modalities.get_loss(modality))
  weights_fn = hparams.weights_fn.get(
      feature_name, modalities.get_weights_fn(modality))
  # Primary task loss
  loss_num, loss_den = loss(
      logits, feature,
      lambda x: common_layers.weights_multi_problem_all(x, main_task_id),
      hparams, vocab_size, weights_fn)
  loss_val = loss_num / tf.maximum(1.0, loss_den)
  summaries.append([hparams.problem.task_list[0].name+"_loss", loss_val])
  # Since the losses may undergo rescaling, they cannot exist as separate
  # numerators and denominators. Set the denominators to 1 in order to
  # facilitate loss averaging.
  loss_num = loss_val
  loss_den = tf.minimum(tf.convert_to_tensor(1, dtype=tf.float32), loss_den)
  for task in hparams.problem.task_list[1:]:
    # Loss only from the input sequence -- the auxiliary LM loss.
    seq_loss_num, seq_loss_den = loss(
        logits, feature,
        lambda x: common_layers.weights_multi_problem_input(x, task.task_id),  # pylint: disable=cell-var-from-loop
        hparams, vocab_size)
    seq_loss_num *= problem_hparams.loss_multiplier
    # Unscaled sequence loss.
    seq_loss = seq_loss_num / tf.maximum(1.0, seq_loss_den)
    summaries.append([task.name+"_seq_loss", seq_loss])
    if hasattr(task, "num_classes"):
      # Loss only from the classification label.
      label_loss_num, label_loss_den = loss(
          logits, feature,
          lambda x: common_layers.weights_multi_problem(x, task.task_id),  # pylint: disable=cell-var-from-loop
          hparams, vocab_size)
      label_loss_num *= problem_hparams.loss_multiplier
      # Unscaled classification label loss.
      label_loss = label_loss_num / tf.maximum(1.0, label_loss_den)
      summaries.append([task.name+"_label_loss", label_loss])
      # Scaling.
      if hparams.multiproblem_reweight_label_loss:
        label_loss *= hparams.multiproblem_label_weight
        seq_loss *= (1 - hparams.multiproblem_label_weight)
      # This is the training loss for the optimizer after scaling.
      task_loss_val = seq_loss + label_loss
      loss_den_ = label_loss_den
    else:
      # Loss only from the target sequence.
      target_loss_num, target_loss_den = loss(
          logits, feature,
          lambda x: common_layers.weights_multi_problem(x, task.task_id),  # pylint: disable=cell-var-from-loop
          hparams, vocab_size)
      target_loss_num *= problem_hparams.loss_multiplier
      # Unscaled target sequence loss.
      target_loss = target_loss_num / tf.maximum(1.0, target_loss_den)
      summaries.append([task.name+"_target_loss", target_loss])
      # Scaling.
      if hparams.multiproblem_reweight_label_loss:
        target_loss *= hparams.multiproblem_label_weight
        seq_loss *= (1 - hparams.multiproblem_label_weight)
      # This is the training loss for the optimizer after all the scaling.
      task_loss_val = seq_loss + target_loss
      loss_den_ = target_loss_den
    summaries.append([task.name+"_loss", task_loss_val])
    # Adding 1 to the loss den for each task leads to averaging task losses.
    # TODO(urvashik): Fix combination with other task losses - weighted
    # average based on the number of examples from that task.
loss_num += task_loss_val loss_den += tf.minimum(tf.convert_to_tensor(1, dtype=tf.float32), loss_den_) return loss_num, loss_den, summaries
def function[aggregate_task_losses, parameter[hparams, problem_hparams, logits, feature_name, feature]]: constant[Multiproblem loss function.] if <ast.UnaryOp object at 0x7da18c4cf0d0> begin[:] return[call[name[aggregate_task_lm_losses], parameter[]]] variable[summaries] assign[=] list[[]] variable[main_task_id] assign[=] call[name[hparams].problem.task_list][constant[0]].task_id variable[vocab_size] assign[=] call[name[problem_hparams].vocab_size][name[feature_name]] if <ast.BoolOp object at 0x7da18c4cc5e0> begin[:] <ast.AugAssign object at 0x7da18c4cebf0> variable[modality] assign[=] call[name[problem_hparams].modality][name[feature_name]] variable[loss] assign[=] call[name[hparams].loss.get, parameter[name[feature_name], call[name[modalities].get_loss, parameter[name[modality]]]]] variable[weights_fn] assign[=] call[name[hparams].weights_fn.get, parameter[name[feature_name], call[name[modalities].get_weights_fn, parameter[name[modality]]]]] <ast.Tuple object at 0x7da18c4cd630> assign[=] call[name[loss], parameter[name[logits], name[feature], <ast.Lambda object at 0x7da18c4cca60>, name[hparams], name[vocab_size], name[weights_fn]]] variable[loss_val] assign[=] binary_operation[name[loss_num] / call[name[tf].maximum, parameter[constant[1.0], name[loss_den]]]] call[name[summaries].append, parameter[list[[<ast.BinOp object at 0x7da18c4cdea0>, <ast.Name object at 0x7da18c4cfd00>]]]] variable[loss_num] assign[=] name[loss_val] variable[loss_den] assign[=] call[name[tf].minimum, parameter[call[name[tf].convert_to_tensor, parameter[constant[1]]], name[loss_den]]] for taget[name[task]] in starred[call[name[hparams].problem.task_list][<ast.Slice object at 0x7da18c4cd360>]] begin[:] <ast.Tuple object at 0x7da18c4cef80> assign[=] call[name[loss], parameter[name[logits], name[feature], <ast.Lambda object at 0x7da18c4cc8b0>, name[hparams], name[vocab_size]]] <ast.AugAssign object at 0x7da18c4cdf90> variable[seq_loss] assign[=] binary_operation[name[seq_loss_num] / call[name[tf].maximum, parameter[constant[1.0], name[seq_loss_den]]]] call[name[summaries].append, parameter[list[[<ast.BinOp object at 0x7da18c4cfdc0>, <ast.Name object at 0x7da1b201fc70>]]]] if call[name[hasattr], parameter[name[task], constant[num_classes]]] begin[:] <ast.Tuple object at 0x7da1b201f6d0> assign[=] call[name[loss], parameter[name[logits], name[feature], <ast.Lambda object at 0x7da1b201d2a0>, name[hparams], name[vocab_size]]] <ast.AugAssign object at 0x7da1b1e10550> variable[label_loss] assign[=] binary_operation[name[label_loss_num] / call[name[tf].maximum, parameter[constant[1.0], name[label_loss_den]]]] call[name[summaries].append, parameter[list[[<ast.BinOp object at 0x7da1b201cf70>, <ast.Name object at 0x7da1b201e290>]]]] if name[hparams].multiproblem_reweight_label_loss begin[:] <ast.AugAssign object at 0x7da1b201f820> <ast.AugAssign object at 0x7da1b201c160> variable[task_loss_val] assign[=] binary_operation[name[seq_loss] + name[label_loss]] variable[loss_den_] assign[=] name[label_loss_den] call[name[summaries].append, parameter[list[[<ast.BinOp object at 0x7da1b201dc00>, <ast.Name object at 0x7da1b201d8a0>]]]] <ast.AugAssign object at 0x7da1b201c280> <ast.AugAssign object at 0x7da1b201d990> return[tuple[[<ast.Name object at 0x7da1b201feb0>, <ast.Name object at 0x7da1b201d9c0>, <ast.Name object at 0x7da1b201e200>]]]
keyword[def] identifier[aggregate_task_losses] ( identifier[hparams] , identifier[problem_hparams] , identifier[logits] , identifier[feature_name] , identifier[feature] ): literal[string] keyword[if] keyword[not] identifier[hparams] . identifier[multiproblem_reweight_label_loss] : keyword[return] identifier[aggregate_task_lm_losses] ( identifier[hparams] = identifier[hparams] , identifier[problem_hparams] = identifier[problem_hparams] , identifier[logits] = identifier[logits] , identifier[feature_name] = identifier[feature_name] , identifier[feature] = identifier[feature] ) identifier[summaries] =[] identifier[main_task_id] = identifier[hparams] . identifier[problem] . identifier[task_list] [ literal[int] ]. identifier[task_id] identifier[vocab_size] = identifier[problem_hparams] . identifier[vocab_size] [ identifier[feature_name] ] keyword[if] identifier[vocab_size] keyword[is] keyword[not] keyword[None] keyword[and] identifier[hasattr] ( identifier[hparams] , literal[string] ): identifier[vocab_size] +=(- identifier[vocab_size] )% identifier[hparams] . identifier[vocab_divisor] identifier[modality] = identifier[problem_hparams] . identifier[modality] [ identifier[feature_name] ] identifier[loss] = identifier[hparams] . identifier[loss] . identifier[get] ( identifier[feature_name] , identifier[modalities] . identifier[get_loss] ( identifier[modality] )) identifier[weights_fn] = identifier[hparams] . identifier[weights_fn] . identifier[get] ( identifier[feature_name] , identifier[modalities] . identifier[get_weights_fn] ( identifier[modality] )) identifier[loss_num] , identifier[loss_den] = identifier[loss] ( identifier[logits] , identifier[feature] , keyword[lambda] identifier[x] : identifier[common_layers] . identifier[weights_multi_problem_all] ( identifier[x] , identifier[main_task_id] ), identifier[hparams] , identifier[vocab_size] , identifier[weights_fn] ) identifier[loss_val] = identifier[loss_num] / identifier[tf] . identifier[maximum] ( literal[int] , identifier[loss_den] ) identifier[summaries] . identifier[append] ([ identifier[hparams] . identifier[problem] . identifier[task_list] [ literal[int] ]. identifier[name] + literal[string] , identifier[loss_val] ]) identifier[loss_num] = identifier[loss_val] identifier[loss_den] = identifier[tf] . identifier[minimum] ( identifier[tf] . identifier[convert_to_tensor] ( literal[int] , identifier[dtype] = identifier[tf] . identifier[float32] ), identifier[loss_den] ) keyword[for] identifier[task] keyword[in] identifier[hparams] . identifier[problem] . identifier[task_list] [ literal[int] :]: identifier[seq_loss_num] , identifier[seq_loss_den] = identifier[loss] ( identifier[logits] , identifier[feature] , keyword[lambda] identifier[x] : identifier[common_layers] . identifier[weights_multi_problem_input] ( identifier[x] , identifier[task] . identifier[task_id] ), identifier[hparams] , identifier[vocab_size] ) identifier[seq_loss_num] *= identifier[problem_hparams] . identifier[loss_multiplier] identifier[seq_loss] = identifier[seq_loss_num] / identifier[tf] . identifier[maximum] ( literal[int] , identifier[seq_loss_den] ) identifier[summaries] . identifier[append] ([ identifier[task] . identifier[name] + literal[string] , identifier[seq_loss] ]) keyword[if] identifier[hasattr] ( identifier[task] , literal[string] ): identifier[label_loss_num] , identifier[label_loss_den] = identifier[loss] ( identifier[logits] , identifier[feature] , keyword[lambda] identifier[x] : identifier[common_layers] . 
identifier[weights_multi_problem] ( identifier[x] , identifier[task] . identifier[task_id] ), identifier[hparams] , identifier[vocab_size] ) identifier[label_loss_num] *= identifier[problem_hparams] . identifier[loss_multiplier] identifier[label_loss] = identifier[label_loss_num] / identifier[tf] . identifier[maximum] ( literal[int] , identifier[label_loss_den] ) identifier[summaries] . identifier[append] ([ identifier[task] . identifier[name] + literal[string] , identifier[label_loss] ]) keyword[if] identifier[hparams] . identifier[multiproblem_reweight_label_loss] : identifier[label_loss] *= identifier[hparams] . identifier[multiproblem_label_weight] identifier[seq_loss] *=( literal[int] - identifier[hparams] . identifier[multiproblem_label_weight] ) identifier[task_loss_val] = identifier[seq_loss] + identifier[label_loss] identifier[loss_den_] = identifier[label_loss_den] keyword[else] : identifier[target_loss_num] , identifier[target_loss_den] = identifier[loss] ( identifier[logits] , identifier[feature] , keyword[lambda] identifier[x] : identifier[common_layers] . identifier[weights_multi_problem] ( identifier[x] , identifier[task] . identifier[task_id] ), identifier[hparams] , identifier[vocab_size] ) identifier[target_loss_num] *= identifier[problem_hparams] . identifier[loss_multiplier] identifier[target_loss] = identifier[target_loss_num] / identifier[tf] . identifier[maximum] ( literal[int] , identifier[target_loss_den] ) identifier[summaries] . identifier[append] ([ identifier[task] . identifier[name] + literal[string] , identifier[target_loss] ]) keyword[if] identifier[hparams] . identifier[multiproblem_reweight_label_loss] : identifier[target_loss] *= identifier[hparams] . identifier[multiproblem_label_weight] identifier[seq_loss] *=( literal[int] - identifier[hparams] . identifier[multiproblem_label_weight] ) identifier[task_loss_val] = identifier[seq_loss] + identifier[target_loss] identifier[loss_den_] = identifier[target_loss_den] identifier[summaries] . identifier[append] ([ identifier[task] . identifier[name] + literal[string] , identifier[task_loss_val] ]) identifier[loss_num] += identifier[task_loss_val] identifier[loss_den] += identifier[tf] . identifier[minimum] ( identifier[tf] . identifier[convert_to_tensor] ( literal[int] , identifier[dtype] = identifier[tf] . identifier[float32] ), identifier[loss_den_] ) keyword[return] identifier[loss_num] , identifier[loss_den] , identifier[summaries]
def aggregate_task_losses(hparams, problem_hparams, logits, feature_name, feature):
    """Multiproblem loss function."""
    # If no reweighting, we want the default loss to mimic the LM loss.
    if not hparams.multiproblem_reweight_label_loss:
        return aggregate_task_lm_losses(hparams=hparams, problem_hparams=problem_hparams, logits=logits, feature_name=feature_name, feature=feature) # depends on [control=['if'], data=[]]
    summaries = []
    main_task_id = hparams.problem.task_list[0].task_id
    vocab_size = problem_hparams.vocab_size[feature_name]
    if vocab_size is not None and hasattr(hparams, 'vocab_divisor'):
        vocab_size += -vocab_size % hparams.vocab_divisor # depends on [control=['if'], data=[]]
    modality = problem_hparams.modality[feature_name]
    loss = hparams.loss.get(feature_name, modalities.get_loss(modality))
    weights_fn = hparams.weights_fn.get(feature_name, modalities.get_weights_fn(modality))
    # Primary task loss
    (loss_num, loss_den) = loss(logits, feature, lambda x: common_layers.weights_multi_problem_all(x, main_task_id), hparams, vocab_size, weights_fn)
    loss_val = loss_num / tf.maximum(1.0, loss_den)
    summaries.append([hparams.problem.task_list[0].name + '_loss', loss_val])
    # Since the losses may undergo rescaling, they cannot exist as separate
    # numerators and denominators. Set the denominators to 1 in order to
    # facilitate loss averaging.
    loss_num = loss_val
    loss_den = tf.minimum(tf.convert_to_tensor(1, dtype=tf.float32), loss_den)
    for task in hparams.problem.task_list[1:]:
        # Loss only from the input sequence -- the auxiliary LM loss.
        # pylint: disable=cell-var-from-loop
        (seq_loss_num, seq_loss_den) = loss(logits, feature, lambda x: common_layers.weights_multi_problem_input(x, task.task_id), hparams, vocab_size)
        seq_loss_num *= problem_hparams.loss_multiplier
        # Unscaled sequence loss.
        seq_loss = seq_loss_num / tf.maximum(1.0, seq_loss_den)
        summaries.append([task.name + '_seq_loss', seq_loss])
        if hasattr(task, 'num_classes'):
            # Loss only from the classification label.
            # pylint: disable=cell-var-from-loop
            (label_loss_num, label_loss_den) = loss(logits, feature, lambda x: common_layers.weights_multi_problem(x, task.task_id), hparams, vocab_size)
            label_loss_num *= problem_hparams.loss_multiplier
            # Unscaled classification label loss.
            label_loss = label_loss_num / tf.maximum(1.0, label_loss_den)
            summaries.append([task.name + '_label_loss', label_loss])
            # Scaling.
            if hparams.multiproblem_reweight_label_loss:
                label_loss *= hparams.multiproblem_label_weight
                seq_loss *= 1 - hparams.multiproblem_label_weight # depends on [control=['if'], data=[]]
            # This is the training loss for the optimizer after scaling.
            task_loss_val = seq_loss + label_loss
            loss_den_ = label_loss_den # depends on [control=['if'], data=[]]
        else:
            # Loss only from the target sequence.
            # pylint: disable=cell-var-from-loop
            (target_loss_num, target_loss_den) = loss(logits, feature, lambda x: common_layers.weights_multi_problem(x, task.task_id), hparams, vocab_size)
            target_loss_num *= problem_hparams.loss_multiplier
            # Unscaled target sequence loss.
            target_loss = target_loss_num / tf.maximum(1.0, target_loss_den)
            summaries.append([task.name + '_target_loss', target_loss])
            # Scaling.
            if hparams.multiproblem_reweight_label_loss:
                target_loss *= hparams.multiproblem_label_weight
                seq_loss *= 1 - hparams.multiproblem_label_weight # depends on [control=['if'], data=[]]
            # This is the training loss for the optimizer after all the scaling.
task_loss_val = seq_loss + target_loss loss_den_ = target_loss_den summaries.append([task.name + '_loss', task_loss_val]) # Adding 1 to the loss den for each task leads to averaging task losses. # TODO(urvashik): Fix combination with other task losses - weighted # average based on the number of examples from that task. loss_num += task_loss_val loss_den += tf.minimum(tf.convert_to_tensor(1, dtype=tf.float32), loss_den_) # depends on [control=['for'], data=['task']] return (loss_num, loss_den, summaries)
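The tf.minimum(1, den) capping above is what makes the final loss_num / loss_den a mean over tasks. A toy illustration in plain Python, with hypothetical numbers:

task_losses = [0.7, 1.2, 0.3]  # hypothetical per-task averaged losses
raw_dens = [5.0, 2.0, 9.0]     # hypothetical raw loss denominators
num = sum(task_losses)
den = sum(min(1.0, d) for d in raw_dens)  # each task contributes at most 1
print(num / den)  # 0.7333..., the arithmetic mean of task_losses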
def get_upscaled_value_tuple(self): """ Scales an RGB color object from decimal 0.0-1.0 to int 0-255. """ # Scale up to 0-255 values. rgb_r = int(math.floor(0.5 + self.rgb_r * 255)) rgb_g = int(math.floor(0.5 + self.rgb_g * 255)) rgb_b = int(math.floor(0.5 + self.rgb_b * 255)) return rgb_r, rgb_g, rgb_b
def function[get_upscaled_value_tuple, parameter[self]]: constant[ Scales an RGB color object from decimal 0.0-1.0 to int 0-255. ] variable[rgb_r] assign[=] call[name[int], parameter[call[name[math].floor, parameter[binary_operation[constant[0.5] + binary_operation[name[self].rgb_r * constant[255]]]]]]] variable[rgb_g] assign[=] call[name[int], parameter[call[name[math].floor, parameter[binary_operation[constant[0.5] + binary_operation[name[self].rgb_g * constant[255]]]]]]] variable[rgb_b] assign[=] call[name[int], parameter[call[name[math].floor, parameter[binary_operation[constant[0.5] + binary_operation[name[self].rgb_b * constant[255]]]]]]] return[tuple[[<ast.Name object at 0x7da2041d8490>, <ast.Name object at 0x7da2041db100>, <ast.Name object at 0x7da2041db310>]]]
keyword[def] identifier[get_upscaled_value_tuple] ( identifier[self] ): literal[string] identifier[rgb_r] = identifier[int] ( identifier[math] . identifier[floor] ( literal[int] + identifier[self] . identifier[rgb_r] * literal[int] )) identifier[rgb_g] = identifier[int] ( identifier[math] . identifier[floor] ( literal[int] + identifier[self] . identifier[rgb_g] * literal[int] )) identifier[rgb_b] = identifier[int] ( identifier[math] . identifier[floor] ( literal[int] + identifier[self] . identifier[rgb_b] * literal[int] )) keyword[return] identifier[rgb_r] , identifier[rgb_g] , identifier[rgb_b]
def get_upscaled_value_tuple(self): """ Scales an RGB color object from decimal 0.0-1.0 to int 0-255. """ # Scale up to 0-255 values. rgb_r = int(math.floor(0.5 + self.rgb_r * 255)) rgb_g = int(math.floor(0.5 + self.rgb_g * 255)) rgb_b = int(math.floor(0.5 + self.rgb_b * 255)) return (rgb_r, rgb_g, rgb_b)
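A quick check of the rounding behaviour (adding 0.5 before math.floor rounds half up). The _RGB stand-in below is hypothetical; it only supplies the attributes the method reads.

import math

class _RGB:
    rgb_r, rgb_g, rgb_b = 1.0, 0.5, 0.0039
    get_upscaled_value_tuple = get_upscaled_value_tuple  # reuse the method above

print(_RGB().get_upscaled_value_tuple())  # (255, 128, 1)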
def _precompute_absense_costs(dictionary, removal_costs, insertion_costs, n, allow_spaces=False):
    """
    Computes the minimal cost of the appearance of a new symbol at the nodes
    of the dictionary, according to the penalties in costs

    Arguments:
    ---------------
    dictionary : Trie
        the dictionary, stored as an acyclic automaton
    removal_costs : dict
        penalties for removing symbols
    insertion_costs : dict
        penalties for inserting symbols
    n : int
        the depth of ``looking ahead'' in the dictionary

    Returns
    ---------------
    answer : list of dicts, len(answer)=len(dictionary)
        answer[i][a][j] equals the minimal penalty for the appearance of
        symbol a at position j in the node with index i
    """
    answer = [dict() for node in dictionary.data]
    if n == 0:
        return answer
    curr_alphabet = copy.copy(dictionary.alphabet)
    if allow_spaces:
        curr_alphabet += [' ']
    for l, (costs_in_node, node) in enumerate(zip(answer, dictionary.data)):
        # determining the minimal cost of removing symbols
        curr_node_removal_costs = np.empty(dtype=np.float64, shape=(n,))
        if len(node[0]) > 0:
            curr_node_removal_costs[0] = min(removal_costs[symbol] for symbol in node[0])
            for j, symbols in enumerate(node[1:], 1):
                if len(symbols) == 0:
                    curr_node_removal_costs[j:] = curr_node_removal_costs[j-1]
                    break
                curr_cost = min(removal_costs[symbol] for symbol in symbols)
                curr_node_removal_costs[j] = min(curr_node_removal_costs[j-1], curr_cost)
        else:
            curr_node_removal_costs[:] = np.inf
        # determining the minimal cost of insertion
        for a in curr_alphabet:
            curr_symbol_costs = np.empty(dtype=np.float64, shape=(n,))
            curr_symbol_costs.fill(insertion_costs[a])
            for j, symbols in enumerate(node):
                if a in symbols:
                    curr_symbol_costs[j:] = 0.0
                    break
                curr_symbol_costs[j] = min(curr_symbol_costs[j], curr_node_removal_costs[j])
            costs_in_node[a] = curr_symbol_costs
    return answer
def function[_precompute_absense_costs, parameter[dictionary, removal_costs, insertion_costs, n, allow_spaces]]: constant[ Computes the minimal cost of the appearance of a new symbol at the nodes of the dictionary, according to the penalties in costs Arguments: --------------- dictionary : Trie the dictionary, stored as an acyclic automaton removal_costs : dict penalties for removing symbols insertion_costs : dict penalties for inserting symbols n : int the depth of ``looking ahead'' in the dictionary Returns --------------- answer : list of dicts, len(answer)=len(dictionary) answer[i][a][j] equals the minimal penalty for the appearance of symbol a at position j in the node with index i ] variable[answer] assign[=] <ast.ListComp object at 0x7da20e74a8f0> if compare[name[n] equal[==] constant[0]] begin[:] return[name[answer]] variable[curr_alphabet] assign[=] call[name[copy].copy, parameter[name[dictionary].alphabet]] if name[allow_spaces] begin[:] <ast.AugAssign object at 0x7da20e74a1a0> for taget[tuple[[<ast.Name object at 0x7da20e74b820>, <ast.Tuple object at 0x7da20e74baf0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[answer], name[dictionary].data]]]]] begin[:] variable[curr_node_removal_costs] assign[=] call[name[np].empty, parameter[]] if compare[call[name[len], parameter[call[name[node]][constant[0]]]] greater[>] constant[0]] begin[:] call[name[curr_node_removal_costs]][constant[0]] assign[=] call[name[min], parameter[<ast.GeneratorExp object at 0x7da20e963b80>]] for taget[tuple[[<ast.Name object at 0x7da20e961ab0>, <ast.Name object at 0x7da20e962110>]]] in starred[call[name[enumerate], parameter[call[name[node]][<ast.Slice object at 0x7da20e963e80>], constant[1]]]] begin[:] if compare[call[name[len], parameter[name[symbols]]] equal[==] constant[0]] begin[:] call[name[curr_node_removal_costs]][<ast.Slice object at 0x7da20e962890>] assign[=] call[name[curr_node_removal_costs]][binary_operation[name[j] - constant[1]]] break variable[curr_cost] assign[=] call[name[min], parameter[<ast.GeneratorExp object at 0x7da20e963970>]] call[name[curr_node_removal_costs]][name[j]] assign[=] call[name[min], parameter[call[name[curr_node_removal_costs]][binary_operation[name[j] - constant[1]]], name[curr_cost]]] for taget[name[a]] in starred[name[curr_alphabet]] begin[:] variable[curr_symbol_costs] assign[=] call[name[np].empty, parameter[]] call[name[curr_symbol_costs].fill, parameter[call[name[insertion_costs]][name[a]]]] for taget[tuple[[<ast.Name object at 0x7da1b03a06a0>, <ast.Name object at 0x7da1b03a16c0>]]] in starred[call[name[enumerate], parameter[name[node]]]] begin[:] if compare[name[a] in name[symbols]] begin[:] call[name[curr_symbol_costs]][<ast.Slice object at 0x7da20c6e4340>] assign[=] constant[0.0] break call[name[curr_symbol_costs]][name[j]] assign[=] call[name[min], parameter[call[name[curr_symbol_costs]][name[j]], call[name[curr_node_removal_costs]][name[j]]]] call[name[costs_in_node]][name[a]] assign[=] name[curr_symbol_costs] return[name[answer]]
keyword[def] identifier[_precompute_absense_costs] ( identifier[dictionary] , identifier[removal_costs] , identifier[insertion_costs] , identifier[n] , identifier[allow_spaces] = keyword[False] ): literal[string] identifier[answer] =[ identifier[dict] () keyword[for] identifier[node] keyword[in] identifier[dictionary] . identifier[data] ] keyword[if] identifier[n] == literal[int] : keyword[return] identifier[answer] identifier[curr_alphabet] = identifier[copy] . identifier[copy] ( identifier[dictionary] . identifier[alphabet] ) keyword[if] identifier[allow_spaces] : identifier[curr_alphabet] +=[ literal[string] ] keyword[for] identifier[l] ,( identifier[costs_in_node] , identifier[node] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[answer] , identifier[dictionary] . identifier[data] )): identifier[curr_node_removal_costs] = identifier[np] . identifier[empty] ( identifier[dtype] = identifier[np] . identifier[float64] , identifier[shape] =( identifier[n] ,)) keyword[if] identifier[len] ( identifier[node] [ literal[int] ])> literal[int] : identifier[curr_node_removal_costs] [ literal[int] ]= identifier[min] ( identifier[removal_costs] [ identifier[symbol] ] keyword[for] identifier[symbol] keyword[in] identifier[node] [ literal[int] ]) keyword[for] identifier[j] , identifier[symbols] keyword[in] identifier[enumerate] ( identifier[node] [ literal[int] :], literal[int] ): keyword[if] identifier[len] ( identifier[symbols] )== literal[int] : identifier[curr_node_removal_costs] [ identifier[j] :]= identifier[curr_node_removal_costs] [ identifier[j] - literal[int] ] keyword[break] identifier[curr_cost] = identifier[min] ( identifier[removal_costs] [ identifier[symbol] ] keyword[for] identifier[symbol] keyword[in] identifier[symbols] ) identifier[curr_node_removal_costs] [ identifier[j] ]= identifier[min] ( identifier[curr_node_removal_costs] [ identifier[j] - literal[int] ], identifier[curr_cost] ) keyword[else] : identifier[curr_node_removal_costs] [:]= identifier[np] . identifier[inf] keyword[for] identifier[a] keyword[in] identifier[curr_alphabet] : identifier[curr_symbol_costs] = identifier[np] . identifier[empty] ( identifier[dtype] = identifier[np] . identifier[float64] , identifier[shape] =( identifier[n] ,)) identifier[curr_symbol_costs] . identifier[fill] ( identifier[insertion_costs] [ identifier[a] ]) keyword[for] identifier[j] , identifier[symbols] keyword[in] identifier[enumerate] ( identifier[node] ): keyword[if] identifier[a] keyword[in] identifier[symbols] : identifier[curr_symbol_costs] [ identifier[j] :]= literal[int] keyword[break] identifier[curr_symbol_costs] [ identifier[j] ]= identifier[min] ( identifier[curr_symbol_costs] [ identifier[j] ], identifier[curr_node_removal_costs] [ identifier[j] ]) identifier[costs_in_node] [ identifier[a] ]= identifier[curr_symbol_costs] keyword[return] identifier[answer]
def _precompute_absense_costs(dictionary, removal_costs, insertion_costs, n, allow_spaces=False):
    """
    Computes the minimal cost of the appearance of a new symbol at the nodes
    of the dictionary, according to the penalties in costs

    Arguments:
    ---------------
    dictionary : Trie
        the dictionary, stored as an acyclic automaton
    removal_costs : dict
        penalties for removing symbols
    insertion_costs : dict
        penalties for inserting symbols
    n : int
        the depth of ``looking ahead'' in the dictionary

    Returns
    ---------------
    answer : list of dicts, len(answer)=len(dictionary)
        answer[i][a][j] equals the minimal penalty for the appearance of
        symbol a at position j in the node with index i
    """
    answer = [dict() for node in dictionary.data]
    if n == 0:
        return answer # depends on [control=['if'], data=[]]
    curr_alphabet = copy.copy(dictionary.alphabet)
    if allow_spaces:
        curr_alphabet += [' '] # depends on [control=['if'], data=[]]
    for (l, (costs_in_node, node)) in enumerate(zip(answer, dictionary.data)):
        # determining the minimal cost of removing symbols
        curr_node_removal_costs = np.empty(dtype=np.float64, shape=(n,))
        if len(node[0]) > 0:
            curr_node_removal_costs[0] = min((removal_costs[symbol] for symbol in node[0]))
            for (j, symbols) in enumerate(node[1:], 1):
                if len(symbols) == 0:
                    curr_node_removal_costs[j:] = curr_node_removal_costs[j - 1]
                    break # depends on [control=['if'], data=[]]
                curr_cost = min((removal_costs[symbol] for symbol in symbols))
                curr_node_removal_costs[j] = min(curr_node_removal_costs[j - 1], curr_cost) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
        else:
            curr_node_removal_costs[:] = np.inf
        # determining the minimal cost of insertion
        for a in curr_alphabet:
            curr_symbol_costs = np.empty(dtype=np.float64, shape=(n,))
            curr_symbol_costs.fill(insertion_costs[a])
            for (j, symbols) in enumerate(node):
                if a in symbols:
                    curr_symbol_costs[j:] = 0.0
                    break # depends on [control=['if'], data=[]]
                curr_symbol_costs[j] = min(curr_symbol_costs[j], curr_node_removal_costs[j]) # depends on [control=['for'], data=[]]
            costs_in_node[a] = curr_symbol_costs # depends on [control=['for'], data=['a']] # depends on [control=['for'], data=[]]
    return answer
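A hedged sketch of the Trie interface the function expects: an .alphabet plus .data, where each node is a list of symbol sets, one per lookahead position. The stand-in class and the cost values are hypothetical; numpy and copy (both used by the function) must be importable.

import copy  # required by _precompute_absense_costs itself
import numpy as np

class _FakeTrie:
    alphabet = ["a", "b"]
    # two nodes: node 0 can continue with 'a', then 'b'; node 1 is a dead end
    data = [[{"a"}, {"b"}], [set()]]

costs = {"a": 1.0, "b": 2.0}
table = _precompute_absense_costs(_FakeTrie(), costs, costs, n=2)
print(table[0]["b"])  # [1. 0.]: removing 'a' (1.0) beats inserting 'b' (2.0)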
def feed(self, *args): '''Set the input(s) for the next operation by replacing the terminal nodes. The arguments can be either layer names or the actual layers. ''' assert len(args) != 0 self.terminals = [] for fed_layer in args: if isinstance(fed_layer, str): try: fed_layer = self.layers[fed_layer] except KeyError: raise KeyError('Unknown layer name fed: %s' % fed_layer) self.terminals.append(fed_layer) return self
def function[feed, parameter[self]]: constant[Set the input(s) for the next operation by replacing the terminal nodes. The arguments can be either layer names or the actual layers. ] assert[compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[0]]] name[self].terminals assign[=] list[[]] for taget[name[fed_layer]] in starred[name[args]] begin[:] if call[name[isinstance], parameter[name[fed_layer], name[str]]] begin[:] <ast.Try object at 0x7da1b1a2ceb0> call[name[self].terminals.append, parameter[name[fed_layer]]] return[name[self]]
keyword[def] identifier[feed] ( identifier[self] ,* identifier[args] ): literal[string] keyword[assert] identifier[len] ( identifier[args] )!= literal[int] identifier[self] . identifier[terminals] =[] keyword[for] identifier[fed_layer] keyword[in] identifier[args] : keyword[if] identifier[isinstance] ( identifier[fed_layer] , identifier[str] ): keyword[try] : identifier[fed_layer] = identifier[self] . identifier[layers] [ identifier[fed_layer] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[KeyError] ( literal[string] % identifier[fed_layer] ) identifier[self] . identifier[terminals] . identifier[append] ( identifier[fed_layer] ) keyword[return] identifier[self]
def feed(self, *args): """Set the input(s) for the next operation by replacing the terminal nodes. The arguments can be either layer names or the actual layers. """ assert len(args) != 0 self.terminals = [] for fed_layer in args: if isinstance(fed_layer, str): try: fed_layer = self.layers[fed_layer] # depends on [control=['try'], data=[]] except KeyError: raise KeyError('Unknown layer name fed: %s' % fed_layer) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] self.terminals.append(fed_layer) # depends on [control=['for'], data=['fed_layer']] return self
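A small sketch of what feed accepts, assuming a Network-style class that keeps its named layers in self.layers (the stand-in class and layer names are hypothetical):

class _Net:
    feed = feed  # reuse the method defined above
    def __init__(self):
        self.layers = {"conv1": object(), "conv2": object()}

net = _Net()
net.feed("conv1", net.layers["conv2"])  # layer names and layer objects mix
print(len(net.terminals))  # 2; feed() returns self, so calls can be chained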
def _badpath(path, base):
    """
    os.path.join will ignore base if path is absolute, so resolve the
    joined path and require that it still lies under base.
    """
    return not _resolved(os.path.join(base, path)).startswith(base)
def function[_badpath, parameter[path, base]]: constant[ os.path.join will ignore base if path is absolute, so resolve the joined path and require that it still lies under base. ] return[<ast.UnaryOp object at 0x7da20c6a92d0>]
keyword[def] identifier[_badpath] ( identifier[path] , identifier[base] ): literal[string] keyword[return] keyword[not] identifier[_resolved] ( identifier[os] . identifier[path] . identifier[join] ( identifier[base] , identifier[path] )). identifier[startswith] ( identifier[base] )
def _badpath(path, base):
    """
    os.path.join will ignore base if path is absolute, so resolve the
    joined path and require that it still lies under base.
    """
    return not _resolved(os.path.join(base, path)).startswith(base)
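_resolved is defined elsewhere in the module; in this well-known tar-extraction guard it is commonly realpath(abspath(path)) (an assumption here), which makes the prefix test meaningful:

import os

def _resolved(path):  # assumed helper: canonicalise '..' and symlinks
    return os.path.realpath(os.path.abspath(path))

base = _resolved("safe_dir")
print(_badpath("../etc/passwd", base))    # True: escapes the base directory
print(_badpath("member/file.txt", base))  # False: stays under the base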
def detectBOM(self):
    """Attempts to detect a BOM at the start of the stream. If
    an encoding can be determined from the BOM return the name of the
    encoding otherwise return None"""
    bomDict = {
        codecs.BOM_UTF8: 'utf-8',
        codecs.BOM_UTF16_LE: 'utf-16-le', codecs.BOM_UTF16_BE: 'utf-16-be',
        codecs.BOM_UTF32_LE: 'utf-32-le', codecs.BOM_UTF32_BE: 'utf-32-be'
    }

    # Go to beginning of file and read in 4 bytes
    string = self.rawStream.read(4)
    assert isinstance(string, bytes)

    # Try detecting the BOM using bytes from the string
    encoding = bomDict.get(string[:3])  # UTF-8
    seek = 3
    if not encoding:
        # Need to detect UTF-32 before UTF-16
        encoding = bomDict.get(string)  # UTF-32
        seek = 4
        if not encoding:
            encoding = bomDict.get(string[:2])  # UTF-16
            seek = 2

    # Set the read position past the BOM if one was found, otherwise
    # set it to the start of the stream
    self.rawStream.seek(encoding and seek or 0)

    return encoding
def function[detectBOM, parameter[self]]: constant[Attempts to detect a BOM at the start of the stream. If an encoding can be determined from the BOM return the name of the encoding otherwise return None] variable[bomDict] assign[=] dictionary[[<ast.Attribute object at 0x7da204623220>, <ast.Attribute object at 0x7da2046236d0>, <ast.Attribute object at 0x7da2046210c0>, <ast.Attribute object at 0x7da204620a60>, <ast.Attribute object at 0x7da204621f90>], [<ast.Constant object at 0x7da204623640>, <ast.Constant object at 0x7da2046232e0>, <ast.Constant object at 0x7da2046223b0>, <ast.Constant object at 0x7da204623fd0>, <ast.Constant object at 0x7da204620b50>]] variable[string] assign[=] call[name[self].rawStream.read, parameter[constant[4]]] assert[call[name[isinstance], parameter[name[string], name[bytes]]]] variable[encoding] assign[=] call[name[bomDict].get, parameter[call[name[string]][<ast.Slice object at 0x7da204621780>]]] variable[seek] assign[=] constant[3] if <ast.UnaryOp object at 0x7da204620b20> begin[:] variable[encoding] assign[=] call[name[bomDict].get, parameter[name[string]]] variable[seek] assign[=] constant[4] if <ast.UnaryOp object at 0x7da204621480> begin[:] variable[encoding] assign[=] call[name[bomDict].get, parameter[call[name[string]][<ast.Slice object at 0x7da204621270>]]] variable[seek] assign[=] constant[2] call[name[self].rawStream.seek, parameter[<ast.BoolOp object at 0x7da2046209a0>]] return[name[encoding]]
keyword[def] identifier[detectBOM] ( identifier[self] ): literal[string] identifier[bomDict] ={ identifier[codecs] . identifier[BOM_UTF8] : literal[string] , identifier[codecs] . identifier[BOM_UTF16_LE] : literal[string] , identifier[codecs] . identifier[BOM_UTF16_BE] : literal[string] , identifier[codecs] . identifier[BOM_UTF32_LE] : literal[string] , identifier[codecs] . identifier[BOM_UTF32_BE] : literal[string] } identifier[string] = identifier[self] . identifier[rawStream] . identifier[read] ( literal[int] ) keyword[assert] identifier[isinstance] ( identifier[string] , identifier[bytes] ) identifier[encoding] = identifier[bomDict] . identifier[get] ( identifier[string] [: literal[int] ]) identifier[seek] = literal[int] keyword[if] keyword[not] identifier[encoding] : identifier[encoding] = identifier[bomDict] . identifier[get] ( identifier[string] ) identifier[seek] = literal[int] keyword[if] keyword[not] identifier[encoding] : identifier[encoding] = identifier[bomDict] . identifier[get] ( identifier[string] [: literal[int] ]) identifier[seek] = literal[int] identifier[self] . identifier[rawStream] . identifier[seek] ( identifier[encoding] keyword[and] identifier[seek] keyword[or] literal[int] ) keyword[return] identifier[encoding]
def detectBOM(self):
    """Attempts to detect a BOM at the start of the stream. If
    an encoding can be determined from the BOM return the name of the
    encoding otherwise return None"""
    bomDict = {codecs.BOM_UTF8: 'utf-8', codecs.BOM_UTF16_LE: 'utf-16-le', codecs.BOM_UTF16_BE: 'utf-16-be', codecs.BOM_UTF32_LE: 'utf-32-le', codecs.BOM_UTF32_BE: 'utf-32-be'}
    # Go to beginning of file and read in 4 bytes
    string = self.rawStream.read(4)
    assert isinstance(string, bytes)
    # Try detecting the BOM using bytes from the string
    encoding = bomDict.get(string[:3])  # UTF-8
    seek = 3
    if not encoding:
        # Need to detect UTF-32 before UTF-16
        encoding = bomDict.get(string)  # UTF-32
        seek = 4
        if not encoding:
            encoding = bomDict.get(string[:2])  # UTF-16
            seek = 2 # depends on [control=['if'], data=[]]
        # depends on [control=['if'], data=[]]
    # Set the read position past the BOM if one was found, otherwise
    # set it to the start of the stream
    self.rawStream.seek(encoding and seek or 0)
    return encoding
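A self-contained sketch exercising detectBOM on in-memory streams; _Holder is a hypothetical stand-in for the object that owns rawStream:

import codecs
import io

class _Holder:
    detectBOM = detectBOM  # reuse the method defined above
    def __init__(self, data):
        self.rawStream = io.BytesIO(data)

print(_Holder(codecs.BOM_UTF8 + b"<html>").detectBOM())  # 'utf-8', seek at 3
print(_Holder(b"no bom here").detectBOM())               # None, seek back to 0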
def register_admin_models(admin_site): """Registers dynamically created preferences models for Admin interface. :param admin.AdminSite admin_site: AdminSite object. """ global __MODELS_REGISTRY prefs = get_prefs() for app_label, prefs_items in prefs.items(): model_class = get_pref_model_class(app_label, prefs_items, get_app_prefs) if model_class is not None: __MODELS_REGISTRY[app_label] = model_class admin_site.register(model_class, get_pref_model_admin_class(prefs_items))
def function[register_admin_models, parameter[admin_site]]: constant[Registers dynamically created preferences models for Admin interface. :param admin.AdminSite admin_site: AdminSite object. ] <ast.Global object at 0x7da1afe1bd30> variable[prefs] assign[=] call[name[get_prefs], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b003fb20>, <ast.Name object at 0x7da1b003c040>]]] in starred[call[name[prefs].items, parameter[]]] begin[:] variable[model_class] assign[=] call[name[get_pref_model_class], parameter[name[app_label], name[prefs_items], name[get_app_prefs]]] if compare[name[model_class] is_not constant[None]] begin[:] call[name[__MODELS_REGISTRY]][name[app_label]] assign[=] name[model_class] call[name[admin_site].register, parameter[name[model_class], call[name[get_pref_model_admin_class], parameter[name[prefs_items]]]]]
keyword[def] identifier[register_admin_models] ( identifier[admin_site] ): literal[string] keyword[global] identifier[__MODELS_REGISTRY] identifier[prefs] = identifier[get_prefs] () keyword[for] identifier[app_label] , identifier[prefs_items] keyword[in] identifier[prefs] . identifier[items] (): identifier[model_class] = identifier[get_pref_model_class] ( identifier[app_label] , identifier[prefs_items] , identifier[get_app_prefs] ) keyword[if] identifier[model_class] keyword[is] keyword[not] keyword[None] : identifier[__MODELS_REGISTRY] [ identifier[app_label] ]= identifier[model_class] identifier[admin_site] . identifier[register] ( identifier[model_class] , identifier[get_pref_model_admin_class] ( identifier[prefs_items] ))
def register_admin_models(admin_site): """Registers dynamically created preferences models for Admin interface. :param admin.AdminSite admin_site: AdminSite object. """ global __MODELS_REGISTRY prefs = get_prefs() for (app_label, prefs_items) in prefs.items(): model_class = get_pref_model_class(app_label, prefs_items, get_app_prefs) if model_class is not None: __MODELS_REGISTRY[app_label] = model_class admin_site.register(model_class, get_pref_model_admin_class(prefs_items)) # depends on [control=['if'], data=['model_class']] # depends on [control=['for'], data=[]]
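Typical wiring for register_admin_models, following its docstring: call it once at startup with Django's default admin site (a sketch; where you call it from is project-specific):

from django.contrib import admin

# Registers one dynamically created preferences model per app with prefs.
register_admin_models(admin.site)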
def do_grep(self, path, match): """ grep's work horse """ try: children = self.get_children(path) except (NoNodeError, NoAuthError): children = [] for child in children: full_path = os.path.join(path, child) try: value, _ = self.get(full_path) except (NoNodeError, NoAuthError): value = "" if value is not None: matches = [line for line in value.split("\n") if match.search(line)] if len(matches) > 0: yield (full_path, matches) for mpath, matches in self.do_grep(full_path, match): yield (mpath, matches)
def function[do_grep, parameter[self, path, match]]: constant[ grep's work horse ] <ast.Try object at 0x7da20c9920e0> for taget[name[child]] in starred[name[children]] begin[:] variable[full_path] assign[=] call[name[os].path.join, parameter[name[path], name[child]]] <ast.Try object at 0x7da18f00f430> if compare[name[value] is_not constant[None]] begin[:] variable[matches] assign[=] <ast.ListComp object at 0x7da18f00e650> if compare[call[name[len], parameter[name[matches]]] greater[>] constant[0]] begin[:] <ast.Yield object at 0x7da18f00fb50> for taget[tuple[[<ast.Name object at 0x7da18f00ee30>, <ast.Name object at 0x7da18f00d150>]]] in starred[call[name[self].do_grep, parameter[name[full_path], name[match]]]] begin[:] <ast.Yield object at 0x7da18f00e140>
keyword[def] identifier[do_grep] ( identifier[self] , identifier[path] , identifier[match] ): literal[string] keyword[try] : identifier[children] = identifier[self] . identifier[get_children] ( identifier[path] ) keyword[except] ( identifier[NoNodeError] , identifier[NoAuthError] ): identifier[children] =[] keyword[for] identifier[child] keyword[in] identifier[children] : identifier[full_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[child] ) keyword[try] : identifier[value] , identifier[_] = identifier[self] . identifier[get] ( identifier[full_path] ) keyword[except] ( identifier[NoNodeError] , identifier[NoAuthError] ): identifier[value] = literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : identifier[matches] =[ identifier[line] keyword[for] identifier[line] keyword[in] identifier[value] . identifier[split] ( literal[string] ) keyword[if] identifier[match] . identifier[search] ( identifier[line] )] keyword[if] identifier[len] ( identifier[matches] )> literal[int] : keyword[yield] ( identifier[full_path] , identifier[matches] ) keyword[for] identifier[mpath] , identifier[matches] keyword[in] identifier[self] . identifier[do_grep] ( identifier[full_path] , identifier[match] ): keyword[yield] ( identifier[mpath] , identifier[matches] )
def do_grep(self, path, match): """ grep's work horse """ try: children = self.get_children(path) # depends on [control=['try'], data=[]] except (NoNodeError, NoAuthError): children = [] # depends on [control=['except'], data=[]] for child in children: full_path = os.path.join(path, child) try: (value, _) = self.get(full_path) # depends on [control=['try'], data=[]] except (NoNodeError, NoAuthError): value = '' # depends on [control=['except'], data=[]] if value is not None: matches = [line for line in value.split('\n') if match.search(line)] if len(matches) > 0: yield (full_path, matches) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['value']] for (mpath, matches) in self.do_grep(full_path, match): yield (mpath, matches) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['child']]
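do_grep yields (path, matching_lines) pairs while walking the tree depth-first. A runnable sketch against a hypothetical fake client (the NoNodeError/NoAuthError names suggest a Kazoo-style ZooKeeper client; they are only looked up if get()/get_children() actually raise):

import os
import re

class _FakeZK:
    do_grep = do_grep  # reuse the generator defined above
    tree = {"/configs": ["app"], "/configs/app": []}
    data = {"/configs/app": "timeout=30\nretries=2"}
    def get_children(self, path):
        return self.tree.get(path, [])
    def get(self, path):
        return self.data.get(path, ""), None

for path, lines in _FakeZK().do_grep("/configs", re.compile("timeout")):
    print(path, lines)  # /configs/app ['timeout=30']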
def depth(args): """ %prog depth DP.tsv Plot read depths across all TREDs. """ import seaborn as sns p = OptionParser(depth.__doc__) opts, args, iopts = p.set_image_options(args, figsize="14x14") if len(args) != 1: sys.exit(not p.print_help()) tsvfile, = args fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(ncols=2, nrows=2, figsize=(iopts.w, iopts.h)) plt.tight_layout(pad=6) data = pd.read_csv(tsvfile, sep="\t", low_memory=False) ids, treds = read_treds() for (dp, ax, title) in zip(("FDP", "PDP", "RDP", "PEDP"), (ax1, ax2, ax3, ax4), ("Spanning reads", "Partial reads", "Repeat-only reads", "Paired-end reads")): logging.debug("Build {}".format(title)) # Construct related data structure xd = [] # (tred, dp) mdp = [] # (tred, median_dp) for tred, motif in zip(treds["abbreviation"], treds["motif"]): if tred in ignore: logging.debug("Ignore {}".format(tred)) continue if len(motif) > 4: if "/" in motif: # CTG/CAG motif = motif.split("/")[0] else: motif = motif[:4] + ".." xtred = "{} {}".format(tred, motif) md = [x for x in data[tred + '.' + dp] if x >= 0] subsample = 10000 if dp == "RDP" else 1000 md = sample(md, subsample) pmd = [x for x in md if x > 0] median = np.median(pmd) if pmd else 0 mdp.append((xtred, median)) for d in md: xd.append((xtred, d)) # Determine order mdp.sort(key=lambda x: x[1]) order, mdp = zip(*mdp) # OK, now plot xt, xd = zip(*xd) sns.boxplot(xt, xd, ax=ax, order=order, fliersize=2) xticklabels = ax.get_xticklabels() ax.set_xticklabels(xticklabels, rotation=45, ha="right") ax.set_title("Number of {} per locus".format(title), size=18) ylim = 30 if dp == "RDP" else 100 ax.set_ylim(0, ylim) yticklabels = [int(x) for x in ax.get_yticks()] ax.set_yticklabels(yticklabels, family='Helvetica', size=14) root = fig.add_axes([0, 0, 1, 1]) pad = .04 panel_labels(root, ((pad, 1 - pad, "A"), (1 / 2. + pad / 2, 1 - pad, "B"), (pad, .5 - pad / 2, "C"), (1 / 2. + pad / 2, .5 - pad / 2, "D"))) normalize_axes(root) image_name = "depth." + iopts.format savefig(image_name, dpi=iopts.dpi, iopts=iopts)
def function[depth, parameter[args]]: constant[ %prog depth DP.tsv Plot read depths across all TREDs. ] import module[seaborn] as alias[sns] variable[p] assign[=] call[name[OptionParser], parameter[name[depth].__doc__]] <ast.Tuple object at 0x7da1b084c370> assign[=] call[name[p].set_image_options, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[1]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b084ffd0>]] <ast.Tuple object at 0x7da1b084feb0> assign[=] name[args] <ast.Tuple object at 0x7da1b084fdf0> assign[=] call[name[plt].subplots, parameter[]] call[name[plt].tight_layout, parameter[]] variable[data] assign[=] call[name[pd].read_csv, parameter[name[tsvfile]]] <ast.Tuple object at 0x7da1b084f670> assign[=] call[name[read_treds], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b084f520>, <ast.Name object at 0x7da1b084f4f0>, <ast.Name object at 0x7da1b084f4c0>]]] in starred[call[name[zip], parameter[tuple[[<ast.Constant object at 0x7da1b084f400>, <ast.Constant object at 0x7da1b084f3d0>, <ast.Constant object at 0x7da1b084f3a0>, <ast.Constant object at 0x7da1b084f370>]], tuple[[<ast.Name object at 0x7da1b084f310>, <ast.Name object at 0x7da1b084f2e0>, <ast.Name object at 0x7da1b084f2b0>, <ast.Name object at 0x7da1b084f280>]], tuple[[<ast.Constant object at 0x7da1b084f220>, <ast.Constant object at 0x7da1b084f1f0>, <ast.Constant object at 0x7da1b084f1c0>, <ast.Constant object at 0x7da1b084f190>]]]]] begin[:] call[name[logging].debug, parameter[call[constant[Build {}].format, parameter[name[title]]]]] variable[xd] assign[=] list[[]] variable[mdp] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b084ee30>, <ast.Name object at 0x7da1b084ee00>]]] in starred[call[name[zip], parameter[call[name[treds]][constant[abbreviation]], call[name[treds]][constant[motif]]]]] begin[:] if compare[name[tred] in name[ignore]] begin[:] call[name[logging].debug, parameter[call[constant[Ignore {}].format, parameter[name[tred]]]]] continue if compare[call[name[len], parameter[name[motif]]] greater[>] constant[4]] begin[:] if compare[constant[/] in name[motif]] begin[:] variable[motif] assign[=] call[call[name[motif].split, parameter[constant[/]]]][constant[0]] variable[xtred] assign[=] call[constant[{} {}].format, parameter[name[tred], name[motif]]] variable[md] assign[=] <ast.ListComp object at 0x7da1b084e2f0> variable[subsample] assign[=] <ast.IfExp object at 0x7da1b084dfc0> variable[md] assign[=] call[name[sample], parameter[name[md], name[subsample]]] variable[pmd] assign[=] <ast.ListComp object at 0x7da1b084dd20> variable[median] assign[=] <ast.IfExp object at 0x7da1b084db10> call[name[mdp].append, parameter[tuple[[<ast.Name object at 0x7da1b084d8d0>, <ast.Name object at 0x7da1b084d8a0>]]]] for taget[name[d]] in starred[name[md]] begin[:] call[name[xd].append, parameter[tuple[[<ast.Name object at 0x7da1b084d6c0>, <ast.Name object at 0x7da1b084d690>]]]] call[name[mdp].sort, parameter[]] <ast.Tuple object at 0x7da1b084d3c0> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b084c790>]] <ast.Tuple object at 0x7da1b084c820> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b084c910>]] call[name[sns].boxplot, parameter[name[xt], name[xd]]] variable[xticklabels] assign[=] call[name[ax].get_xticklabels, parameter[]] call[name[ax].set_xticklabels, parameter[name[xticklabels]]] call[name[ax].set_title, parameter[call[constant[Number of {} per locus].format, parameter[name[title]]]]] variable[ylim] 
assign[=] <ast.IfExp object at 0x7da1b084d120> call[name[ax].set_ylim, parameter[constant[0], name[ylim]]] variable[yticklabels] assign[=] <ast.ListComp object at 0x7da18f720340> call[name[ax].set_yticklabels, parameter[name[yticklabels]]] variable[root] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da18f720c70>, <ast.Constant object at 0x7da18f722a40>, <ast.Constant object at 0x7da18f723040>, <ast.Constant object at 0x7da18f722b90>]]]] variable[pad] assign[=] constant[0.04] call[name[panel_labels], parameter[name[root], tuple[[<ast.Tuple object at 0x7da18f7205e0>, <ast.Tuple object at 0x7da18f7217e0>, <ast.Tuple object at 0x7da18f7215a0>, <ast.Tuple object at 0x7da18f722020>]]]] call[name[normalize_axes], parameter[name[root]]] variable[image_name] assign[=] binary_operation[constant[depth.] + name[iopts].format] call[name[savefig], parameter[name[image_name]]]
keyword[def] identifier[depth] ( identifier[args] ): literal[string] keyword[import] identifier[seaborn] keyword[as] identifier[sns] identifier[p] = identifier[OptionParser] ( identifier[depth] . identifier[__doc__] ) identifier[opts] , identifier[args] , identifier[iopts] = identifier[p] . identifier[set_image_options] ( identifier[args] , identifier[figsize] = literal[string] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[tsvfile] ,= identifier[args] identifier[fig] ,(( identifier[ax1] , identifier[ax2] ),( identifier[ax3] , identifier[ax4] ))= identifier[plt] . identifier[subplots] ( identifier[ncols] = literal[int] , identifier[nrows] = literal[int] , identifier[figsize] =( identifier[iopts] . identifier[w] , identifier[iopts] . identifier[h] )) identifier[plt] . identifier[tight_layout] ( identifier[pad] = literal[int] ) identifier[data] = identifier[pd] . identifier[read_csv] ( identifier[tsvfile] , identifier[sep] = literal[string] , identifier[low_memory] = keyword[False] ) identifier[ids] , identifier[treds] = identifier[read_treds] () keyword[for] ( identifier[dp] , identifier[ax] , identifier[title] ) keyword[in] identifier[zip] (( literal[string] , literal[string] , literal[string] , literal[string] ), ( identifier[ax1] , identifier[ax2] , identifier[ax3] , identifier[ax4] ), ( literal[string] , literal[string] , literal[string] , literal[string] )): identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[title] )) identifier[xd] =[] identifier[mdp] =[] keyword[for] identifier[tred] , identifier[motif] keyword[in] identifier[zip] ( identifier[treds] [ literal[string] ], identifier[treds] [ literal[string] ]): keyword[if] identifier[tred] keyword[in] identifier[ignore] : identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[tred] )) keyword[continue] keyword[if] identifier[len] ( identifier[motif] )> literal[int] : keyword[if] literal[string] keyword[in] identifier[motif] : identifier[motif] = identifier[motif] . identifier[split] ( literal[string] )[ literal[int] ] keyword[else] : identifier[motif] = identifier[motif] [: literal[int] ]+ literal[string] identifier[xtred] = literal[string] . identifier[format] ( identifier[tred] , identifier[motif] ) identifier[md] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[data] [ identifier[tred] + literal[string] + identifier[dp] ] keyword[if] identifier[x] >= literal[int] ] identifier[subsample] = literal[int] keyword[if] identifier[dp] == literal[string] keyword[else] literal[int] identifier[md] = identifier[sample] ( identifier[md] , identifier[subsample] ) identifier[pmd] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[md] keyword[if] identifier[x] > literal[int] ] identifier[median] = identifier[np] . identifier[median] ( identifier[pmd] ) keyword[if] identifier[pmd] keyword[else] literal[int] identifier[mdp] . identifier[append] (( identifier[xtred] , identifier[median] )) keyword[for] identifier[d] keyword[in] identifier[md] : identifier[xd] . identifier[append] (( identifier[xtred] , identifier[d] )) identifier[mdp] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]) identifier[order] , identifier[mdp] = identifier[zip] (* identifier[mdp] ) identifier[xt] , identifier[xd] = identifier[zip] (* identifier[xd] ) identifier[sns] . identifier[boxplot] ( identifier[xt] , identifier[xd] , identifier[ax] = identifier[ax] , identifier[order] = identifier[order] , identifier[fliersize] = literal[int] ) identifier[xticklabels] = identifier[ax] . identifier[get_xticklabels] () identifier[ax] . identifier[set_xticklabels] ( identifier[xticklabels] , identifier[rotation] = literal[int] , identifier[ha] = literal[string] ) identifier[ax] . identifier[set_title] ( literal[string] . identifier[format] ( identifier[title] ), identifier[size] = literal[int] ) identifier[ylim] = literal[int] keyword[if] identifier[dp] == literal[string] keyword[else] literal[int] identifier[ax] . identifier[set_ylim] ( literal[int] , identifier[ylim] ) identifier[yticklabels] =[ identifier[int] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[ax] . identifier[get_yticks] ()] identifier[ax] . identifier[set_yticklabels] ( identifier[yticklabels] , identifier[family] = literal[string] , identifier[size] = literal[int] ) identifier[root] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[pad] = literal[int] identifier[panel_labels] ( identifier[root] ,(( identifier[pad] , literal[int] - identifier[pad] , literal[string] ),( literal[int] / literal[int] + identifier[pad] / literal[int] , literal[int] - identifier[pad] , literal[string] ), ( identifier[pad] , literal[int] - identifier[pad] / literal[int] , literal[string] ),( literal[int] / literal[int] + identifier[pad] / literal[int] , literal[int] - identifier[pad] / literal[int] , literal[string] ))) identifier[normalize_axes] ( identifier[root] ) identifier[image_name] = literal[string] + identifier[iopts] . identifier[format] identifier[savefig] ( identifier[image_name] , identifier[dpi] = identifier[iopts] . identifier[dpi] , identifier[iopts] = identifier[iopts] )
def depth(args): """ %prog depth DP.tsv Plot read depths across all TREDs. """ import seaborn as sns p = OptionParser(depth.__doc__) (opts, args, iopts) = p.set_image_options(args, figsize='14x14') if len(args) != 1: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (tsvfile,) = args (fig, ((ax1, ax2), (ax3, ax4))) = plt.subplots(ncols=2, nrows=2, figsize=(iopts.w, iopts.h)) plt.tight_layout(pad=6) data = pd.read_csv(tsvfile, sep='\t', low_memory=False) (ids, treds) = read_treds() for (dp, ax, title) in zip(('FDP', 'PDP', 'RDP', 'PEDP'), (ax1, ax2, ax3, ax4), ('Spanning reads', 'Partial reads', 'Repeat-only reads', 'Paired-end reads')): logging.debug('Build {}'.format(title)) # Construct related data structure xd = [] # (tred, dp) mdp = [] # (tred, median_dp) for (tred, motif) in zip(treds['abbreviation'], treds['motif']): if tred in ignore: logging.debug('Ignore {}'.format(tred)) continue # depends on [control=['if'], data=['tred']] if len(motif) > 4: if '/' in motif: # CTG/CAG motif = motif.split('/')[0] # depends on [control=['if'], data=['motif']] else: motif = motif[:4] + '..' # depends on [control=['if'], data=[]] xtred = '{} {}'.format(tred, motif) md = [x for x in data[tred + '.' + dp] if x >= 0] subsample = 10000 if dp == 'RDP' else 1000 md = sample(md, subsample) pmd = [x for x in md if x > 0] median = np.median(pmd) if pmd else 0 mdp.append((xtred, median)) for d in md: xd.append((xtred, d)) # depends on [control=['for'], data=['d']] # depends on [control=['for'], data=[]] # Determine order mdp.sort(key=lambda x: x[1]) (order, mdp) = zip(*mdp) # OK, now plot (xt, xd) = zip(*xd) sns.boxplot(xt, xd, ax=ax, order=order, fliersize=2) xticklabels = ax.get_xticklabels() ax.set_xticklabels(xticklabels, rotation=45, ha='right') ax.set_title('Number of {} per locus'.format(title), size=18) ylim = 30 if dp == 'RDP' else 100 ax.set_ylim(0, ylim) yticklabels = [int(x) for x in ax.get_yticks()] ax.set_yticklabels(yticklabels, family='Helvetica', size=14) # depends on [control=['for'], data=[]] root = fig.add_axes([0, 0, 1, 1]) pad = 0.04 panel_labels(root, ((pad, 1 - pad, 'A'), (1 / 2.0 + pad / 2, 1 - pad, 'B'), (pad, 0.5 - pad / 2, 'C'), (1 / 2.0 + pad / 2, 0.5 - pad / 2, 'D'))) normalize_axes(root) image_name = 'depth.' + iopts.format savefig(image_name, dpi=iopts.dpi, iopts=iopts)
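A note on the depth() entry above: the boxes are ordered by median read depth, computed per locus and sorted before being handed to seaborn's order= argument. A minimal, self-contained sketch of that idiom follows, with invented toy data; the locus names and counts below are placeholders, not values from the TRED dataset.

# Hypothetical data; only the median-ordered boxplot idiom mirrors depth().
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
df = pd.DataFrame({
    'locus': rng.choice(['HD CAG', 'DM1 CTG', 'FXS CGG'], size=300),
    'depth': rng.integers(0, 50, size=300),
})
# Sort loci by median depth, mirroring mdp.sort(key=lambda x: x[1]) above.
order = df.groupby('locus')['depth'].median().sort_values().index.tolist()
ax = sns.boxplot(x='locus', y='depth', data=df, order=order, fliersize=2)
ax.set_xticklabels(ax.get_xticklabels(), rotation=45, ha='right')
plt.show()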
def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: FieldTypeContext for this FieldTypeInstance :rtype: twilio.rest.autopilot.v1.assistant.field_type.FieldTypeContext """ if self._context is None: self._context = FieldTypeContext( self._version, assistant_sid=self._solution['assistant_sid'], sid=self._solution['sid'], ) return self._context
def function[_proxy, parameter[self]]: constant[ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: FieldTypeContext for this FieldTypeInstance :rtype: twilio.rest.autopilot.v1.assistant.field_type.FieldTypeContext ] if compare[name[self]._context is constant[None]] begin[:] name[self]._context assign[=] call[name[FieldTypeContext], parameter[name[self]._version]] return[name[self]._context]
keyword[def] identifier[_proxy] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_context] keyword[is] keyword[None] : identifier[self] . identifier[_context] = identifier[FieldTypeContext] ( identifier[self] . identifier[_version] , identifier[assistant_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ], ) keyword[return] identifier[self] . identifier[_context]
def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: FieldTypeContext for this FieldTypeInstance :rtype: twilio.rest.autopilot.v1.assistant.field_type.FieldTypeContext """ if self._context is None: self._context = FieldTypeContext(self._version, assistant_sid=self._solution['assistant_sid'], sid=self._solution['sid']) # depends on [control=['if'], data=[]] return self._context
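The _proxy() method above builds its FieldTypeContext lazily on first call and caches it on self._context, so repeated calls return the same object. A library-independent sketch of that memoization idiom, where Context and the attribute names are stand-ins rather than Twilio's API:

# Minimal sketch of lazy context creation; all names here are hypothetical.
class Context:
    def __init__(self, version, sid):
        self.version, self.sid = version, sid

class Instance:
    def __init__(self, version, sid):
        self._version = version
        self._solution = {'sid': sid}
        self._context = None          # built on demand, then reused

    def _proxy(self):
        if self._context is None:     # first call: construct exactly once
            self._context = Context(self._version, sid=self._solution['sid'])
        return self._context

inst = Instance('v1', 'FT123')
assert inst._proxy() is inst._proxy()  # the same cached context every time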
def startTimer(self): """Starts the timer for this source""" self.td = self.t.start(self.inter) if self.use_ssh and self.ssh_connector: self.ssh_client.connect()
def function[startTimer, parameter[self]]: constant[Starts the timer for this source] name[self].td assign[=] call[name[self].t.start, parameter[name[self].inter]] if <ast.BoolOp object at 0x7da2044c2980> begin[:] call[name[self].ssh_client.connect, parameter[]]
keyword[def] identifier[startTimer] ( identifier[self] ): literal[string] identifier[self] . identifier[td] = identifier[self] . identifier[t] . identifier[start] ( identifier[self] . identifier[inter] ) keyword[if] identifier[self] . identifier[use_ssh] keyword[and] identifier[self] . identifier[ssh_connector] : identifier[self] . identifier[ssh_client] . identifier[connect] ()
def startTimer(self): """Starts the timer for this source""" self.td = self.t.start(self.inter) if self.use_ssh and self.ssh_connector: self.ssh_client.connect() # depends on [control=['if'], data=[]]
def load_js(abspath, default=dict(), compress=False, enable_verbose=True):
    """Load Json from file. If the file does not exist, returns ``default``.

    :param abspath: File path. Use absolute path as much as you can. 
      File extension has to be ``.json`` or ``.gz``. (for compressed Json)
    :type abspath: string

    :param default: (default dict()) If ``abspath`` does not exist, return the 
      default Python object instead.

    :param compress: (default False) Load from a gzip compressed Json file.
      Check :func:`dump_js()<dump_js>` function for more information.
    :type compress: boolean

    :param enable_verbose: (default True) Trigger for message.
    :type enable_verbose: boolean

    Usage::

        >>> from weatherlab.lib.dataIO.js import load_js
        >>> load_js("test.json") # if you have a json file
        Loading from test.json...
            Complete! Elapse 0.000432 sec.
        {'a': 1, 'b': 2}

    **Chinese documentation**

    Read data from a Json file.

    Parameters

    :param abspath: file path; the extension must be ``.json`` or ``.gz``
    :type abspath: ``string``

    :param default: (default dict()) if the file path does not exist, a default
      Python object is returned instead.

    :param compress: (default False) whether to read data from a
      gzip-compressed Json file. See :func:`dump_js()<dump_js>` for more
      information.
    :type compress: ``boolean``

    :param enable_verbose: (default True) whether to print progress messages;
      recommended to turn off for batch processing.
    :type enable_verbose: ``boolean``
    """
    abspath = str(abspath) # try stringlize

    if compress: # check extension name
        if os.path.splitext(abspath)[1] != ".gz":
            raise Exception("compressed json has to use extension '.gz'!")
    else:
        if os.path.splitext(abspath)[1] != ".json":
            raise Exception("file extension are not '.json'!")

    if enable_verbose:
        print("\nLoading from %s..." % abspath)
        st = time.clock()

    if os.path.exists(abspath): # exists, then load
        if compress:
            with gzip.open(abspath, "rb") as f:
                js = json.loads(f.read().decode("utf-8"))
        else:
            with open(abspath, "r") as f:
                js = json.load(f)
        if enable_verbose:
            print("\tComplete! Elapse %.6f sec." % (time.clock() - st) )
        return js

    else:
        if enable_verbose:
            print("\t%s not exists! cannot load! Create an default object "
                  "instead" % abspath)
        return default
def function[load_js, parameter[abspath, default, compress, enable_verbose]]: constant[Load Json from file. If file are not exists, returns ``default``. :param abspath: File path. Use absolute path as much as you can. File extension has to be ``.json`` or ``.gz``. (for compressed Json) :type abspath: string :param default: (default dict()) If ``abspath`` not exists, return the default Python object instead. :param compress: (default False) Load from a gzip compressed Json file. Check :func:`dump_js()<dump_js>` function for more information. :type compress: boolean :param enable_verbose: (default True) Trigger for message. :type enable_verbose: boolean Usage:: >>> from weatherlab.lib.dataIO.js import load_js >>> load_js("test.json") # if you have a json file Loading from test.json... Complete! Elapse 0.000432 sec. {'a': 1, 'b': 2} **中文文档** 从Json文件中读取数据 参数列表 :param abspath: 文件路径, 扩展名需为 ``.json`` 或 ``.gz`` :type abspath: ``字符串`` :param default: (默认 dict()) 如果文件路径不存在, 则会返回一个默认的Python对象。 :param compress: (默认 False) 是否从一个gzip压缩过的Json文件中读取数据。 请 参考 :func:`dump_js()<dump_js>` 获得更多信息. :type compress: ``布尔值`` :param enable_verbose: (默认 True) 是否打开信息提示开关, 批处理时建议关闭. :type enable_verbose: ``布尔值`` ] variable[abspath] assign[=] call[name[str], parameter[name[abspath]]] if name[compress] begin[:] if compare[call[call[name[os].path.splitext, parameter[name[abspath]]]][constant[1]] not_equal[!=] constant[.gz]] begin[:] <ast.Raise object at 0x7da1b1640850> if name[enable_verbose] begin[:] call[name[print], parameter[binary_operation[constant[ Loading from %s...] <ast.Mod object at 0x7da2590d6920> name[abspath]]]] variable[st] assign[=] call[name[time].clock, parameter[]] if call[name[os].path.exists, parameter[name[abspath]]] begin[:] if name[compress] begin[:] with call[name[gzip].open, parameter[name[abspath], constant[rb]]] begin[:] variable[js] assign[=] call[name[json].loads, parameter[call[call[name[f].read, parameter[]].decode, parameter[constant[utf-8]]]]] if name[enable_verbose] begin[:] call[name[print], parameter[binary_operation[constant[ Complete! Elapse %.6f sec.] <ast.Mod object at 0x7da2590d6920> binary_operation[call[name[time].clock, parameter[]] - name[st]]]]] return[name[js]]
keyword[def] identifier[load_js] ( identifier[abspath] , identifier[default] = identifier[dict] (), identifier[compress] = keyword[False] , identifier[enable_verbose] = keyword[True] ): literal[string] identifier[abspath] = identifier[str] ( identifier[abspath] ) keyword[if] identifier[compress] : keyword[if] identifier[os] . identifier[path] . identifier[splitext] ( identifier[abspath] )[ literal[int] ]!= literal[string] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[else] : keyword[if] identifier[os] . identifier[path] . identifier[splitext] ( identifier[abspath] )[ literal[int] ]!= literal[string] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[enable_verbose] : identifier[print] ( literal[string] % identifier[abspath] ) identifier[st] = identifier[time] . identifier[clock] () keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[abspath] ): keyword[if] identifier[compress] : keyword[with] identifier[gzip] . identifier[open] ( identifier[abspath] , literal[string] ) keyword[as] identifier[f] : identifier[js] = identifier[json] . identifier[loads] ( identifier[f] . identifier[read] (). identifier[decode] ( literal[string] )) keyword[else] : keyword[with] identifier[open] ( identifier[abspath] , literal[string] ) keyword[as] identifier[f] : identifier[js] = identifier[json] . identifier[load] ( identifier[f] ) keyword[if] identifier[enable_verbose] : identifier[print] ( literal[string] %( identifier[time] . identifier[clock] ()- identifier[st] )) keyword[return] identifier[js] keyword[else] : keyword[if] identifier[enable_verbose] : identifier[print] ( literal[string] literal[string] % identifier[abspath] ) keyword[return] identifier[default]
def load_js(abspath, default=dict(), compress=False, enable_verbose=True): """Load Json from file. If file are not exists, returns ``default``. :param abspath: File path. Use absolute path as much as you can. File extension has to be ``.json`` or ``.gz``. (for compressed Json) :type abspath: string :param default: (default dict()) If ``abspath`` not exists, return the default Python object instead. :param compress: (default False) Load from a gzip compressed Json file. Check :func:`dump_js()<dump_js>` function for more information. :type compress: boolean :param enable_verbose: (default True) Trigger for message. :type enable_verbose: boolean Usage:: >>> from weatherlab.lib.dataIO.js import load_js >>> load_js("test.json") # if you have a json file Loading from test.json... Complete! Elapse 0.000432 sec. {'a': 1, 'b': 2} **中文文档** 从Json文件中读取数据 参数列表 :param abspath: 文件路径, 扩展名需为 ``.json`` 或 ``.gz`` :type abspath: ``字符串`` :param default: (默认 dict()) 如果文件路径不存在, 则会返回一个默认的Python对象。 :param compress: (默认 False) 是否从一个gzip压缩过的Json文件中读取数据。 请 参考 :func:`dump_js()<dump_js>` 获得更多信息. :type compress: ``布尔值`` :param enable_verbose: (默认 True) 是否打开信息提示开关, 批处理时建议关闭. :type enable_verbose: ``布尔值`` """ abspath = str(abspath) # try stringlize if compress: # check extension name if os.path.splitext(abspath)[1] != '.gz': raise Exception("compressed json has to use extension '.gz'!") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif os.path.splitext(abspath)[1] != '.json': raise Exception("file extension are not '.json'!") # depends on [control=['if'], data=[]] if enable_verbose: print('\nLoading from %s...' % abspath) st = time.clock() # depends on [control=['if'], data=[]] if os.path.exists(abspath): # exists, then load if compress: with gzip.open(abspath, 'rb') as f: js = json.loads(f.read().decode('utf-8')) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] else: with open(abspath, 'r') as f: js = json.load(f) # depends on [control=['with'], data=['f']] if enable_verbose: print('\tComplete! Elapse %.6f sec.' % (time.clock() - st)) # depends on [control=['if'], data=[]] return js # depends on [control=['if'], data=[]] else: if enable_verbose: print('\t%s not exists! cannot load! Create an default object instead' % abspath) # depends on [control=['if'], data=[]] return default
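For load_js() above, the two accepted on-disk formats can be exercised with only the standard library; the filenames below are placeholders. One caveat worth noting: the function times itself with time.clock(), which was removed in Python 3.8, so on modern interpreters the verbose path would need time.perf_counter() instead.

import gzip
import json

data = {'a': 1, 'b': 2}
with open('test.json', 'w') as f:            # plain .json file
    json.dump(data, f)
with gzip.open('test.gz', 'wb') as f:        # gzip-compressed .gz file
    f.write(json.dumps(data).encode('utf-8'))

with open('test.json', 'r') as f:
    assert json.load(f) == data
with gzip.open('test.gz', 'rb') as f:        # same decode path as load_js
    assert json.loads(f.read().decode('utf-8')) == data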
def compute_evolution_by_frequency( df, id_cols: List[str], date_col: Union[str, Dict[str, str]], value_col: str, freq=1, method: str = 'abs', format: str = 'column', offseted_suffix: str = '_offseted', evolution_col_name: str = 'evolution_computed', missing_date_as_zero: bool = False, raise_duplicate_error: bool = True ): """ This function answers the question: how has a value changed on a weekly, monthly, yearly basis ? --- ### Parameters *mandatory :* - `id_cols` (*list*): name of the columns used to create each group. - `date_col` (*str or dict*): either directly the name of the column containing the date or a dictionary with: - `selector` (*str*): the name of the column - `format` (*str*): the format of the date (see [pandas doc]( https://docs.python.org/3/library/datetime.html#strftime-and-strptime-behavior)) - `value_col` (*str*): name of the column containing the value to compare. *optional :* - `freq` (*int/pd.DateOffset/pd.Serie/dict*): the frequency at which we calculate evolutions - `method` (*str*): either `"abs"` for absolute values or `"pct"` for the evolution in percentage of previous value. - `offseted_suffix` (*str*): suffix of the offseted column. By default, `"_offseted"`. - `evolution_col_name` (*str*): name given to the evolution column. By default, `"evolution_computed"`. - `missing_date_as_zero` (*boolean*): add missing date with zero value. - `raise_duplicate_error` (*boolean*): raise an error when the dataset has duplicated values with the given `id_cols`. - `format` (*str*): `'df'` # Do not change it !!! --- ### Example **Input** | id_cols | value_col | date_col| |:---------:|:------------:|:----------:| | A | 20 | 2010| | | 7 | 2011| | B | 200 | 2010| | | 220 | 2011| | C | 100 | 2011| ```cson compute_evolution_by_frequency: id_cols: "id_cols" date_col: "date_col" value_col: "value_col" ``` **Output** | id_cols | value_col | date_col| evolution| |:---------:|:------------:|:----------:|:---------:| | A | 20 | 2010| null| | | 7 | 2011| -13| | B | 200 | 2010| null| | | 220 | 2011| 20| | C | 100 | 2011| null| """ if missing_date_as_zero: how = 'outer' fillna = 0 else: how = 'left' fillna = None return __compute_evolution( df=df, id_cols=id_cols, value_col=value_col, date_col=date_col, freq=freq, method=method, format=format, offseted_suffix=offseted_suffix, evolution_col_name=evolution_col_name, how=how, fillna=fillna, raise_duplicate_error=raise_duplicate_error )
def function[compute_evolution_by_frequency, parameter[df, id_cols, date_col, value_col, freq, method, format, offseted_suffix, evolution_col_name, missing_date_as_zero, raise_duplicate_error]]: constant[ This function answers the question: how has a value changed on a weekly, monthly, yearly basis ? --- ### Parameters *mandatory :* - `id_cols` (*list*): name of the columns used to create each group. - `date_col` (*str or dict*): either directly the name of the column containing the date or a dictionary with: - `selector` (*str*): the name of the column - `format` (*str*): the format of the date (see [pandas doc]( https://docs.python.org/3/library/datetime.html#strftime-and-strptime-behavior)) - `value_col` (*str*): name of the column containing the value to compare. *optional :* - `freq` (*int/pd.DateOffset/pd.Serie/dict*): the frequency at which we calculate evolutions - `method` (*str*): either `"abs"` for absolute values or `"pct"` for the evolution in percentage of previous value. - `offseted_suffix` (*str*): suffix of the offseted column. By default, `"_offseted"`. - `evolution_col_name` (*str*): name given to the evolution column. By default, `"evolution_computed"`. - `missing_date_as_zero` (*boolean*): add missing date with zero value. - `raise_duplicate_error` (*boolean*): raise an error when the dataset has duplicated values with the given `id_cols`. - `format` (*str*): `'df'` # Do not change it !!! --- ### Example **Input** | id_cols | value_col | date_col| |:---------:|:------------:|:----------:| | A | 20 | 2010| | | 7 | 2011| | B | 200 | 2010| | | 220 | 2011| | C | 100 | 2011| ```cson compute_evolution_by_frequency: id_cols: "id_cols" date_col: "date_col" value_col: "value_col" ``` **Output** | id_cols | value_col | date_col| evolution| |:---------:|:------------:|:----------:|:---------:| | A | 20 | 2010| null| | | 7 | 2011| -13| | B | 200 | 2010| null| | | 220 | 2011| 20| | C | 100 | 2011| null| ] if name[missing_date_as_zero] begin[:] variable[how] assign[=] constant[outer] variable[fillna] assign[=] constant[0] return[call[name[__compute_evolution], parameter[]]]
keyword[def] identifier[compute_evolution_by_frequency] ( identifier[df] , identifier[id_cols] : identifier[List] [ identifier[str] ], identifier[date_col] : identifier[Union] [ identifier[str] , identifier[Dict] [ identifier[str] , identifier[str] ]], identifier[value_col] : identifier[str] , identifier[freq] = literal[int] , identifier[method] : identifier[str] = literal[string] , identifier[format] : identifier[str] = literal[string] , identifier[offseted_suffix] : identifier[str] = literal[string] , identifier[evolution_col_name] : identifier[str] = literal[string] , identifier[missing_date_as_zero] : identifier[bool] = keyword[False] , identifier[raise_duplicate_error] : identifier[bool] = keyword[True] ): literal[string] keyword[if] identifier[missing_date_as_zero] : identifier[how] = literal[string] identifier[fillna] = literal[int] keyword[else] : identifier[how] = literal[string] identifier[fillna] = keyword[None] keyword[return] identifier[__compute_evolution] ( identifier[df] = identifier[df] , identifier[id_cols] = identifier[id_cols] , identifier[value_col] = identifier[value_col] , identifier[date_col] = identifier[date_col] , identifier[freq] = identifier[freq] , identifier[method] = identifier[method] , identifier[format] = identifier[format] , identifier[offseted_suffix] = identifier[offseted_suffix] , identifier[evolution_col_name] = identifier[evolution_col_name] , identifier[how] = identifier[how] , identifier[fillna] = identifier[fillna] , identifier[raise_duplicate_error] = identifier[raise_duplicate_error] )
def compute_evolution_by_frequency(df, id_cols: List[str], date_col: Union[str, Dict[str, str]], value_col: str, freq=1, method: str='abs', format: str='column', offseted_suffix: str='_offseted', evolution_col_name: str='evolution_computed', missing_date_as_zero: bool=False, raise_duplicate_error: bool=True): """ This function answers the question: how has a value changed on a weekly, monthly, yearly basis ? --- ### Parameters *mandatory :* - `id_cols` (*list*): name of the columns used to create each group. - `date_col` (*str or dict*): either directly the name of the column containing the date or a dictionary with: - `selector` (*str*): the name of the column - `format` (*str*): the format of the date (see [pandas doc]( https://docs.python.org/3/library/datetime.html#strftime-and-strptime-behavior)) - `value_col` (*str*): name of the column containing the value to compare. *optional :* - `freq` (*int/pd.DateOffset/pd.Serie/dict*): the frequency at which we calculate evolutions - `method` (*str*): either `"abs"` for absolute values or `"pct"` for the evolution in percentage of previous value. - `offseted_suffix` (*str*): suffix of the offseted column. By default, `"_offseted"`. - `evolution_col_name` (*str*): name given to the evolution column. By default, `"evolution_computed"`. - `missing_date_as_zero` (*boolean*): add missing date with zero value. - `raise_duplicate_error` (*boolean*): raise an error when the dataset has duplicated values with the given `id_cols`. - `format` (*str*): `'df'` # Do not change it !!! --- ### Example **Input** | id_cols | value_col | date_col| |:---------:|:------------:|:----------:| | A | 20 | 2010| | | 7 | 2011| | B | 200 | 2010| | | 220 | 2011| | C | 100 | 2011| ```cson compute_evolution_by_frequency: id_cols: "id_cols" date_col: "date_col" value_col: "value_col" ``` **Output** | id_cols | value_col | date_col| evolution| |:---------:|:------------:|:----------:|:---------:| | A | 20 | 2010| null| | | 7 | 2011| -13| | B | 200 | 2010| null| | | 220 | 2011| 20| | C | 100 | 2011| null| """ if missing_date_as_zero: how = 'outer' fillna = 0 # depends on [control=['if'], data=[]] else: how = 'left' fillna = None return __compute_evolution(df=df, id_cols=id_cols, value_col=value_col, date_col=date_col, freq=freq, method=method, format=format, offseted_suffix=offseted_suffix, evolution_col_name=evolution_col_name, how=how, fillna=fillna, raise_duplicate_error=raise_duplicate_error)
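compute_evolution_by_frequency() above delegates to a private __compute_evolution that is not part of this entry, but the arithmetic its docstring describes can be sketched with plain pandas: shift the date column by the frequency, self-merge on the id and date columns, and subtract. This is an assumption about the internals for illustration, not the library's actual code; column names follow the docstring example.

import pandas as pd

df = pd.DataFrame({'id_cols': ['A', 'A', 'B', 'B', 'C'],
                   'date_col': [2010, 2011, 2010, 2011, 2011],
                   'value_col': [20, 7, 200, 220, 100]})
freq = 1
offseted = df.assign(date_col=df['date_col'] + freq)   # previous period's value
merged = df.merge(offseted, on=['id_cols', 'date_col'],
                  how='left', suffixes=('', '_offseted'))
merged['evolution_computed'] = merged['value_col'] - merged['value_col_offseted']
print(merged)   # A/2011 -> -13.0, B/2011 -> 20.0, first years -> NaN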
def GetMessages(self, formatter_mediator, event): """Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter. """ if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) event_values = event.CopyToDict() page_transition_type = event_values.get('page_transition_type', None) if page_transition_type is not None: page_transition, page_transition_long = self._PAGE_TRANSITIONS.get( page_transition_type, self._UNKNOWN_PAGE_TRANSITION) if page_transition_long: event_values['page_transition'] = '{0:s} - {1:s}'.format( page_transition, page_transition_long) else: event_values['page_transition'] = page_transition visit_source = event_values.get('visit_source', None) if visit_source is not None: event_values['visit_source'] = self._VISIT_SOURCE.get( visit_source, 'UNKNOWN') extras = [] url_hidden = event_values.get('url_hidden', False) if url_hidden: extras.append('(url hidden)') typed_count = event_values.get('typed_count', 0) if typed_count == 0: extras.append('(URL not typed directly - no typed count)') elif typed_count == 1: extras.append('(type count {0:d} time)'.format(typed_count)) else: extras.append('(type count {0:d} times)'.format(typed_count)) event_values['extra'] = ' '.join(extras) return self._ConditionalFormatMessages(event_values)
def function[GetMessages, parameter[self, formatter_mediator, event]]: constant[Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter. ] if compare[name[self].DATA_TYPE not_equal[!=] name[event].data_type] begin[:] <ast.Raise object at 0x7da18fe93b50> variable[event_values] assign[=] call[name[event].CopyToDict, parameter[]] variable[page_transition_type] assign[=] call[name[event_values].get, parameter[constant[page_transition_type], constant[None]]] if compare[name[page_transition_type] is_not constant[None]] begin[:] <ast.Tuple object at 0x7da2041d90f0> assign[=] call[name[self]._PAGE_TRANSITIONS.get, parameter[name[page_transition_type], name[self]._UNKNOWN_PAGE_TRANSITION]] if name[page_transition_long] begin[:] call[name[event_values]][constant[page_transition]] assign[=] call[constant[{0:s} - {1:s}].format, parameter[name[page_transition], name[page_transition_long]]] variable[visit_source] assign[=] call[name[event_values].get, parameter[constant[visit_source], constant[None]]] if compare[name[visit_source] is_not constant[None]] begin[:] call[name[event_values]][constant[visit_source]] assign[=] call[name[self]._VISIT_SOURCE.get, parameter[name[visit_source], constant[UNKNOWN]]] variable[extras] assign[=] list[[]] variable[url_hidden] assign[=] call[name[event_values].get, parameter[constant[url_hidden], constant[False]]] if name[url_hidden] begin[:] call[name[extras].append, parameter[constant[(url hidden)]]] variable[typed_count] assign[=] call[name[event_values].get, parameter[constant[typed_count], constant[0]]] if compare[name[typed_count] equal[==] constant[0]] begin[:] call[name[extras].append, parameter[constant[(URL not typed directly - no typed count)]]] call[name[event_values]][constant[extra]] assign[=] call[constant[ ].join, parameter[name[extras]]] return[call[name[self]._ConditionalFormatMessages, parameter[name[event_values]]]]
keyword[def] identifier[GetMessages] ( identifier[self] , identifier[formatter_mediator] , identifier[event] ): literal[string] keyword[if] identifier[self] . identifier[DATA_TYPE] != identifier[event] . identifier[data_type] : keyword[raise] identifier[errors] . identifier[WrongFormatter] ( literal[string] . identifier[format] ( identifier[event] . identifier[data_type] )) identifier[event_values] = identifier[event] . identifier[CopyToDict] () identifier[page_transition_type] = identifier[event_values] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[page_transition_type] keyword[is] keyword[not] keyword[None] : identifier[page_transition] , identifier[page_transition_long] = identifier[self] . identifier[_PAGE_TRANSITIONS] . identifier[get] ( identifier[page_transition_type] , identifier[self] . identifier[_UNKNOWN_PAGE_TRANSITION] ) keyword[if] identifier[page_transition_long] : identifier[event_values] [ literal[string] ]= literal[string] . identifier[format] ( identifier[page_transition] , identifier[page_transition_long] ) keyword[else] : identifier[event_values] [ literal[string] ]= identifier[page_transition] identifier[visit_source] = identifier[event_values] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[visit_source] keyword[is] keyword[not] keyword[None] : identifier[event_values] [ literal[string] ]= identifier[self] . identifier[_VISIT_SOURCE] . identifier[get] ( identifier[visit_source] , literal[string] ) identifier[extras] =[] identifier[url_hidden] = identifier[event_values] . identifier[get] ( literal[string] , keyword[False] ) keyword[if] identifier[url_hidden] : identifier[extras] . identifier[append] ( literal[string] ) identifier[typed_count] = identifier[event_values] . identifier[get] ( literal[string] , literal[int] ) keyword[if] identifier[typed_count] == literal[int] : identifier[extras] . identifier[append] ( literal[string] ) keyword[elif] identifier[typed_count] == literal[int] : identifier[extras] . identifier[append] ( literal[string] . identifier[format] ( identifier[typed_count] )) keyword[else] : identifier[extras] . identifier[append] ( literal[string] . identifier[format] ( identifier[typed_count] )) identifier[event_values] [ literal[string] ]= literal[string] . identifier[join] ( identifier[extras] ) keyword[return] identifier[self] . identifier[_ConditionalFormatMessages] ( identifier[event_values] )
def GetMessages(self, formatter_mediator, event): """Determines the formatted message strings for an event object. Args: formatter_mediator (FormatterMediator): mediates the interactions between formatters and other components, such as storage and Windows EventLog resources. event (EventObject): event. Returns: tuple(str, str): formatted message string and short message string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter. """ if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format(event.data_type)) # depends on [control=['if'], data=[]] event_values = event.CopyToDict() page_transition_type = event_values.get('page_transition_type', None) if page_transition_type is not None: (page_transition, page_transition_long) = self._PAGE_TRANSITIONS.get(page_transition_type, self._UNKNOWN_PAGE_TRANSITION) if page_transition_long: event_values['page_transition'] = '{0:s} - {1:s}'.format(page_transition, page_transition_long) # depends on [control=['if'], data=[]] else: event_values['page_transition'] = page_transition # depends on [control=['if'], data=['page_transition_type']] visit_source = event_values.get('visit_source', None) if visit_source is not None: event_values['visit_source'] = self._VISIT_SOURCE.get(visit_source, 'UNKNOWN') # depends on [control=['if'], data=['visit_source']] extras = [] url_hidden = event_values.get('url_hidden', False) if url_hidden: extras.append('(url hidden)') # depends on [control=['if'], data=[]] typed_count = event_values.get('typed_count', 0) if typed_count == 0: extras.append('(URL not typed directly - no typed count)') # depends on [control=['if'], data=[]] elif typed_count == 1: extras.append('(type count {0:d} time)'.format(typed_count)) # depends on [control=['if'], data=['typed_count']] else: extras.append('(type count {0:d} times)'.format(typed_count)) event_values['extra'] = ' '.join(extras) return self._ConditionalFormatMessages(event_values)
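GetMessages() above resolves page_transition_type through class-level lookup tables; the pattern is a dict .get() with a tuple default plus unpacking. A stripped-down sketch of that branching; the table contents below are invented for illustration, not plaso's real transition values.

# Invented stand-ins for the _PAGE_TRANSITIONS / _UNKNOWN_PAGE_TRANSITION tables.
_PAGE_TRANSITIONS = {0: ('LINK', 'User clicked a link'),
                     1: ('TYPED', '')}
_UNKNOWN_PAGE_TRANSITION = ('UNKNOWN', '')

def describe(page_transition_type):
    short, long_ = _PAGE_TRANSITIONS.get(page_transition_type,
                                         _UNKNOWN_PAGE_TRANSITION)
    return '{0:s} - {1:s}'.format(short, long_) if long_ else short

print(describe(0))   # LINK - User clicked a link
print(describe(9))   # UNKNOWN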
def set_or_edit_conditional_breakpoint(self): """Set conditional breakpoint""" if self.data: editor = self.get_current_editor() editor.debugger.toogle_breakpoint(edit_condition=True)
def function[set_or_edit_conditional_breakpoint, parameter[self]]: constant[Set conditional breakpoint] if name[self].data begin[:] variable[editor] assign[=] call[name[self].get_current_editor, parameter[]] call[name[editor].debugger.toogle_breakpoint, parameter[]]
keyword[def] identifier[set_or_edit_conditional_breakpoint] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[data] : identifier[editor] = identifier[self] . identifier[get_current_editor] () identifier[editor] . identifier[debugger] . identifier[toogle_breakpoint] ( identifier[edit_condition] = keyword[True] )
def set_or_edit_conditional_breakpoint(self): """Set conditional breakpoint""" if self.data: editor = self.get_current_editor() editor.debugger.toogle_breakpoint(edit_condition=True) # depends on [control=['if'], data=[]]
def delete(self, callback=None, errback=None): """ Delete the record from the zone, including all advanced configuration, meta data, etc. """ if not self.data: raise RecordException('record not loaded') def success(result, *args): if callback: return callback(result) else: return result return self._rest.delete(self.parentZone.zone, self.domain, self.type, callback=success, errback=errback)
def function[delete, parameter[self, callback, errback]]: constant[ Delete the record from the zone, including all advanced configuration, meta data, etc. ] if <ast.UnaryOp object at 0x7da1b068a290> begin[:] <ast.Raise object at 0x7da1b068b340> def function[success, parameter[result]]: if name[callback] begin[:] return[call[name[callback], parameter[name[result]]]] return[call[name[self]._rest.delete, parameter[name[self].parentZone.zone, name[self].domain, name[self].type]]]
keyword[def] identifier[delete] ( identifier[self] , identifier[callback] = keyword[None] , identifier[errback] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[data] : keyword[raise] identifier[RecordException] ( literal[string] ) keyword[def] identifier[success] ( identifier[result] ,* identifier[args] ): keyword[if] identifier[callback] : keyword[return] identifier[callback] ( identifier[result] ) keyword[else] : keyword[return] identifier[result] keyword[return] identifier[self] . identifier[_rest] . identifier[delete] ( identifier[self] . identifier[parentZone] . identifier[zone] , identifier[self] . identifier[domain] , identifier[self] . identifier[type] , identifier[callback] = identifier[success] , identifier[errback] = identifier[errback] )
def delete(self, callback=None, errback=None): """ Delete the record from the zone, including all advanced configuration, meta data, etc. """ if not self.data: raise RecordException('record not loaded') # depends on [control=['if'], data=[]] def success(result, *args): if callback: return callback(result) # depends on [control=['if'], data=[]] else: return result return self._rest.delete(self.parentZone.zone, self.domain, self.type, callback=success, errback=errback)
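delete() above wraps the REST call so that an optional callback receives the result, with a plain return as the fallback. The idiom in isolation, with a stubbed transport standing in for self._rest.delete:

# Sketch of the callback-or-return idiom; the dict result is a stand-in.
def delete(record, callback=None):
    def success(result, *args):
        return callback(result) if callback else result
    result = {'deleted': record}        # stand-in for the actual REST call
    return success(result)

print(delete('www.example.com'))        # {'deleted': 'www.example.com'}
delete('www.example.com', callback=lambda r: print('gone:', r))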
def camera_disable(self, camera_id, **kwargs): """Disable camera.""" api = self._api_info['camera'] payload = dict({ '_sid': self._sid, 'api': api['name'], 'method': 'Disable', 'version': 9, 'idList': camera_id, }, **kwargs) print(api['url']) print(payload) response = self._get(api['url'], payload) return response['success']
def function[camera_disable, parameter[self, camera_id]]: constant[Disable camera.] variable[api] assign[=] call[name[self]._api_info][constant[camera]] variable[payload] assign[=] call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da1b02850c0>, <ast.Constant object at 0x7da1b0286980>, <ast.Constant object at 0x7da1b0286a10>, <ast.Constant object at 0x7da1b0286890>, <ast.Constant object at 0x7da1b0284460>], [<ast.Attribute object at 0x7da1b0286260>, <ast.Subscript object at 0x7da1b02866e0>, <ast.Constant object at 0x7da1b0286590>, <ast.Constant object at 0x7da1b02868f0>, <ast.Name object at 0x7da1b0286080>]]]] call[name[print], parameter[call[name[api]][constant[url]]]] call[name[print], parameter[name[payload]]] variable[response] assign[=] call[name[self]._get, parameter[call[name[api]][constant[url]], name[payload]]] return[call[name[response]][constant[success]]]
keyword[def] identifier[camera_disable] ( identifier[self] , identifier[camera_id] ,** identifier[kwargs] ): literal[string] identifier[api] = identifier[self] . identifier[_api_info] [ literal[string] ] identifier[payload] = identifier[dict] ({ literal[string] : identifier[self] . identifier[_sid] , literal[string] : identifier[api] [ literal[string] ], literal[string] : literal[string] , literal[string] : literal[int] , literal[string] : identifier[camera_id] , },** identifier[kwargs] ) identifier[print] ( identifier[api] [ literal[string] ]) identifier[print] ( identifier[payload] ) identifier[response] = identifier[self] . identifier[_get] ( identifier[api] [ literal[string] ], identifier[payload] ) keyword[return] identifier[response] [ literal[string] ]
def camera_disable(self, camera_id, **kwargs): """Disable camera.""" api = self._api_info['camera'] payload = dict({'_sid': self._sid, 'api': api['name'], 'method': 'Disable', 'version': 9, 'idList': camera_id}, **kwargs) print(api['url']) print(payload) response = self._get(api['url'], payload) return response['success']
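The two bare print() calls in camera_disable() above read like debugging leftovers; in library code the conventional route is the logging module, which callers can silence. A hedged sketch of the same payload construction with logging in place of print; the Synology API name below is an assumption for illustration.

import logging

logger = logging.getLogger(__name__)

def build_payload(sid, api_name, camera_id, **kwargs):
    payload = dict({'_sid': sid, 'api': api_name, 'method': 'Disable',
                    'version': 9, 'idList': camera_id}, **kwargs)
    logger.debug('camera payload: %s', payload)   # replaces the bare print()
    return payload

print(build_payload('abc', 'SYNO.SurveillanceStation.Camera', 7))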
def add_named_concept_filters(self, named_filter_concepts): """ Adds named concept filters :param named_filter_concepts: dict with named filter concepts which will be mapped as the key as query param and the value as search string """ for concept_key, concept_name in named_filter_concepts.items(): self.add_concept_filter(concept_key, concept_name=concept_name)
def function[add_named_concept_filters, parameter[self, named_filter_concepts]]: constant[ Adds named concept filters :param named_filter_concepts: dict with named filter concepts which will be mapped as the key as query param and the value as search string ] for taget[tuple[[<ast.Name object at 0x7da1b18879a0>, <ast.Name object at 0x7da1b18866e0>]]] in starred[call[name[named_filter_concepts].items, parameter[]]] begin[:] call[name[self].add_concept_filter, parameter[name[concept_key]]]
keyword[def] identifier[add_named_concept_filters] ( identifier[self] , identifier[named_filter_concepts] ): literal[string] keyword[for] identifier[concept_key] , identifier[concept_name] keyword[in] identifier[named_filter_concepts] . identifier[items] (): identifier[self] . identifier[add_concept_filter] ( identifier[concept_key] , identifier[concept_name] = identifier[concept_name] )
def add_named_concept_filters(self, named_filter_concepts): """ Adds named concept filters :param named_filter_concepts: dict with named filter concepts which will be mapped as the key as query param and the value as search string """ for (concept_key, concept_name) in named_filter_concepts.items(): self.add_concept_filter(concept_key, concept_name=concept_name) # depends on [control=['for'], data=[]]
def _keep( word_freq: int, min_freq: int, min_len: int, max_len: int, dict_filter: Callable[[str], bool], ): """ Keep only Thai words with at least min_freq frequency and has length between min_len and max_len characters """ if not word_freq or word_freq[1] < min_freq: return False word = word_freq[0] if not word or len(word) < min_len or len(word) > max_len or word[0] == ".": return False return dict_filter(word)
def function[_keep, parameter[word_freq, min_freq, min_len, max_len, dict_filter]]: constant[ Keep only Thai words with at least min_freq frequency and has length between min_len and max_len characters ] if <ast.BoolOp object at 0x7da1b17a4940> begin[:] return[constant[False]] variable[word] assign[=] call[name[word_freq]][constant[0]] if <ast.BoolOp object at 0x7da1b17a65c0> begin[:] return[constant[False]] return[call[name[dict_filter], parameter[name[word]]]]
keyword[def] identifier[_keep] ( identifier[word_freq] : identifier[int] , identifier[min_freq] : identifier[int] , identifier[min_len] : identifier[int] , identifier[max_len] : identifier[int] , identifier[dict_filter] : identifier[Callable] [[ identifier[str] ], identifier[bool] ], ): literal[string] keyword[if] keyword[not] identifier[word_freq] keyword[or] identifier[word_freq] [ literal[int] ]< identifier[min_freq] : keyword[return] keyword[False] identifier[word] = identifier[word_freq] [ literal[int] ] keyword[if] keyword[not] identifier[word] keyword[or] identifier[len] ( identifier[word] )< identifier[min_len] keyword[or] identifier[len] ( identifier[word] )> identifier[max_len] keyword[or] identifier[word] [ literal[int] ]== literal[string] : keyword[return] keyword[False] keyword[return] identifier[dict_filter] ( identifier[word] )
def _keep(word_freq: int, min_freq: int, min_len: int, max_len: int, dict_filter: Callable[[str], bool]): """ Keep only Thai words with at least min_freq frequency and has length between min_len and max_len characters """ if not word_freq or word_freq[1] < min_freq: return False # depends on [control=['if'], data=[]] word = word_freq[0] if not word or len(word) < min_len or len(word) > max_len or (word[0] == '.'): return False # depends on [control=['if'], data=[]] return dict_filter(word)
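One wrinkle in _keep() above: word_freq is annotated as int, yet the body indexes it as a (word, frequency) pair, so Tuple[str, int] (and a bool return) would match the actual usage. A corrected, runnable sketch of the same filter, to be read as an editorial fix rather than the library's source:

from typing import Callable, Tuple

def keep(word_freq: Tuple[str, int], min_freq: int, min_len: int,
         max_len: int, dict_filter: Callable[[str], bool]) -> bool:
    if not word_freq or word_freq[1] < min_freq:
        return False
    word = word_freq[0]
    if not word or not (min_len <= len(word) <= max_len) or word[0] == '.':
        return False
    return dict_filter(word)

print(keep(('ไทย', 5), 2, 1, 10, lambda w: True))   # True
print(keep(('.x', 5), 2, 1, 10, lambda w: True))    # False: leading dot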
def merge(self, imgs): """Merge image channels. Parameters ---------- imgs : `list` of `PIL.Image.Image` Returns ------- `PIL.Image.Image` Raises ------ ValueError If image channel list is empty. """ if not imgs: raise ValueError('empty channel list') if len(imgs) == 1: return imgs[0] return Image.merge(self.mode, imgs)
def function[merge, parameter[self, imgs]]: constant[Merge image channels. Parameters ---------- imgs : `list` of `PIL.Image.Image` Returns ------- `PIL.Image.Image` Raises ------ ValueError If image channel list is empty. ] if <ast.UnaryOp object at 0x7da1b26ade10> begin[:] <ast.Raise object at 0x7da1b25472e0> if compare[call[name[len], parameter[name[imgs]]] equal[==] constant[1]] begin[:] return[call[name[imgs]][constant[0]]] return[call[name[Image].merge, parameter[name[self].mode, name[imgs]]]]
keyword[def] identifier[merge] ( identifier[self] , identifier[imgs] ): literal[string] keyword[if] keyword[not] identifier[imgs] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[len] ( identifier[imgs] )== literal[int] : keyword[return] identifier[imgs] [ literal[int] ] keyword[return] identifier[Image] . identifier[merge] ( identifier[self] . identifier[mode] , identifier[imgs] )
def merge(self, imgs): """Merge image channels. Parameters ---------- imgs : `list` of `PIL.Image.Image` Returns ------- `PIL.Image.Image` Raises ------ ValueError If image channel list is empty. """ if not imgs: raise ValueError('empty channel list') # depends on [control=['if'], data=[]] if len(imgs) == 1: return imgs[0] # depends on [control=['if'], data=[]] return Image.merge(self.mode, imgs)
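merge() above is a thin guard around PIL.Image.merge; a Pillow round-trip shows what it receives and returns. Requires Pillow; the image contents are arbitrary.

from PIL import Image

img = Image.new('RGB', (8, 8), color=(10, 20, 30))
channels = list(img.split())              # three single-band 'L' images
merged = Image.merge('RGB', channels)     # the len(imgs) > 1 path above
assert merged.getpixel((0, 0)) == (10, 20, 30)
print(merged.mode, merged.size)           # RGB (8, 8)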
def resource_response(resource, depth=0): """Return a response for the *resource* of the appropriate content type. :param resource: resource to be returned in request :type resource: :class:`sandman.model.Model` :rtype: :class:`flask.Response` """ if _get_acceptable_response_type() == JSON: depth = 0 if 'expand' in request.args: depth = 1 return _single_resource_json_response(resource, depth) else: return _single_resource_html_response(resource)
def function[resource_response, parameter[resource, depth]]: constant[Return a response for the *resource* of the appropriate content type. :param resource: resource to be returned in request :type resource: :class:`sandman.model.Model` :rtype: :class:`flask.Response` ] if compare[call[name[_get_acceptable_response_type], parameter[]] equal[==] name[JSON]] begin[:] variable[depth] assign[=] constant[0] if compare[constant[expand] in name[request].args] begin[:] variable[depth] assign[=] constant[1] return[call[name[_single_resource_json_response], parameter[name[resource], name[depth]]]]
keyword[def] identifier[resource_response] ( identifier[resource] , identifier[depth] = literal[int] ): literal[string] keyword[if] identifier[_get_acceptable_response_type] ()== identifier[JSON] : identifier[depth] = literal[int] keyword[if] literal[string] keyword[in] identifier[request] . identifier[args] : identifier[depth] = literal[int] keyword[return] identifier[_single_resource_json_response] ( identifier[resource] , identifier[depth] ) keyword[else] : keyword[return] identifier[_single_resource_html_response] ( identifier[resource] )
def resource_response(resource, depth=0): """Return a response for the *resource* of the appropriate content type. :param resource: resource to be returned in request :type resource: :class:`sandman.model.Model` :rtype: :class:`flask.Response` """ if _get_acceptable_response_type() == JSON: depth = 0 if 'expand' in request.args: depth = 1 # depends on [control=['if'], data=[]] return _single_resource_json_response(resource, depth) # depends on [control=['if'], data=[]] else: return _single_resource_html_response(resource)
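resource_response() above branches on the negotiated content type and honours an expand query parameter; _get_acceptable_response_type and the sandman response helpers are not shown in this entry. A rough Flask-only sketch of the same negotiation, with the helper replaced by werkzeug's Accept-header matching; this is an assumption for illustration, not sandman's implementation.

from flask import Flask, request, jsonify

app = Flask(__name__)

@app.route('/resource')
def resource():
    # Stand-in for _get_acceptable_response_type(): inspect the Accept header.
    best = request.accept_mimetypes.best_match(['application/json', 'text/html'])
    if best == 'application/json':
        depth = 1 if 'expand' in request.args else 0
        return jsonify(expanded=bool(depth))
    return '<html><body>resource</body></html>'

# app.run()  # serve locally, then: curl -H 'Accept: application/json' ...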
def _parse_msg_for_mongodb(self, msgs): ''' >>> mdbf = MongoDBForwarder('no_host', '27017', 'deadpool', ... 'chimichanga', 'logs', 'collection') >>> log = [{u'data': {u'_': {u'file': u'log.py', ... u'fn': u'start', ... u'ln': 8, ... u'name': u'__main__'}, ... u'a': 1, ... u'b': 2, ... u'msg': u'this is a dummy log'}, ... u'error': False, ... u'error_tb': u'', ... u'event': u'some_log', ... u'file': u'/var/log/sample.log', ... u'formatter': u'logagg.formatters.basescript', ... u'host': u'deepcompute', ... u'id': u'20180409T095924_aec36d313bdc11e89da654e1ad04f45e', ... u'level': u'info', ... u'raw': u'{...}', ... u'timestamp': u'2018-04-09T09:59:24.733945Z', ... u'type': u'metric'}] >>> records = mdbf._parse_msg_for_mongodb(log) >>> from pprint import pprint >>> pprint(records) [{'_id': u'20180409T095924_aec36d313bdc11e89da654e1ad04f45e', u'data': {u'_': {u'file': u'log.py', u'fn': u'start', u'ln': 8, u'name': u'__main__'}, u'a': 1, u'b': 2, u'msg': u'this is a dummy log'}, u'error': False, u'error_tb': u'', u'event': u'some_log', u'file': u'/var/log/sample.log', u'formatter': u'logagg.formatters.basescript', u'host': u'deepcompute', u'level': u'info', u'raw': u'{...}', u'timestamp': u'2018-04-09T09:59:24.733945Z', u'type': u'metric'}] ''' msgs_list = [] for msg in msgs: try: msg['_id'] = msg.pop('id') except KeyError: self.log.exception('collector_failure_id_not_found', log=msg) msgs_list.append(msg) return msgs_list
def function[_parse_msg_for_mongodb, parameter[self, msgs]]: constant[ >>> mdbf = MongoDBForwarder('no_host', '27017', 'deadpool', ... 'chimichanga', 'logs', 'collection') >>> log = [{u'data': {u'_': {u'file': u'log.py', ... u'fn': u'start', ... u'ln': 8, ... u'name': u'__main__'}, ... u'a': 1, ... u'b': 2, ... u'msg': u'this is a dummy log'}, ... u'error': False, ... u'error_tb': u'', ... u'event': u'some_log', ... u'file': u'/var/log/sample.log', ... u'formatter': u'logagg.formatters.basescript', ... u'host': u'deepcompute', ... u'id': u'20180409T095924_aec36d313bdc11e89da654e1ad04f45e', ... u'level': u'info', ... u'raw': u'{...}', ... u'timestamp': u'2018-04-09T09:59:24.733945Z', ... u'type': u'metric'}] >>> records = mdbf._parse_msg_for_mongodb(log) >>> from pprint import pprint >>> pprint(records) [{'_id': u'20180409T095924_aec36d313bdc11e89da654e1ad04f45e', u'data': {u'_': {u'file': u'log.py', u'fn': u'start', u'ln': 8, u'name': u'__main__'}, u'a': 1, u'b': 2, u'msg': u'this is a dummy log'}, u'error': False, u'error_tb': u'', u'event': u'some_log', u'file': u'/var/log/sample.log', u'formatter': u'logagg.formatters.basescript', u'host': u'deepcompute', u'level': u'info', u'raw': u'{...}', u'timestamp': u'2018-04-09T09:59:24.733945Z', u'type': u'metric'}] ] variable[msgs_list] assign[=] list[[]] for taget[name[msg]] in starred[name[msgs]] begin[:] <ast.Try object at 0x7da20c991270> call[name[msgs_list].append, parameter[name[msg]]] return[name[msgs_list]]
keyword[def] identifier[_parse_msg_for_mongodb] ( identifier[self] , identifier[msgs] ): literal[string] identifier[msgs_list] =[] keyword[for] identifier[msg] keyword[in] identifier[msgs] : keyword[try] : identifier[msg] [ literal[string] ]= identifier[msg] . identifier[pop] ( literal[string] ) keyword[except] identifier[KeyError] : identifier[self] . identifier[log] . identifier[exception] ( literal[string] , identifier[log] = identifier[msg] ) identifier[msgs_list] . identifier[append] ( identifier[msg] ) keyword[return] identifier[msgs_list]
def _parse_msg_for_mongodb(self, msgs): """ >>> mdbf = MongoDBForwarder('no_host', '27017', 'deadpool', ... 'chimichanga', 'logs', 'collection') >>> log = [{u'data': {u'_': {u'file': u'log.py', ... u'fn': u'start', ... u'ln': 8, ... u'name': u'__main__'}, ... u'a': 1, ... u'b': 2, ... u'msg': u'this is a dummy log'}, ... u'error': False, ... u'error_tb': u'', ... u'event': u'some_log', ... u'file': u'/var/log/sample.log', ... u'formatter': u'logagg.formatters.basescript', ... u'host': u'deepcompute', ... u'id': u'20180409T095924_aec36d313bdc11e89da654e1ad04f45e', ... u'level': u'info', ... u'raw': u'{...}', ... u'timestamp': u'2018-04-09T09:59:24.733945Z', ... u'type': u'metric'}] >>> records = mdbf._parse_msg_for_mongodb(log) >>> from pprint import pprint >>> pprint(records) [{'_id': u'20180409T095924_aec36d313bdc11e89da654e1ad04f45e', u'data': {u'_': {u'file': u'log.py', u'fn': u'start', u'ln': 8, u'name': u'__main__'}, u'a': 1, u'b': 2, u'msg': u'this is a dummy log'}, u'error': False, u'error_tb': u'', u'event': u'some_log', u'file': u'/var/log/sample.log', u'formatter': u'logagg.formatters.basescript', u'host': u'deepcompute', u'level': u'info', u'raw': u'{...}', u'timestamp': u'2018-04-09T09:59:24.733945Z', u'type': u'metric'}] """ msgs_list = [] for msg in msgs: try: msg['_id'] = msg.pop('id') # depends on [control=['try'], data=[]] except KeyError: self.log.exception('collector_failure_id_not_found', log=msg) # depends on [control=['except'], data=[]] msgs_list.append(msg) # depends on [control=['for'], data=['msg']] return msgs_list
def as_dict(self): """ Json-serializable dict representation of PhononDos. """ return {"@module": self.__class__.__module__, "@class": self.__class__.__name__, "frequencies": list(self.frequencies), "densities": list(self.densities)}
def function[as_dict, parameter[self]]: constant[ Json-serializable dict representation of PhononDos. ] return[dictionary[[<ast.Constant object at 0x7da20c6c5ff0>, <ast.Constant object at 0x7da20c6c5ae0>, <ast.Constant object at 0x7da20c6c6e30>, <ast.Constant object at 0x7da20c6c7460>], [<ast.Attribute object at 0x7da20c6c5cc0>, <ast.Attribute object at 0x7da20c6c74f0>, <ast.Call object at 0x7da20c6c4a30>, <ast.Call object at 0x7da20c6c6680>]]]
keyword[def] identifier[as_dict] ( identifier[self] ): literal[string] keyword[return] { literal[string] : identifier[self] . identifier[__class__] . identifier[__module__] , literal[string] : identifier[self] . identifier[__class__] . identifier[__name__] , literal[string] : identifier[list] ( identifier[self] . identifier[frequencies] ), literal[string] : identifier[list] ( identifier[self] . identifier[densities] )}
def as_dict(self): """ Json-serializable dict representation of PhononDos. """ return {'@module': self.__class__.__module__, '@class': self.__class__.__name__, 'frequencies': list(self.frequencies), 'densities': list(self.densities)}
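The list() conversions in as_dict() above matter because frequencies and densities are typically numpy arrays in DOS objects: ndarray has no default JSON encoder, while the np.float64 elements produced by list() subclass float and serialize fine. A quick check of that distinction:

import json
import numpy as np

freqs = np.array([0.0, 1.0])
try:
    json.dumps({'frequencies': freqs})           # ndarray: TypeError
except TypeError as err:
    print(err)
print(json.dumps({'frequencies': list(freqs)}))  # fine after list()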
def request_absolute_cursor_position(self): """ Get current cursor position. For vt100: Do CPR request. (answer will arrive later.) For win32: Do API call. (Answer comes immediately.) """ # Only do this request when the cursor is at the top row. (after a # clear or reset). We will rely on that in `report_absolute_cursor_row`. assert self._cursor_pos.y == 0 # For Win32, we have an API call to get the number of rows below the # cursor. if is_windows(): self._min_available_height = self.output.get_rows_below_cursor_position() else: if self.use_alternate_screen: self._min_available_height = self.output.get_size().rows else: # Asks for a cursor position report (CPR). self.waiting_for_cpr = True self.output.ask_for_cpr()
def function[request_absolute_cursor_position, parameter[self]]: constant[ Get current cursor position. For vt100: Do CPR request. (answer will arrive later.) For win32: Do API call. (Answer comes immediately.) ] assert[compare[name[self]._cursor_pos.y equal[==] constant[0]]] if call[name[is_windows], parameter[]] begin[:] name[self]._min_available_height assign[=] call[name[self].output.get_rows_below_cursor_position, parameter[]]
keyword[def] identifier[request_absolute_cursor_position] ( identifier[self] ): literal[string] keyword[assert] identifier[self] . identifier[_cursor_pos] . identifier[y] == literal[int] keyword[if] identifier[is_windows] (): identifier[self] . identifier[_min_available_height] = identifier[self] . identifier[output] . identifier[get_rows_below_cursor_position] () keyword[else] : keyword[if] identifier[self] . identifier[use_alternate_screen] : identifier[self] . identifier[_min_available_height] = identifier[self] . identifier[output] . identifier[get_size] (). identifier[rows] keyword[else] : identifier[self] . identifier[waiting_for_cpr] = keyword[True] identifier[self] . identifier[output] . identifier[ask_for_cpr] ()
def request_absolute_cursor_position(self): """ Get current cursor position. For vt100: Do CPR request. (answer will arrive later.) For win32: Do API call. (Answer comes immediately.) """ # Only do this request when the cursor is at the top row. (after a # clear or reset). We will rely on that in `report_absolute_cursor_row`. assert self._cursor_pos.y == 0 # For Win32, we have an API call to get the number of rows below the # cursor. if is_windows(): self._min_available_height = self.output.get_rows_below_cursor_position() # depends on [control=['if'], data=[]] elif self.use_alternate_screen: self._min_available_height = self.output.get_size().rows # depends on [control=['if'], data=[]] else: # Asks for a cursor position report (CPR). self.waiting_for_cpr = True self.output.ask_for_cpr()
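For the vt100 branch above, the CPR request written by ask_for_cpr() is the standard escape sequence ESC [ 6 n, and the terminal answers asynchronously with ESC [ row ; col R, which is why the code sets waiting_for_cpr instead of reading inline. The parsing side, sketched on a canned reply string rather than a live terminal:

import re

CPR_REQUEST = '\x1b[6n'        # what the vt100 output writes
reply = '\x1b[24;80R'          # canned terminal answer for illustration
m = re.match(r'\x1b\[(\d+);(\d+)R', reply)
row, col = int(m.group(1)), int(m.group(2))
print(row, col)                # 24 80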
def _length_hint(obj): """Returns the length hint of an object.""" try: return len(obj) except (AttributeError, TypeError): try: get_hint = type(obj).__length_hint__ except AttributeError: return None try: hint = get_hint(obj) except TypeError: return None if hint is NotImplemented or \ not isinstance(hint, int_types) or \ hint < 0: return None return hint
def function[_length_hint, parameter[obj]]: constant[Returns the length hint of an object.] <ast.Try object at 0x7da18ede50c0>
keyword[def] identifier[_length_hint] ( identifier[obj] ): literal[string] keyword[try] : keyword[return] identifier[len] ( identifier[obj] ) keyword[except] ( identifier[AttributeError] , identifier[TypeError] ): keyword[try] : identifier[get_hint] = identifier[type] ( identifier[obj] ). identifier[__length_hint__] keyword[except] identifier[AttributeError] : keyword[return] keyword[None] keyword[try] : identifier[hint] = identifier[get_hint] ( identifier[obj] ) keyword[except] identifier[TypeError] : keyword[return] keyword[None] keyword[if] identifier[hint] keyword[is] identifier[NotImplemented] keyword[or] keyword[not] identifier[isinstance] ( identifier[hint] , identifier[int_types] ) keyword[or] identifier[hint] < literal[int] : keyword[return] keyword[None] keyword[return] identifier[hint]
def _length_hint(obj): """Returns the length hint of an object.""" try: return len(obj) # depends on [control=['try'], data=[]] except (AttributeError, TypeError): try: get_hint = type(obj).__length_hint__ # depends on [control=['try'], data=[]] except AttributeError: return None # depends on [control=['except'], data=[]] try: hint = get_hint(obj) # depends on [control=['try'], data=[]] except TypeError: return None # depends on [control=['except'], data=[]] if hint is NotImplemented or not isinstance(hint, int_types) or hint < 0: return None # depends on [control=['if'], data=[]] return hint # depends on [control=['except'], data=[]]
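_length_hint() above reimplements the fallback chain that the standard library ships as operator.length_hint (PEP 424): try len(), then __length_hint__, rejecting negative or non-integer hints. A quick check against the stdlib version:

import operator

class Hinted:
    def __length_hint__(self):
        return 7

print(operator.length_hint(Hinted()))   # 7, via __length_hint__
print(operator.length_hint([1, 2, 3]))  # 3, via len()
print(operator.length_hint(object()))   # 0, the default when no hint exists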
def decrypt_message(encrypted_data, decryption_key): """Function parses an ASN.1 encrypted message and extracts/decrypts the original message. :param encrypted_data: A CMS ASN.1 byte string containing the encrypted data. :param decryption_key: The key to be used for decrypting the data. :return: A byte string containing the decrypted original message. """ cms_content = cms.ContentInfo.load(encrypted_data) cipher, decrypted_content = None, None if cms_content['content_type'].native == 'enveloped_data': recipient_info = cms_content['content']['recipient_infos'][0].parse() key_enc_alg = recipient_info[ 'key_encryption_algorithm']['algorithm'].native encrypted_key = recipient_info['encrypted_key'].native if key_enc_alg == 'rsa': try: key = asymmetric.rsa_pkcs1v15_decrypt( decryption_key[0], encrypted_key) except Exception as e: raise DecryptionError('Failed to decrypt the payload: ' 'Could not extract decryption key.') alg = cms_content['content']['encrypted_content_info'][ 'content_encryption_algorithm'] encapsulated_data = cms_content['content'][ 'encrypted_content_info']['encrypted_content'].native try: if alg.encryption_cipher == 'tripledes': cipher = 'tripledes_192_cbc' decrypted_content = symmetric.tripledes_cbc_pkcs5_decrypt( key, encapsulated_data, alg.encryption_iv) else: raise AS2Exception('Unsupported Encryption Algorithm') except Exception as e: raise DecryptionError( 'Failed to decrypt the payload: {}'.format(e)) return cipher, decrypted_content
def function[decrypt_message, parameter[encrypted_data, decryption_key]]: constant[Function parses an ASN.1 encrypted message and extracts/decrypts the original message. :param encrypted_data: A CMS ASN.1 byte string containing the encrypted data. :param decryption_key: The key to be used for decrypting the data. :return: A byte string containing the decrypted original message. ] variable[cms_content] assign[=] call[name[cms].ContentInfo.load, parameter[name[encrypted_data]]] <ast.Tuple object at 0x7da18fe908e0> assign[=] tuple[[<ast.Constant object at 0x7da18fe93010>, <ast.Constant object at 0x7da18fe92200>]] if compare[call[name[cms_content]][constant[content_type]].native equal[==] constant[enveloped_data]] begin[:] variable[recipient_info] assign[=] call[call[call[call[name[cms_content]][constant[content]]][constant[recipient_infos]]][constant[0]].parse, parameter[]] variable[key_enc_alg] assign[=] call[call[name[recipient_info]][constant[key_encryption_algorithm]]][constant[algorithm]].native variable[encrypted_key] assign[=] call[name[recipient_info]][constant[encrypted_key]].native if compare[name[key_enc_alg] equal[==] constant[rsa]] begin[:] <ast.Try object at 0x7da18fe93370> variable[alg] assign[=] call[call[call[name[cms_content]][constant[content]]][constant[encrypted_content_info]]][constant[content_encryption_algorithm]] variable[encapsulated_data] assign[=] call[call[call[name[cms_content]][constant[content]]][constant[encrypted_content_info]]][constant[encrypted_content]].native <ast.Try object at 0x7da204962f80> return[tuple[[<ast.Name object at 0x7da18fe93d90>, <ast.Name object at 0x7da18fe91780>]]]
keyword[def] identifier[decrypt_message] ( identifier[encrypted_data] , identifier[decryption_key] ): literal[string] identifier[cms_content] = identifier[cms] . identifier[ContentInfo] . identifier[load] ( identifier[encrypted_data] ) identifier[cipher] , identifier[decrypted_content] = keyword[None] , keyword[None] keyword[if] identifier[cms_content] [ literal[string] ]. identifier[native] == literal[string] : identifier[recipient_info] = identifier[cms_content] [ literal[string] ][ literal[string] ][ literal[int] ]. identifier[parse] () identifier[key_enc_alg] = identifier[recipient_info] [ literal[string] ][ literal[string] ]. identifier[native] identifier[encrypted_key] = identifier[recipient_info] [ literal[string] ]. identifier[native] keyword[if] identifier[key_enc_alg] == literal[string] : keyword[try] : identifier[key] = identifier[asymmetric] . identifier[rsa_pkcs1v15_decrypt] ( identifier[decryption_key] [ literal[int] ], identifier[encrypted_key] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[DecryptionError] ( literal[string] literal[string] ) identifier[alg] = identifier[cms_content] [ literal[string] ][ literal[string] ][ literal[string] ] identifier[encapsulated_data] = identifier[cms_content] [ literal[string] ][ literal[string] ][ literal[string] ]. identifier[native] keyword[try] : keyword[if] identifier[alg] . identifier[encryption_cipher] == literal[string] : identifier[cipher] = literal[string] identifier[decrypted_content] = identifier[symmetric] . identifier[tripledes_cbc_pkcs5_decrypt] ( identifier[key] , identifier[encapsulated_data] , identifier[alg] . identifier[encryption_iv] ) keyword[else] : keyword[raise] identifier[AS2Exception] ( literal[string] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[DecryptionError] ( literal[string] . identifier[format] ( identifier[e] )) keyword[return] identifier[cipher] , identifier[decrypted_content]
def decrypt_message(encrypted_data, decryption_key): """Function parses an ASN.1 encrypted message and extracts/decrypts the original message. :param encrypted_data: A CMS ASN.1 byte string containing the encrypted data. :param decryption_key: The key to be used for decrypting the data. :return: A byte string containing the decrypted original message. """ cms_content = cms.ContentInfo.load(encrypted_data) (cipher, decrypted_content) = (None, None) if cms_content['content_type'].native == 'enveloped_data': recipient_info = cms_content['content']['recipient_infos'][0].parse() key_enc_alg = recipient_info['key_encryption_algorithm']['algorithm'].native encrypted_key = recipient_info['encrypted_key'].native if key_enc_alg == 'rsa': try: key = asymmetric.rsa_pkcs1v15_decrypt(decryption_key[0], encrypted_key) # depends on [control=['try'], data=[]] except Exception as e: raise DecryptionError('Failed to decrypt the payload: Could not extract decryption key.') # depends on [control=['except'], data=[]] alg = cms_content['content']['encrypted_content_info']['content_encryption_algorithm'] encapsulated_data = cms_content['content']['encrypted_content_info']['encrypted_content'].native try: if alg.encryption_cipher == 'tripledes': cipher = 'tripledes_192_cbc' decrypted_content = symmetric.tripledes_cbc_pkcs5_decrypt(key, encapsulated_data, alg.encryption_iv) # depends on [control=['if'], data=[]] else: raise AS2Exception('Unsupported Encryption Algorithm') # depends on [control=['try'], data=[]] except Exception as e: raise DecryptionError('Failed to decrypt the payload: {}'.format(e)) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return (cipher, decrypted_content)
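A hypothetical calling sketch for decrypt_message, assuming an oscrypto private key packaged the way the code above expects (index 0 of decryption_key holds the key object); the file names are placeholders.

from oscrypto import asymmetric

private_key = asymmetric.load_private_key('receiver.key', 'passphrase')
with open('message.p7m', 'rb') as f:
    cipher, plaintext = decrypt_message(f.read(), (private_key, None))
# cipher is e.g. 'tripledes_192_cbc'; plaintext holds the original bytes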
def addDocEntity(self, name, type, ExternalID, SystemID, content): """Register a new entity for this document. """ ret = libxml2mod.xmlAddDocEntity(self._o, name, type, ExternalID, SystemID, content) if ret is None:raise treeError('xmlAddDocEntity() failed') __tmp = xmlEntity(_obj=ret) return __tmp
def function[addDocEntity, parameter[self, name, type, ExternalID, SystemID, content]]: constant[Register a new entity for this document. ] variable[ret] assign[=] call[name[libxml2mod].xmlAddDocEntity, parameter[name[self]._o, name[name], name[type], name[ExternalID], name[SystemID], name[content]]] if compare[name[ret] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1f05480> variable[__tmp] assign[=] call[name[xmlEntity], parameter[]] return[name[__tmp]]
keyword[def] identifier[addDocEntity] ( identifier[self] , identifier[name] , identifier[type] , identifier[ExternalID] , identifier[SystemID] , identifier[content] ): literal[string] identifier[ret] = identifier[libxml2mod] . identifier[xmlAddDocEntity] ( identifier[self] . identifier[_o] , identifier[name] , identifier[type] , identifier[ExternalID] , identifier[SystemID] , identifier[content] ) keyword[if] identifier[ret] keyword[is] keyword[None] : keyword[raise] identifier[treeError] ( literal[string] ) identifier[__tmp] = identifier[xmlEntity] ( identifier[_obj] = identifier[ret] ) keyword[return] identifier[__tmp]
def addDocEntity(self, name, type, ExternalID, SystemID, content): """Register a new entity for this document. """ ret = libxml2mod.xmlAddDocEntity(self._o, name, type, ExternalID, SystemID, content) if ret is None: raise treeError('xmlAddDocEntity() failed') # depends on [control=['if'], data=[]] __tmp = xmlEntity(_obj=ret) return __tmp
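A short usage sketch against the libxml2 Python bindings; entity type 1 corresponds to XML_INTERNAL_GENERAL_ENTITY in the libxml2 enum (written as a literal here, since constant exposure varies by build).

import libxml2

doc = libxml2.parseDoc("<root/>")
ent = doc.addDocEntity("greeting", 1, None, None, "Hello, world")
print(ent.name)  # 'greeting'
doc.freeDoc()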
def unblock(self, item: str) -> None:
    """
    Unblock an item and/or reset its fail count

    :param str item: The item to unblock
    """
    assert item is not None
    item = self._encode_item(item)
    watchlist_key = self.__redis_conf['watchlist_template'].format(item)
    blacklist_key = self.__redis_conf['blacklist_template'].format(item)
    connection = self.__get_connection()
    connection.delete(watchlist_key)
    connection.delete(blacklist_key)
    BlackRed.__release_connection(connection)
def function[unblock, parameter[self, item]]: constant[ Unblock an item and/or reset it's fail count :param str item: The item to unblock ] assert[compare[name[item] is_not constant[None]]] variable[item] assign[=] call[name[self]._encode_item, parameter[name[item]]] variable[watchlist_key] assign[=] call[call[name[self].__redis_conf][constant[watchlist_template]].format, parameter[name[item]]] variable[blacklist_key] assign[=] call[call[name[self].__redis_conf][constant[blacklist_template]].format, parameter[name[item]]] variable[connection] assign[=] call[name[self].__get_connection, parameter[]] call[name[connection].delete, parameter[name[watchlist_key]]] call[name[connection].delete, parameter[name[blacklist_key]]] call[name[BlackRed].__release_connection, parameter[name[connection]]]
keyword[def] identifier[unblock] ( identifier[self] , identifier[item] : identifier[str] )-> keyword[None] : literal[string] keyword[assert] identifier[item] keyword[is] keyword[not] keyword[None] identifier[item] = identifier[self] . identifier[_encode_item] ( identifier[item] ) identifier[watchlist_key] = identifier[self] . identifier[__redis_conf] [ literal[string] ]. identifier[format] ( identifier[item] ) identifier[blacklist_key] = identifier[self] . identifier[__redis_conf] [ literal[string] ]. identifier[format] ( identifier[item] ) identifier[connection] = identifier[self] . identifier[__get_connection] () identifier[connection] . identifier[delete] ( identifier[watchlist_key] ) identifier[connection] . identifier[delete] ( identifier[blacklist_key] ) identifier[BlackRed] . identifier[__release_connection] ( identifier[connection] )
def unblock(self, item: str) -> None: """ Unblock an item and/or reset it's fail count :param str item: The item to unblock """ assert item is not None item = self._encode_item(item) watchlist_key = self.__redis_conf['watchlist_template'].format(item) blacklist_key = self.__redis_conf['blacklist_template'].format(item) connection = self.__get_connection() connection.delete(watchlist_key) connection.delete(blacklist_key) BlackRed.__release_connection(connection)
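A hedged usage sketch for the BlackRed API above; the failure-logging call is an assumption about the rest of the class, and default Redis settings are assumed.

br = BlackRed()                 # assumes default Redis connection settings
br.log_fail('203.0.113.7')      # hypothetical: record a failed attempt
br.unblock('203.0.113.7')       # deletes both watchlist and blacklist keys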
def read_string(self, content): """ Reads a Python string that contains C++ code, and return the declarations tree. """ header_file = utils.create_temp_file_name(suffix='.h') with open(header_file, "w+") as f: f.write(content) try: decls = self.read_file(header_file) except Exception: utils.remove_file_no_raise(header_file, self.__config) raise utils.remove_file_no_raise(header_file, self.__config) return decls
def function[read_string, parameter[self, content]]: constant[ Reads a Python string that contains C++ code, and return the declarations tree. ] variable[header_file] assign[=] call[name[utils].create_temp_file_name, parameter[]] with call[name[open], parameter[name[header_file], constant[w+]]] begin[:] call[name[f].write, parameter[name[content]]] <ast.Try object at 0x7da18dc9abc0> call[name[utils].remove_file_no_raise, parameter[name[header_file], name[self].__config]] return[name[decls]]
keyword[def] identifier[read_string] ( identifier[self] , identifier[content] ): literal[string] identifier[header_file] = identifier[utils] . identifier[create_temp_file_name] ( identifier[suffix] = literal[string] ) keyword[with] identifier[open] ( identifier[header_file] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[content] ) keyword[try] : identifier[decls] = identifier[self] . identifier[read_file] ( identifier[header_file] ) keyword[except] identifier[Exception] : identifier[utils] . identifier[remove_file_no_raise] ( identifier[header_file] , identifier[self] . identifier[__config] ) keyword[raise] identifier[utils] . identifier[remove_file_no_raise] ( identifier[header_file] , identifier[self] . identifier[__config] ) keyword[return] identifier[decls]
def read_string(self, content): """ Reads a Python string that contains C++ code, and return the declarations tree. """ header_file = utils.create_temp_file_name(suffix='.h') with open(header_file, 'w+') as f: f.write(content) # depends on [control=['with'], data=['f']] try: decls = self.read_file(header_file) # depends on [control=['try'], data=[]] except Exception: utils.remove_file_no_raise(header_file, self.__config) raise # depends on [control=['except'], data=[]] utils.remove_file_no_raise(header_file, self.__config) return decls
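Illustrative only: how a reader exposing read_string might be driven, following pygccxml-style setup; the configuration plumbing is an assumption about the surrounding package.

from pygccxml import parser, utils

generator_path, generator_name = utils.find_xml_generator()
config = parser.xml_generator_configuration_t(
    xml_generator_path=generator_path, xml_generator=generator_name)
reader = parser.source_reader_t(config)
decls = reader.read_string("struct point_t { double x; double y; };")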
def plot_spectra_pages_pdf(ss, pdf_filename='pages.pdf', setup=_default_setup):
    """
    Plots spectra into a PDF file, one spectrum per page.

    Args:
        ss: list of Spectrum objects
        pdf_filename: name of output file
        setup: plot formatting options, default _default_setup
    """
    logger = a99.get_python_logger()
    xmin, xmax, ymin_, ymax, xspan, yspan = calc_max_min(ss)
    ymin = ymin_ if setup.ymin is None else setup.ymin
    num_pages = len(ss)
    a99.format_BLB()
    pdf = matplotlib.backends.backend_pdf.PdfPages(pdf_filename)

    for i, s in enumerate(ss):
        title = s.title
        fig = plt.figure()
        plt.plot(s.x, s.y, c=_FAV_COLOR)
        if setup.flag_xlabel and setup.fmt_xlabel:
            _set_plot(plt.xlabel, setup.fmt_xlabel, s)
        if setup.flag_ylabel and setup.fmt_ylabel:
            _set_plot(plt.ylabel, setup.fmt_ylabel, s)
        _set_plot(plt.title, setup.fmt_title, s)
        plt.xlim([xmin-xspan*_T, xmax+xspan*_T])
        plt.ylim([ymin-yspan*_T, ymax+yspan*_T])
        plt.tight_layout()
        plt.subplots_adjust(top=0.94)  # workaround for cropped title
        logger.info("Printing page {0:d}/{1:d} ('{2!s}')".format(i+1, num_pages, title))
        pdf.savefig(fig)
        plt.close()

    pdf.close()
    logger.info("File {0!s} successfully created.".format(pdf_filename))
def function[plot_spectra_pages_pdf, parameter[ss, pdf_filename, setup]]: constant[ Plots spectra into a PDF file, one spectrum per page. Splits into several pieces of width Args: ss: list of Spectrum objects pdf_filename: name of output file ] variable[logger] assign[=] call[name[a99].get_python_logger, parameter[]] <ast.Tuple object at 0x7da207f9ba00> assign[=] call[name[calc_max_min], parameter[name[ss]]] variable[ymin] assign[=] <ast.IfExp object at 0x7da207f9bdc0> variable[num_pages] assign[=] call[name[len], parameter[name[ss]]] call[name[a99].format_BLB, parameter[]] variable[pdf] assign[=] call[name[matplotlib].backends.backend_pdf.PdfPages, parameter[name[pdf_filename]]] for taget[tuple[[<ast.Name object at 0x7da207f9ac20>, <ast.Name object at 0x7da207f9b3a0>]]] in starred[call[name[enumerate], parameter[name[ss]]]] begin[:] variable[title] assign[=] name[s].title variable[fig] assign[=] call[name[plt].figure, parameter[]] call[name[plt].plot, parameter[name[s].x, name[s].y]] if <ast.BoolOp object at 0x7da20c7961a0> begin[:] call[name[_set_plot], parameter[name[plt].xlabel, name[setup].fmt_xlabel, name[s]]] if <ast.BoolOp object at 0x7da20c796500> begin[:] call[name[_set_plot], parameter[name[plt].ylabel, name[setup].fmt_ylabel, name[s]]] call[name[_set_plot], parameter[name[plt].title, name[setup].fmt_title, name[s]]] call[name[plt].xlim, parameter[list[[<ast.BinOp object at 0x7da20c795480>, <ast.BinOp object at 0x7da20c794a90>]]]] call[name[plt].ylim, parameter[list[[<ast.BinOp object at 0x7da20c796620>, <ast.BinOp object at 0x7da20c7952d0>]]]] call[name[plt].tight_layout, parameter[]] call[name[plt].subplots_adjust, parameter[]] call[name[logger].info, parameter[call[constant[Printing page {0:d}/{1:d} ('{2!s}')].format, parameter[binary_operation[name[i] + constant[1]], name[num_pages], name[title]]]]] call[name[pdf].savefig, parameter[name[fig]]] call[name[plt].close, parameter[]] call[name[pdf].close, parameter[]] call[name[logger].info, parameter[call[constant[File {0!s} successfully created.].format, parameter[name[pdf_filename]]]]]
keyword[def] identifier[plot_spectra_pages_pdf] ( identifier[ss] , identifier[pdf_filename] = literal[string] , identifier[setup] = identifier[_default_setup] ): literal[string] identifier[logger] = identifier[a99] . identifier[get_python_logger] () identifier[xmin] , identifier[xmax] , identifier[ymin_] , identifier[ymax] , identifier[xspan] , identifier[yspan] = identifier[calc_max_min] ( identifier[ss] ) identifier[ymin] = identifier[ymin_] keyword[if] identifier[setup] . identifier[ymin] keyword[is] keyword[None] keyword[else] identifier[setup] . identifier[ymin] identifier[num_pages] = identifier[len] ( identifier[ss] ) identifier[a99] . identifier[format_BLB] () identifier[pdf] = identifier[matplotlib] . identifier[backends] . identifier[backend_pdf] . identifier[PdfPages] ( identifier[pdf_filename] ) keyword[for] identifier[i] , identifier[s] keyword[in] identifier[enumerate] ( identifier[ss] ): identifier[title] = identifier[s] . identifier[title] identifier[fig] = identifier[plt] . identifier[figure] () identifier[plt] . identifier[plot] ( identifier[s] . identifier[x] , identifier[s] . identifier[y] , identifier[c] = identifier[_FAV_COLOR] ) keyword[if] identifier[setup] . identifier[flag_xlabel] keyword[and] identifier[setup] . identifier[fmt_xlabel] : identifier[_set_plot] ( identifier[plt] . identifier[xlabel] , identifier[setup] . identifier[fmt_xlabel] , identifier[s] ) keyword[if] identifier[setup] . identifier[flag_ylabel] keyword[and] identifier[setup] . identifier[fmt_ylabel] : identifier[_set_plot] ( identifier[plt] . identifier[ylabel] , identifier[setup] . identifier[fmt_ylabel] , identifier[s] ) identifier[_set_plot] ( identifier[plt] . identifier[title] , identifier[setup] . identifier[fmt_title] , identifier[s] ) identifier[plt] . identifier[xlim] ([ identifier[xmin] - identifier[xspan] * identifier[_T] , identifier[xmax] + identifier[xspan] * identifier[_T] ]) identifier[plt] . identifier[ylim] ([ identifier[ymin] - identifier[yspan] * identifier[_T] , identifier[ymax] + identifier[yspan] * identifier[_T] ]) identifier[plt] . identifier[tight_layout] () identifier[plt] . identifier[subplots_adjust] ( identifier[top] = literal[int] ) identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[i] + literal[int] , identifier[num_pages] , identifier[title] )) identifier[pdf] . identifier[savefig] ( identifier[fig] ) identifier[plt] . identifier[close] () identifier[pdf] . identifier[close] () identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[pdf_filename] ))
def plot_spectra_pages_pdf(ss, pdf_filename='pages.pdf', setup=_default_setup): """ Plots spectra into a PDF file, one spectrum per page. Splits into several pieces of width Args: ss: list of Spectrum objects pdf_filename: name of output file """ logger = a99.get_python_logger() (xmin, xmax, ymin_, ymax, xspan, yspan) = calc_max_min(ss) ymin = ymin_ if setup.ymin is None else setup.ymin num_pages = len(ss) a99.format_BLB() pdf = matplotlib.backends.backend_pdf.PdfPages(pdf_filename) for (i, s) in enumerate(ss): title = s.title fig = plt.figure() plt.plot(s.x, s.y, c=_FAV_COLOR) if setup.flag_xlabel and setup.fmt_xlabel: _set_plot(plt.xlabel, setup.fmt_xlabel, s) # depends on [control=['if'], data=[]] if setup.flag_ylabel and setup.fmt_ylabel: _set_plot(plt.ylabel, setup.fmt_ylabel, s) # depends on [control=['if'], data=[]] _set_plot(plt.title, setup.fmt_title, s) plt.xlim([xmin - xspan * _T, xmax + xspan * _T]) plt.ylim([ymin - yspan * _T, ymax + yspan * _T]) plt.tight_layout() plt.subplots_adjust(top=0.94) # workaround for cropped title logger.info("Printing page {0:d}/{1:d} ('{2!s}')".format(i + 1, num_pages, title)) pdf.savefig(fig) plt.close() # depends on [control=['for'], data=[]] pdf.close() logger.info('File {0!s} successfully created.'.format(pdf_filename))
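A call sketch for plot_spectra_pages_pdf; any objects carrying .x, .y and .title satisfy the plotting loop above, so the loader name below is purely hypothetical.

ss = [load_spectrum(name) for name in ("a.fits", "b.fits")]  # hypothetical loader
plot_spectra_pages_pdf(ss, pdf_filename="spectra.pdf")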
def get_gradients(self, analytes=None, win=15, filt=False, samples=None, subset=None, recalc=True):
    """
    Collect all data from all samples into a single array.
    Data from standards is not collected.

    Parameters
    ----------
    analytes : str or list
        which analytes to calculate gradients for, default all
    win : int
        window width (number of points) used when calculating gradients
    filt : str, dict or bool
        Either logical filter expression contained in a str,
        a dict of expressions specifying the filter string to
        use for each analyte or a boolean. Passed to `grab_filt`.
    samples : str or list
        which samples to get
    subset : str or int
        which subset to get
    recalc : bool
        whether to re-calculate gradients that already exist, default True

    Returns
    -------
    None
    """
    if analytes is None:
        analytes = self.analytes
    if samples is not None:
        subset = self.make_subset(samples)
    samples = self._get_samples(subset)

    # check if gradients already calculated
    if all([self.data[s].grads_calced for s in samples]) and hasattr(self, 'gradients'):
        if not recalc:
            print("Using existing gradients. Set recalc=True to re-calculate.")
            return

    if not hasattr(self, 'gradients'):
        self.gradients = Bunch()

    # t = 0
    focus = {'uTime': []}
    focus.update({a: [] for a in analytes})

    with self.pbar.set(total=len(samples), desc='Calculating Gradients') as prog:
        for sa in samples:
            s = self.data[sa]
            focus['uTime'].append(s.uTime)
            ind = s.filt.grab_filt(filt)
            grads = calc_grads(s.uTime, s.focus, keys=analytes, win=win)
            for a in analytes:
                tmp = grads[a]
                tmp[~ind] = np.nan
                focus[a].append(tmp)
                s.grads = tmp
            s.grads_calced = True
            prog.update()

    self.gradients.update({k: np.concatenate(v) for k, v, in focus.items()})
    return
def function[get_gradients, parameter[self, analytes, win, filt, samples, subset, recalc]]: constant[ Collect all data from all samples into a single array. Data from standards is not collected. Parameters ---------- filt : str, dict or bool Either logical filter expression contained in a str, a dict of expressions specifying the filter string to use for each analyte or a boolean. Passed to `grab_filt`. samples : str or list which samples to get subset : str or int which subset to get Returns ------- None ] if compare[name[analytes] is constant[None]] begin[:] variable[analytes] assign[=] name[self].analytes if compare[name[samples] is_not constant[None]] begin[:] variable[subset] assign[=] call[name[self].make_subset, parameter[name[samples]]] variable[samples] assign[=] call[name[self]._get_samples, parameter[name[subset]]] if <ast.BoolOp object at 0x7da1b01bd3c0> begin[:] if <ast.UnaryOp object at 0x7da1b01bf310> begin[:] call[name[print], parameter[constant[Using existing gradients. Set recalc=True to re-calculate.]]] return[None] if <ast.UnaryOp object at 0x7da1b01bf1f0> begin[:] name[self].gradients assign[=] call[name[Bunch], parameter[]] variable[focus] assign[=] dictionary[[<ast.Constant object at 0x7da1b01bf640>], [<ast.List object at 0x7da1b01bccd0>]] call[name[focus].update, parameter[<ast.DictComp object at 0x7da1b01bf670>]] with call[name[self].pbar.set, parameter[]] begin[:] for taget[name[sa]] in starred[name[samples]] begin[:] variable[s] assign[=] call[name[self].data][name[sa]] call[call[name[focus]][constant[uTime]].append, parameter[name[s].uTime]] variable[ind] assign[=] call[name[s].filt.grab_filt, parameter[name[filt]]] variable[grads] assign[=] call[name[calc_grads], parameter[name[s].uTime, name[s].focus]] for taget[name[a]] in starred[name[analytes]] begin[:] variable[tmp] assign[=] call[name[grads]][name[a]] call[name[tmp]][<ast.UnaryOp object at 0x7da1b01b9540>] assign[=] name[np].nan call[call[name[focus]][name[a]].append, parameter[name[tmp]]] name[s].grads assign[=] name[tmp] name[s].grads_calced assign[=] constant[True] call[name[prog].update, parameter[]] call[name[self].gradients.update, parameter[<ast.DictComp object at 0x7da1b01b9510>]] return[None]
keyword[def] identifier[get_gradients] ( identifier[self] , identifier[analytes] = keyword[None] , identifier[win] = literal[int] , identifier[filt] = keyword[False] , identifier[samples] = keyword[None] , identifier[subset] = keyword[None] , identifier[recalc] = keyword[True] ): literal[string] keyword[if] identifier[analytes] keyword[is] keyword[None] : identifier[analytes] = identifier[self] . identifier[analytes] keyword[if] identifier[samples] keyword[is] keyword[not] keyword[None] : identifier[subset] = identifier[self] . identifier[make_subset] ( identifier[samples] ) identifier[samples] = identifier[self] . identifier[_get_samples] ( identifier[subset] ) keyword[if] identifier[all] ([ identifier[self] . identifier[data] [ identifier[s] ]. identifier[grads_calced] keyword[for] identifier[s] keyword[in] identifier[samples] ]) keyword[and] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[if] keyword[not] identifier[recalc] : identifier[print] ( literal[string] ) keyword[return] keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[gradients] = identifier[Bunch] () identifier[focus] ={ literal[string] :[]} identifier[focus] . identifier[update] ({ identifier[a] :[] keyword[for] identifier[a] keyword[in] identifier[analytes] }) keyword[with] identifier[self] . identifier[pbar] . identifier[set] ( identifier[total] = identifier[len] ( identifier[samples] ), identifier[desc] = literal[string] ) keyword[as] identifier[prog] : keyword[for] identifier[sa] keyword[in] identifier[samples] : identifier[s] = identifier[self] . identifier[data] [ identifier[sa] ] identifier[focus] [ literal[string] ]. identifier[append] ( identifier[s] . identifier[uTime] ) identifier[ind] = identifier[s] . identifier[filt] . identifier[grab_filt] ( identifier[filt] ) identifier[grads] = identifier[calc_grads] ( identifier[s] . identifier[uTime] , identifier[s] . identifier[focus] , identifier[keys] = identifier[analytes] , identifier[win] = identifier[win] ) keyword[for] identifier[a] keyword[in] identifier[analytes] : identifier[tmp] = identifier[grads] [ identifier[a] ] identifier[tmp] [~ identifier[ind] ]= identifier[np] . identifier[nan] identifier[focus] [ identifier[a] ]. identifier[append] ( identifier[tmp] ) identifier[s] . identifier[grads] = identifier[tmp] identifier[s] . identifier[grads_calced] = keyword[True] identifier[prog] . identifier[update] () identifier[self] . identifier[gradients] . identifier[update] ({ identifier[k] : identifier[np] . identifier[concatenate] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] , keyword[in] identifier[focus] . identifier[items] ()}) keyword[return]
def get_gradients(self, analytes=None, win=15, filt=False, samples=None, subset=None, recalc=True): """ Collect all data from all samples into a single array. Data from standards is not collected. Parameters ---------- filt : str, dict or bool Either logical filter expression contained in a str, a dict of expressions specifying the filter string to use for each analyte or a boolean. Passed to `grab_filt`. samples : str or list which samples to get subset : str or int which subset to get Returns ------- None """ if analytes is None: analytes = self.analytes # depends on [control=['if'], data=['analytes']] if samples is not None: subset = self.make_subset(samples) # depends on [control=['if'], data=['samples']] samples = self._get_samples(subset) # check if gradients already calculated if all([self.data[s].grads_calced for s in samples]) and hasattr(self, 'gradients'): if not recalc: print('Using existing gradients. Set recalc=True to re-calculate.') return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if not hasattr(self, 'gradients'): self.gradients = Bunch() # depends on [control=['if'], data=[]] # t = 0 focus = {'uTime': []} focus.update({a: [] for a in analytes}) with self.pbar.set(total=len(samples), desc='Calculating Gradients') as prog: for sa in samples: s = self.data[sa] focus['uTime'].append(s.uTime) ind = s.filt.grab_filt(filt) grads = calc_grads(s.uTime, s.focus, keys=analytes, win=win) for a in analytes: tmp = grads[a] tmp[~ind] = np.nan focus[a].append(tmp) s.grads = tmp # depends on [control=['for'], data=['a']] s.grads_calced = True prog.update() # depends on [control=['for'], data=['sa']] # depends on [control=['with'], data=['prog']] self.gradients.update({k: np.concatenate(v) for (k, v) in focus.items()}) return
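A hedged latools-style usage sketch; paths, analyte names and window width are placeholders, and the surrounding analyse object is assumed to follow the latools API.

import latools as la

a = la.analyse('data_dir', config='DEFAULT')      # placeholder data directory
a.get_gradients(analytes=['Mg24', 'Ca43'], win=21, filt=False)
print(list(a.gradients.keys()))                   # e.g. ['uTime', 'Mg24', 'Ca43']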
def SegmentMax(a, ids): """ Segmented max op. """ func = lambda idxs: np.amax(a[idxs], axis=0) return seg_map(func, a, ids),
def function[SegmentMax, parameter[a, ids]]: constant[ Segmented max op. ] variable[func] assign[=] <ast.Lambda object at 0x7da1b06fe770> return[tuple[[<ast.Call object at 0x7da1b0651870>]]]
keyword[def] identifier[SegmentMax] ( identifier[a] , identifier[ids] ): literal[string] identifier[func] = keyword[lambda] identifier[idxs] : identifier[np] . identifier[amax] ( identifier[a] [ identifier[idxs] ], identifier[axis] = literal[int] ) keyword[return] identifier[seg_map] ( identifier[func] , identifier[a] , identifier[ids] ),
def SegmentMax(a, ids): """ Segmented max op. """ func = lambda idxs: np.amax(a[idxs], axis=0) return (seg_map(func, a, ids),)
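A self-contained sketch of the segmented max; seg_map is not shown in the source, so the stand-in below assumes it applies func to the row indices of each distinct id in ascending order (mirroring tf.math.segment_max semantics).

import numpy as np

def seg_map(func, a, ids):
    # assumed helper: one output row per unique segment id
    return np.stack([func(np.flatnonzero(ids == i)) for i in np.unique(ids)])

a = np.array([[1.0, 2.0], [3.0, 0.0], [5.0, 4.0]])
ids = np.array([0, 0, 1])
print(SegmentMax(a, ids))  # (array([[3., 2.], [5., 4.]]),)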
def atrm_magic(meas_file, dir_path=".", input_dir_path="", input_spec_file='specimens.txt', output_spec_file='specimens.txt', data_model_num=3):
    """
    Converts ATRM data to best-fit tensor (6 elements plus sigma)

    Parameters
    ----------
    meas_file : str
        input measurement file
    dir_path : str
        output directory, default "."
    input_dir_path : str
        input file directory IF different from dir_path, default ""
    input_spec_file : str
        input specimen file name, default "specimens.txt"
    output_spec_file : str
        output specimen file name, default "specimens.txt"
    data_model_num : number
        MagIC data model [2, 3], default 3

    Returns
    ---------
    Tuple : (True or False indicating if conversion was successful, output file name written)
    """
    # fix up file names
    input_dir_path, dir_path = pmag.fix_directories(input_dir_path, dir_path)
    meas_file = pmag.resolve_file_name(meas_file, input_dir_path)
    rmag_anis = os.path.join(dir_path, 'rmag_anisotropy.txt')
    rmag_res = os.path.join(dir_path, 'rmag_results.txt')
    input_spec_file = pmag.resolve_file_name(input_spec_file, input_dir_path)
    output_spec_file = pmag.resolve_file_name(output_spec_file, dir_path)
    # read in data
    if data_model_num == 3:
        meas_data = []
        meas_data3, file_type = pmag.magic_read(meas_file)
        if file_type != 'measurements':
            print(
                "-E- {} is not a valid measurements file, {}".format(meas_file, file_type))
            return False, "{} is not a valid measurements file, {}".format(meas_file, file_type)
        # convert meas_data to 2.5
        for rec in meas_data3:
            meas_map = map_magic.meas_magic3_2_magic2_map
            meas_data.append(map_magic.mapping(rec, meas_map))
        old_spec_recs, file_type = pmag.magic_read(input_spec_file)
        if file_type != 'specimens':
            print("-W- {} is not a valid specimens file ".format(input_spec_file))
            old_spec_recs = []
        spec_recs = []
        for rec in old_spec_recs:
            spec_map = map_magic.spec_magic3_2_magic2_map
            spec_recs.append(map_magic.mapping(rec, spec_map))
    else:
        meas_data, file_type = pmag.magic_read(meas_file)
        if file_type != 'magic_measurements':
            print("-E- {} is not a valid magic_measurements file ".format(file_type))
            return False, "{} is not a valid magic_measurements file, {}".format(meas_file, file_type)
    meas_data = pmag.get_dictitem(
        meas_data, 'magic_method_codes', 'LP-AN-TRM', 'has')
    if not len(meas_data):
        print("-E- No measurement records found with code LP-AN-TRM")
        return False, "No measurement records found with code LP-AN-TRM"
    #
    #
    # get sorted list of unique specimen names
    ssort = []
    for rec in meas_data:
        spec = rec["er_specimen_name"]
        if spec not in ssort:
            ssort.append(spec)
    sids = sorted(ssort)
    #
    #
    # work on each specimen
    #
    specimen, npos = 0, 6
    RmagSpecRecs, RmagResRecs = [], []
    SpecRecs, SpecRecs3 = [], []
    while specimen < len(sids):
        nmeas = 0
        s = sids[specimen]
        RmagSpecRec = {}
        RmagResRec = {}
        # get old specrec here if applicable
        if data_model_num == 3:
            if spec_recs:
                try:
                    RmagResRec = pmag.get_dictitem(
                        spec_recs, 'er_specimen_name', s, 'T')[0]
                    RmagSpecRec = pmag.get_dictitem(
                        spec_recs, 'er_specimen_name', s, 'T')[0]
                except IndexError:
                    pass
        BX, X = [], []
        method_codes = []
        Spec0 = ""
        #
        # find the data from the meas_data file for this sample
        # and get dec, inc, int and convert to x,y,z
        #
        # fish out data for this specimen name
        data = pmag.get_dictitem(meas_data, 'er_specimen_name', s, 'T')
        if len(data) > 5:
            RmagSpecRec["rmag_anisotropy_name"] = data[0]["er_specimen_name"]
            RmagSpecRec["er_location_name"] = data[0].get(
                "er_location_name", "")
            RmagSpecRec["er_specimen_name"] = data[0]["er_specimen_name"]
            if not "er_sample_name" in RmagSpecRec:
                RmagSpecRec["er_sample_name"] = data[0].get(
                    "er_sample_name", "")
            RmagSpecRec["er_site_name"] = data[0].get("er_site_name", "")
            RmagSpecRec["magic_experiment_names"] = RmagSpecRec["rmag_anisotropy_name"] + ":ATRM"
            RmagSpecRec["er_citation_names"] = "This study"
            RmagResRec["rmag_result_name"] = data[0]["er_specimen_name"] + ":ATRM"
            RmagResRec["er_location_names"] = data[0].get(
                "er_location_names", "")
            RmagResRec["er_specimen_names"] = data[0]["er_specimen_name"]
            if data_model_num == 2:
                RmagResRec["er_sample_names"] = data[0].get(
                    "er_sample_name", "")
                RmagResRec["er_site_names"] = data[0].get("er_site_name", "")
            RmagResRec["magic_experiment_names"] = RmagSpecRec["rmag_anisotropy_name"] + ":ATRM"
            RmagResRec["er_citation_names"] = "This study"
            RmagSpecRec["anisotropy_type"] = "ATRM"
            if "magic_instrument_codes" in list(data[0].keys()):
                RmagSpecRec["magic_instrument_codes"] = data[0]["magic_instrument_codes"]
            else:
                RmagSpecRec["magic_instrument_codes"] = ""
            RmagSpecRec["anisotropy_description"] = "Hext statistics adapted to ATRM"
            for rec in data:
                meths = rec['magic_method_codes'].strip().split(':')
                Dir = []
                Dir.append(float(rec["measurement_dec"]))
                Dir.append(float(rec["measurement_inc"]))
                Dir.append(float(rec["measurement_magn_moment"]))
                if "LT-T-Z" in meths:
                    BX.append(pmag.dir2cart(Dir))  # append baseline steps
                elif "LT-T-I" in meths:
                    X.append(pmag.dir2cart(Dir))
                    nmeas += 1
        #
        if len(BX) == 1:
            for i in range(len(X) - 1):
                BX.append(BX[0])  # assume first 0 field step as baseline
        elif len(BX) == 0:  # assume baseline is zero
            for i in range(len(X)):
                BX.append([0., 0., 0.])  # assume baseline of 0
        elif len(BX) != len(X):  # if BX isn't just one measurement or one in between every infield step, just assume it is zero
            print('something odd about the baselines - just assuming zero')
            for i in range(len(X)):
                BX.append([0., 0., 0.])  # assume baseline of 0
        if nmeas < 6:  # must have at least 6 measurements right now -
            print('skipping specimen ', s, ' too few measurements')
            specimen += 1
        else:
            # B matrix made from design matrix for positions
            B, H, tmpH = pmag.designATRM(npos)
            #
            # subtract optional baseline and put in a work array
            #
            work = np.zeros((nmeas, 3), 'f')
            for i in range(nmeas):
                for j in range(3):
                    # subtract baseline, if available
                    work[i][j] = X[i][j] - BX[i][j]
            #
            # calculate tensor elements
            # first put ARM components in w vector
            #
            w = np.zeros((npos * 3), 'f')
            index = 0
            for i in range(npos):
                for j in range(3):
                    w[index] = work[i][j]
                    index += 1
            s = np.zeros((6), 'f')  # initialize the s matrix
            for i in range(6):
                for j in range(len(w)):
                    s[i] += B[i][j] * w[j]
            trace = s[0] + s[1] + s[2]  # normalize by the trace
            for i in range(6):
                s[i] = s[i] / trace
            a = pmag.s2a(s)
            # ------------------------------------------------------------
            # Calculating dels is different than in the Kappabridge
            # routine. Use trace normalized tensor (a) and the applied
            # unit field directions (tmpH) to generate model X,Y,Z
            # components. Then compare these with the measured values.
            # ------------------------------------------------------------
            S = 0.
            comp = np.zeros((npos * 3), 'f')
            for i in range(npos):
                for j in range(3):
                    index = i * 3 + j
                    compare = a[j][0] * tmpH[i][0] + a[j][1] * \
                        tmpH[i][1] + a[j][2] * tmpH[i][2]
                    comp[index] = compare
            for i in range(npos * 3):
                d = (w[i] / trace) - comp[i]  # del values
                S += d * d
            nf = float(npos * 3. - 6.)
# number of degrees of freedom if S > 0: sigma = np.sqrt(S / nf) else: sigma = 0 hpars = pmag.dohext(nf, sigma, s) # # prepare for output # RmagSpecRec["anisotropy_s1"] = '%8.6f' % (s[0]) RmagSpecRec["anisotropy_s2"] = '%8.6f' % (s[1]) RmagSpecRec["anisotropy_s3"] = '%8.6f' % (s[2]) RmagSpecRec["anisotropy_s4"] = '%8.6f' % (s[3]) RmagSpecRec["anisotropy_s5"] = '%8.6f' % (s[4]) RmagSpecRec["anisotropy_s6"] = '%8.6f' % (s[5]) RmagSpecRec["anisotropy_mean"] = '%8.3e' % (trace / 3) RmagSpecRec["anisotropy_sigma"] = '%8.6f' % (sigma) RmagSpecRec["anisotropy_unit"] = "Am^2" RmagSpecRec["anisotropy_n"] = '%i' % (npos) RmagSpecRec["anisotropy_tilt_correction"] = '-1' # used by thellier_gui - must be taken out for uploading RmagSpecRec["anisotropy_F"] = '%7.1f ' % (hpars["F"]) # used by thellier_gui - must be taken out for uploading RmagSpecRec["anisotropy_F_crit"] = hpars["F_crit"] RmagResRec["anisotropy_t1"] = '%8.6f ' % (hpars["t1"]) RmagResRec["anisotropy_t2"] = '%8.6f ' % (hpars["t2"]) RmagResRec["anisotropy_t3"] = '%8.6f ' % (hpars["t3"]) RmagResRec["anisotropy_v1_dec"] = '%7.1f ' % (hpars["v1_dec"]) RmagResRec["anisotropy_v2_dec"] = '%7.1f ' % (hpars["v2_dec"]) RmagResRec["anisotropy_v3_dec"] = '%7.1f ' % (hpars["v3_dec"]) RmagResRec["anisotropy_v1_inc"] = '%7.1f ' % (hpars["v1_inc"]) RmagResRec["anisotropy_v2_inc"] = '%7.1f ' % (hpars["v2_inc"]) RmagResRec["anisotropy_v3_inc"] = '%7.1f ' % (hpars["v3_inc"]) RmagResRec["anisotropy_ftest"] = '%7.1f ' % (hpars["F"]) RmagResRec["anisotropy_ftest12"] = '%7.1f ' % (hpars["F12"]) RmagResRec["anisotropy_ftest23"] = '%7.1f ' % (hpars["F23"]) RmagResRec["result_description"] = 'Critical F: ' + \ hpars["F_crit"] + ';Critical F12/F13: ' + hpars["F12_crit"] if hpars["e12"] > hpars["e13"]: RmagResRec["anisotropy_v1_zeta_semi_angle"] = '%7.1f ' % ( hpars['e12']) RmagResRec["anisotropy_v1_zeta_dec"] = '%7.1f ' % ( hpars['v2_dec']) RmagResRec["anisotropy_v1_zeta_inc"] = '%7.1f ' % ( hpars['v2_inc']) RmagResRec["anisotropy_v2_zeta_semi_angle"] = '%7.1f ' % ( hpars['e12']) RmagResRec["anisotropy_v2_zeta_dec"] = '%7.1f ' % ( hpars['v1_dec']) RmagResRec["anisotropy_v2_zeta_inc"] = '%7.1f ' % ( hpars['v1_inc']) RmagResRec["anisotropy_v1_eta_semi_angle"] = '%7.1f ' % ( hpars['e13']) RmagResRec["anisotropy_v1_eta_dec"] = '%7.1f ' % ( hpars['v3_dec']) RmagResRec["anisotropy_v1_eta_inc"] = '%7.1f ' % ( hpars['v3_inc']) RmagResRec["anisotropy_v3_eta_semi_angle"] = '%7.1f ' % ( hpars['e13']) RmagResRec["anisotropy_v3_eta_dec"] = '%7.1f ' % ( hpars['v1_dec']) RmagResRec["anisotropy_v3_eta_inc"] = '%7.1f ' % ( hpars['v1_inc']) else: RmagResRec["anisotropy_v1_zeta_semi_angle"] = '%7.1f ' % ( hpars['e13']) RmagResRec["anisotropy_v1_zeta_dec"] = '%7.1f ' % ( hpars['v3_dec']) RmagResRec["anisotropy_v1_zeta_inc"] = '%7.1f ' % ( hpars['v3_inc']) RmagResRec["anisotropy_v3_zeta_semi_angle"] = '%7.1f ' % ( hpars['e13']) RmagResRec["anisotropy_v3_zeta_dec"] = '%7.1f ' % ( hpars['v1_dec']) RmagResRec["anisotropy_v3_zeta_inc"] = '%7.1f ' % ( hpars['v1_inc']) RmagResRec["anisotropy_v1_eta_semi_angle"] = '%7.1f ' % ( hpars['e12']) RmagResRec["anisotropy_v1_eta_dec"] = '%7.1f ' % ( hpars['v2_dec']) RmagResRec["anisotropy_v1_eta_inc"] = '%7.1f ' % ( hpars['v2_inc']) RmagResRec["anisotropy_v2_eta_semi_angle"] = '%7.1f ' % ( hpars['e12']) RmagResRec["anisotropy_v2_eta_dec"] = '%7.1f ' % ( hpars['v1_dec']) RmagResRec["anisotropy_v2_eta_inc"] = '%7.1f ' % ( hpars['v1_inc']) if hpars["e23"] > hpars['e12']: RmagResRec["anisotropy_v2_zeta_semi_angle"] = '%7.1f ' % ( hpars['e23']) 
RmagResRec["anisotropy_v2_zeta_dec"] = '%7.1f ' % ( hpars['v3_dec']) RmagResRec["anisotropy_v2_zeta_inc"] = '%7.1f ' % ( hpars['v3_inc']) RmagResRec["anisotropy_v3_zeta_semi_angle"] = '%7.1f ' % ( hpars['e23']) RmagResRec["anisotropy_v3_zeta_dec"] = '%7.1f ' % ( hpars['v2_dec']) RmagResRec["anisotropy_v3_zeta_inc"] = '%7.1f ' % ( hpars['v2_inc']) RmagResRec["anisotropy_v3_eta_semi_angle"] = '%7.1f ' % ( hpars['e13']) RmagResRec["anisotropy_v3_eta_dec"] = '%7.1f ' % ( hpars['v1_dec']) RmagResRec["anisotropy_v3_eta_inc"] = '%7.1f ' % ( hpars['v1_inc']) RmagResRec["anisotropy_v2_eta_semi_angle"] = '%7.1f ' % ( hpars['e12']) RmagResRec["anisotropy_v2_eta_dec"] = '%7.1f ' % ( hpars['v1_dec']) RmagResRec["anisotropy_v2_eta_inc"] = '%7.1f ' % ( hpars['v1_inc']) else: RmagResRec["anisotropy_v2_zeta_semi_angle"] = '%7.1f ' % ( hpars['e12']) RmagResRec["anisotropy_v2_zeta_dec"] = '%7.1f ' % ( hpars['v1_dec']) RmagResRec["anisotropy_v2_zeta_inc"] = '%7.1f ' % ( hpars['v1_inc']) RmagResRec["anisotropy_v3_eta_semi_angle"] = '%7.1f ' % ( hpars['e23']) RmagResRec["anisotropy_v3_eta_dec"] = '%7.1f ' % ( hpars['v2_dec']) RmagResRec["anisotropy_v3_eta_inc"] = '%7.1f ' % ( hpars['v2_inc']) RmagResRec["anisotropy_v3_zeta_semi_angle"] = '%7.1f ' % ( hpars['e13']) RmagResRec["anisotropy_v3_zeta_dec"] = '%7.1f ' % ( hpars['v1_dec']) RmagResRec["anisotropy_v3_zeta_inc"] = '%7.1f ' % ( hpars['v1_inc']) RmagResRec["anisotropy_v2_eta_semi_angle"] = '%7.1f ' % ( hpars['e23']) RmagResRec["anisotropy_v2_eta_dec"] = '%7.1f ' % ( hpars['v3_dec']) RmagResRec["anisotropy_v2_eta_inc"] = '%7.1f ' % ( hpars['v3_inc']) RmagResRec["tilt_correction"] = '-1' RmagResRec["anisotropy_type"] = 'ATRM' RmagResRec["magic_method_codes"] = 'LP-AN-TRM:AE-H' RmagSpecRec["magic_method_codes"] = 'LP-AN-TRM:AE-H' RmagResRec["magic_software_packages"] = pmag.get_version() RmagSpecRec["magic_software_packages"] = pmag.get_version() RmagSpecRecs.append(RmagSpecRec) RmagResRecs.append(RmagResRec) specimen += 1 if data_model_num == 3: SpecRec = RmagResRec.copy() SpecRec.update(RmagSpecRec) SpecRecs.append(SpecRec) # finished iterating through specimens, # now we need to write out the data to files if data_model_num == 3: # translate records for rec in SpecRecs: rec3 = map_magic.convert_aniso('magic3', rec) SpecRecs3.append(rec3) # write output to 3.0 specimens file pmag.magic_write(output_spec_file, SpecRecs3, 'specimens') print("specimen data stored in {}".format(output_spec_file)) return True, output_spec_file else: # write output to 2.5 rmag_ files pmag.magic_write(rmag_anis, RmagSpecRecs, 'rmag_anisotropy') print("specimen tensor elements stored in ", rmag_anis) pmag.magic_write(rmag_res, RmagResRecs, 'rmag_results') print("specimen statistics and eigenparameters stored in ", rmag_res) return True, rmag_anis
def function[atrm_magic, parameter[meas_file, dir_path, input_dir_path, input_spec_file, output_spec_file, data_model_num]]: constant[ Converts ATRM data to best-fit tensor (6 elements plus sigma) Parameters ---------- meas_file : str input measurement file dir_path : str output directory, default "." input_dir_path : str input file directory IF different from dir_path, default "" input_spec_file : str input specimen file name, default "specimens.txt" output_spec_file : str output specimen file name, default "specimens.txt" data_model_num : number MagIC data model [2, 3], default 3 Returns --------- Tuple : (True or False indicating if conversion was sucessful, output file name written) ] <ast.Tuple object at 0x7da20e9b0220> assign[=] call[name[pmag].fix_directories, parameter[name[input_dir_path], name[dir_path]]] variable[meas_file] assign[=] call[name[pmag].resolve_file_name, parameter[name[meas_file], name[input_dir_path]]] variable[rmag_anis] assign[=] call[name[os].path.join, parameter[name[dir_path], constant[rmag_anisotropy.txt]]] variable[rmag_res] assign[=] call[name[os].path.join, parameter[name[dir_path], constant[rmag_results.txt]]] variable[input_spec_file] assign[=] call[name[pmag].resolve_file_name, parameter[name[input_spec_file], name[input_dir_path]]] variable[output_spec_file] assign[=] call[name[pmag].resolve_file_name, parameter[name[output_spec_file], name[dir_path]]] if compare[name[data_model_num] equal[==] constant[3]] begin[:] variable[meas_data] assign[=] list[[]] <ast.Tuple object at 0x7da20e9b1c60> assign[=] call[name[pmag].magic_read, parameter[name[meas_file]]] if compare[name[file_type] not_equal[!=] constant[measurements]] begin[:] call[name[print], parameter[call[constant[-E- {} is not a valid measurements file, {}].format, parameter[name[meas_file], name[file_type]]]]] return[constant[False]] for taget[name[rec]] in starred[name[meas_data3]] begin[:] variable[meas_map] assign[=] name[map_magic].meas_magic3_2_magic2_map call[name[meas_data].append, parameter[call[name[map_magic].mapping, parameter[name[rec], name[meas_map]]]]] <ast.Tuple object at 0x7da20e9b2800> assign[=] call[name[pmag].magic_read, parameter[name[input_spec_file]]] if compare[name[file_type] not_equal[!=] constant[specimens]] begin[:] call[name[print], parameter[call[constant[-W- {} is not a valid specimens file ].format, parameter[name[input_spec_file]]]]] variable[old_spec_recs] assign[=] list[[]] variable[spec_recs] assign[=] list[[]] for taget[name[rec]] in starred[name[old_spec_recs]] begin[:] variable[spec_map] assign[=] name[map_magic].spec_magic3_2_magic2_map call[name[spec_recs].append, parameter[call[name[map_magic].mapping, parameter[name[rec], name[spec_map]]]]] variable[meas_data] assign[=] call[name[pmag].get_dictitem, parameter[name[meas_data], constant[magic_method_codes], constant[LP-AN-TRM], constant[has]]] if <ast.UnaryOp object at 0x7da1b047a4a0> begin[:] call[name[print], parameter[constant[-E- No measurement records found with code LP-AN-TRM]]] return[tuple[[<ast.Constant object at 0x7da1b0479870>, <ast.Constant object at 0x7da1b047a1a0>]]] variable[ssort] assign[=] list[[]] for taget[name[rec]] in starred[name[meas_data]] begin[:] variable[spec] assign[=] call[name[rec]][constant[er_specimen_name]] if compare[name[spec] <ast.NotIn object at 0x7da2590d7190> name[ssort]] begin[:] call[name[ssort].append, parameter[name[spec]]] variable[sids] assign[=] call[name[sorted], parameter[name[ssort]]] <ast.Tuple object at 0x7da1b047be80> assign[=] tuple[[<ast.Constant 
object at 0x7da1b0479900>, <ast.Constant object at 0x7da1b047a050>]] <ast.Tuple object at 0x7da1b047b5e0> assign[=] tuple[[<ast.List object at 0x7da1b0478280>, <ast.List object at 0x7da1b0478430>]] <ast.Tuple object at 0x7da1b0479090> assign[=] tuple[[<ast.List object at 0x7da1b047bac0>, <ast.List object at 0x7da1b047ae90>]] while compare[name[specimen] less[<] call[name[len], parameter[name[sids]]]] begin[:] variable[nmeas] assign[=] constant[0] variable[s] assign[=] call[name[sids]][name[specimen]] variable[RmagSpecRec] assign[=] dictionary[[], []] variable[RmagResRec] assign[=] dictionary[[], []] if compare[name[data_model_num] equal[==] constant[3]] begin[:] if name[spec_recs] begin[:] <ast.Try object at 0x7da1b0478e50> <ast.Tuple object at 0x7da1b047b4c0> assign[=] tuple[[<ast.List object at 0x7da1b0478e80>, <ast.List object at 0x7da1b0478a90>]] variable[method_codes] assign[=] list[[]] variable[Spec0] assign[=] constant[] variable[data] assign[=] call[name[pmag].get_dictitem, parameter[name[meas_data], constant[er_specimen_name], name[s], constant[T]]] if compare[call[name[len], parameter[name[data]]] greater[>] constant[5]] begin[:] call[name[RmagSpecRec]][constant[rmag_anisotropy_name]] assign[=] call[call[name[data]][constant[0]]][constant[er_specimen_name]] call[name[RmagSpecRec]][constant[er_location_name]] assign[=] call[call[name[data]][constant[0]].get, parameter[constant[er_location_name], constant[]]] call[name[RmagSpecRec]][constant[er_specimen_name]] assign[=] call[call[name[data]][constant[0]]][constant[er_specimen_name]] if <ast.UnaryOp object at 0x7da1b047b8e0> begin[:] call[name[RmagSpecRec]][constant[er_sample_name]] assign[=] call[call[name[data]][constant[0]].get, parameter[constant[er_sample_name], constant[]]] call[name[RmagSpecRec]][constant[er_site_name]] assign[=] call[call[name[data]][constant[0]].get, parameter[constant[er_site_name], constant[]]] call[name[RmagSpecRec]][constant[magic_experiment_names]] assign[=] binary_operation[call[name[RmagSpecRec]][constant[rmag_anisotropy_name]] + constant[:ATRM]] call[name[RmagSpecRec]][constant[er_citation_names]] assign[=] constant[This study] call[name[RmagResRec]][constant[rmag_result_name]] assign[=] binary_operation[call[call[name[data]][constant[0]]][constant[er_specimen_name]] + constant[:ATRM]] call[name[RmagResRec]][constant[er_location_names]] assign[=] call[call[name[data]][constant[0]].get, parameter[constant[er_location_names], constant[]]] call[name[RmagResRec]][constant[er_specimen_names]] assign[=] call[call[name[data]][constant[0]]][constant[er_specimen_name]] if compare[name[data_model_num] equal[==] constant[2]] begin[:] call[name[RmagResRec]][constant[er_sample_names]] assign[=] call[call[name[data]][constant[0]].get, parameter[constant[er_sample_name], constant[]]] call[name[RmagResRec]][constant[er_site_names]] assign[=] call[call[name[data]][constant[0]].get, parameter[constant[er_site_name], constant[]]] call[name[RmagResRec]][constant[magic_experiment_names]] assign[=] binary_operation[call[name[RmagSpecRec]][constant[rmag_anisotropy_name]] + constant[:ATRM]] call[name[RmagResRec]][constant[er_citation_names]] assign[=] constant[This study] call[name[RmagSpecRec]][constant[anisotropy_type]] assign[=] constant[ATRM] if compare[constant[magic_instrument_codes] in call[name[list], parameter[call[call[name[data]][constant[0]].keys, parameter[]]]]] begin[:] call[name[RmagSpecRec]][constant[magic_instrument_codes]] assign[=] call[call[name[data]][constant[0]]][constant[magic_instrument_codes]] for 
taget[name[rec]] in starred[name[data]] begin[:] variable[meths] assign[=] call[call[call[name[rec]][constant[magic_method_codes]].strip, parameter[]].split, parameter[constant[:]]] variable[Dir] assign[=] list[[]] call[name[Dir].append, parameter[call[name[float], parameter[call[name[rec]][constant[measurement_dec]]]]]] call[name[Dir].append, parameter[call[name[float], parameter[call[name[rec]][constant[measurement_inc]]]]]] call[name[Dir].append, parameter[call[name[float], parameter[call[name[rec]][constant[measurement_magn_moment]]]]]] if compare[constant[LT-T-Z] in name[meths]] begin[:] call[name[BX].append, parameter[call[name[pmag].dir2cart, parameter[name[Dir]]]]] if compare[call[name[len], parameter[name[BX]]] equal[==] constant[1]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[X]]] - constant[1]]]]] begin[:] call[name[BX].append, parameter[call[name[BX]][constant[0]]]] if compare[name[nmeas] less[<] constant[6]] begin[:] call[name[print], parameter[constant[skipping specimen ], name[s], constant[ too few measurements]]] <ast.AugAssign object at 0x7da1b0337130> if compare[name[data_model_num] equal[==] constant[3]] begin[:] variable[SpecRec] assign[=] call[name[RmagResRec].copy, parameter[]] call[name[SpecRec].update, parameter[name[RmagSpecRec]]] call[name[SpecRecs].append, parameter[name[SpecRec]]] if compare[name[data_model_num] equal[==] constant[3]] begin[:] for taget[name[rec]] in starred[name[SpecRecs]] begin[:] variable[rec3] assign[=] call[name[map_magic].convert_aniso, parameter[constant[magic3], name[rec]]] call[name[SpecRecs3].append, parameter[name[rec3]]] call[name[pmag].magic_write, parameter[name[output_spec_file], name[SpecRecs3], constant[specimens]]] call[name[print], parameter[call[constant[specimen data stored in {}].format, parameter[name[output_spec_file]]]]] return[tuple[[<ast.Constant object at 0x7da20c6a8e50>, <ast.Name object at 0x7da20c6ab520>]]]
keyword[def] identifier[atrm_magic] ( identifier[meas_file] , identifier[dir_path] = literal[string] , identifier[input_dir_path] = literal[string] , identifier[input_spec_file] = literal[string] , identifier[output_spec_file] = literal[string] , identifier[data_model_num] = literal[int] ): literal[string] identifier[input_dir_path] , identifier[dir_path] = identifier[pmag] . identifier[fix_directories] ( identifier[input_dir_path] , identifier[dir_path] ) identifier[meas_file] = identifier[pmag] . identifier[resolve_file_name] ( identifier[meas_file] , identifier[input_dir_path] ) identifier[rmag_anis] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , literal[string] ) identifier[rmag_res] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , literal[string] ) identifier[input_spec_file] = identifier[pmag] . identifier[resolve_file_name] ( identifier[input_spec_file] , identifier[input_dir_path] ) identifier[output_spec_file] = identifier[pmag] . identifier[resolve_file_name] ( identifier[output_spec_file] , identifier[dir_path] ) keyword[if] identifier[data_model_num] == literal[int] : identifier[meas_data] =[] identifier[meas_data3] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[meas_file] ) keyword[if] identifier[file_type] != literal[string] : identifier[print] ( literal[string] . identifier[format] ( identifier[meas_file] , identifier[file_type] )) keyword[return] keyword[False] keyword[for] identifier[rec] keyword[in] identifier[meas_data3] : identifier[meas_map] = identifier[map_magic] . identifier[meas_magic3_2_magic2_map] identifier[meas_data] . identifier[append] ( identifier[map_magic] . identifier[mapping] ( identifier[rec] , identifier[meas_map] )) identifier[old_spec_recs] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[input_spec_file] ) keyword[if] identifier[file_type] != literal[string] : identifier[print] ( literal[string] . identifier[format] ( identifier[input_spec_file] )) identifier[old_spec_recs] =[] identifier[spec_recs] =[] keyword[for] identifier[rec] keyword[in] identifier[old_spec_recs] : identifier[spec_map] = identifier[map_magic] . identifier[spec_magic3_2_magic2_map] identifier[spec_recs] . identifier[append] ( identifier[map_magic] . identifier[mapping] ( identifier[rec] , identifier[spec_map] )) keyword[else] : identifier[meas_data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[meas_file] ) keyword[if] identifier[file_type] != literal[string] : identifier[print] ( literal[string] . identifier[format] ( identifier[file_type] )) keyword[return] keyword[False] , literal[string] . identifier[format] ( identifier[meas_file] , identifier[file_type] ) identifier[meas_data] = identifier[pmag] . identifier[get_dictitem] ( identifier[meas_data] , literal[string] , literal[string] , literal[string] ) keyword[if] keyword[not] identifier[len] ( identifier[meas_data] ): identifier[print] ( literal[string] ) keyword[return] keyword[False] , literal[string] identifier[ssort] =[] keyword[for] identifier[rec] keyword[in] identifier[meas_data] : identifier[spec] = identifier[rec] [ literal[string] ] keyword[if] identifier[spec] keyword[not] keyword[in] identifier[ssort] : identifier[ssort] . 
identifier[append] ( identifier[spec] ) identifier[sids] = identifier[sorted] ( identifier[ssort] ) identifier[specimen] , identifier[npos] = literal[int] , literal[int] identifier[RmagSpecRecs] , identifier[RmagResRecs] =[],[] identifier[SpecRecs] , identifier[SpecRecs3] =[],[] keyword[while] identifier[specimen] < identifier[len] ( identifier[sids] ): identifier[nmeas] = literal[int] identifier[s] = identifier[sids] [ identifier[specimen] ] identifier[RmagSpecRec] ={} identifier[RmagResRec] ={} keyword[if] identifier[data_model_num] == literal[int] : keyword[if] identifier[spec_recs] : keyword[try] : identifier[RmagResRec] = identifier[pmag] . identifier[get_dictitem] ( identifier[spec_recs] , literal[string] , identifier[s] , literal[string] )[ literal[int] ] identifier[RmagSpecRec] = identifier[pmag] . identifier[get_dictitem] ( identifier[spec_recs] , literal[string] , identifier[s] , literal[string] )[ literal[int] ] keyword[except] identifier[IndexError] : keyword[pass] identifier[BX] , identifier[X] =[],[] identifier[method_codes] =[] identifier[Spec0] = literal[string] identifier[data] = identifier[pmag] . identifier[get_dictitem] ( identifier[meas_data] , literal[string] , identifier[s] , literal[string] ) keyword[if] identifier[len] ( identifier[data] )> literal[int] : identifier[RmagSpecRec] [ literal[string] ]= identifier[data] [ literal[int] ][ literal[string] ] identifier[RmagSpecRec] [ literal[string] ]= identifier[data] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ) identifier[RmagSpecRec] [ literal[string] ]= identifier[data] [ literal[int] ][ literal[string] ] keyword[if] keyword[not] literal[string] keyword[in] identifier[RmagSpecRec] : identifier[RmagSpecRec] [ literal[string] ]= identifier[data] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ) identifier[RmagSpecRec] [ literal[string] ]= identifier[data] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ) identifier[RmagSpecRec] [ literal[string] ]= identifier[RmagSpecRec] [ literal[string] ]+ literal[string] identifier[RmagSpecRec] [ literal[string] ]= literal[string] identifier[RmagResRec] [ literal[string] ]= identifier[data] [ literal[int] ][ literal[string] ]+ literal[string] identifier[RmagResRec] [ literal[string] ]= identifier[data] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ) identifier[RmagResRec] [ literal[string] ]= identifier[data] [ literal[int] ][ literal[string] ] keyword[if] identifier[data_model_num] == literal[int] : identifier[RmagResRec] [ literal[string] ]= identifier[data] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ) identifier[RmagResRec] [ literal[string] ]= identifier[data] [ literal[int] ]. identifier[get] ( literal[string] , literal[string] ) identifier[RmagResRec] [ literal[string] ]= identifier[RmagSpecRec] [ literal[string] ]+ literal[string] identifier[RmagResRec] [ literal[string] ]= literal[string] identifier[RmagSpecRec] [ literal[string] ]= literal[string] keyword[if] literal[string] keyword[in] identifier[list] ( identifier[data] [ literal[int] ]. identifier[keys] ()): identifier[RmagSpecRec] [ literal[string] ]= identifier[data] [ literal[int] ][ literal[string] ] keyword[else] : identifier[RmagSpecRec] [ literal[string] ]= literal[string] identifier[RmagSpecRec] [ literal[string] ]= literal[string] keyword[for] identifier[rec] keyword[in] identifier[data] : identifier[meths] = identifier[rec] [ literal[string] ]. identifier[strip] (). 
identifier[split] ( literal[string] ) identifier[Dir] =[] identifier[Dir] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ])) identifier[Dir] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ])) identifier[Dir] . identifier[append] ( identifier[float] ( identifier[rec] [ literal[string] ])) keyword[if] literal[string] keyword[in] identifier[meths] : identifier[BX] . identifier[append] ( identifier[pmag] . identifier[dir2cart] ( identifier[Dir] )) keyword[elif] literal[string] keyword[in] identifier[meths] : identifier[X] . identifier[append] ( identifier[pmag] . identifier[dir2cart] ( identifier[Dir] )) identifier[nmeas] += literal[int] keyword[if] identifier[len] ( identifier[BX] )== literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[X] )- literal[int] ): identifier[BX] . identifier[append] ( identifier[BX] [ literal[int] ]) keyword[elif] identifier[len] ( identifier[BX] )== literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[X] )): identifier[BX] . identifier[append] ([ literal[int] , literal[int] , literal[int] ]) keyword[elif] identifier[len] ( identifier[BX] )!= identifier[len] ( identifier[X] ): identifier[print] ( literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[X] )): identifier[BX] . identifier[append] ([ literal[int] , literal[int] , literal[int] ]) keyword[if] identifier[nmeas] < literal[int] : identifier[print] ( literal[string] , identifier[s] , literal[string] ) identifier[specimen] += literal[int] keyword[else] : identifier[B] , identifier[H] , identifier[tmpH] = identifier[pmag] . identifier[designATRM] ( identifier[npos] ) identifier[work] = identifier[np] . identifier[zeros] (( identifier[nmeas] , literal[int] ), literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nmeas] ): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ): identifier[work] [ identifier[i] ][ identifier[j] ]= identifier[X] [ identifier[i] ][ identifier[j] ]- identifier[BX] [ identifier[i] ][ identifier[j] ] identifier[w] = identifier[np] . identifier[zeros] (( identifier[npos] * literal[int] ), literal[string] ) identifier[index] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[npos] ): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ): identifier[w] [ identifier[index] ]= identifier[work] [ identifier[i] ][ identifier[j] ] identifier[index] += literal[int] identifier[s] = identifier[np] . identifier[zeros] (( literal[int] ), literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[len] ( identifier[w] )): identifier[s] [ identifier[i] ]+= identifier[B] [ identifier[i] ][ identifier[j] ]* identifier[w] [ identifier[j] ] identifier[trace] = identifier[s] [ literal[int] ]+ identifier[s] [ literal[int] ]+ identifier[s] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[s] [ identifier[i] ]= identifier[s] [ identifier[i] ]/ identifier[trace] identifier[a] = identifier[pmag] . identifier[s2a] ( identifier[s] ) identifier[S] = literal[int] identifier[comp] = identifier[np] . 
identifier[zeros] (( identifier[npos] * literal[int] ), literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[npos] ): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ): identifier[index] = identifier[i] * literal[int] + identifier[j] identifier[compare] = identifier[a] [ identifier[j] ][ literal[int] ]* identifier[tmpH] [ identifier[i] ][ literal[int] ]+ identifier[a] [ identifier[j] ][ literal[int] ]* identifier[tmpH] [ identifier[i] ][ literal[int] ]+ identifier[a] [ identifier[j] ][ literal[int] ]* identifier[tmpH] [ identifier[i] ][ literal[int] ] identifier[comp] [ identifier[index] ]= identifier[compare] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[npos] * literal[int] ): identifier[d] =( identifier[w] [ identifier[i] ]/ identifier[trace] )- identifier[comp] [ identifier[i] ] identifier[S] += identifier[d] * identifier[d] identifier[nf] = identifier[float] ( identifier[npos] * literal[int] - literal[int] ) keyword[if] identifier[S] > literal[int] : identifier[sigma] = identifier[np] . identifier[sqrt] ( identifier[S] / identifier[nf] ) keyword[else] : identifier[sigma] = literal[int] identifier[hpars] = identifier[pmag] . identifier[dohext] ( identifier[nf] , identifier[sigma] , identifier[s] ) identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[s] [ literal[int] ]) identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[s] [ literal[int] ]) identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[s] [ literal[int] ]) identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[s] [ literal[int] ]) identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[s] [ literal[int] ]) identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[s] [ literal[int] ]) identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[trace] / literal[int] ) identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[sigma] ) identifier[RmagSpecRec] [ literal[string] ]= literal[string] identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[npos] ) identifier[RmagSpecRec] [ literal[string] ]= literal[string] identifier[RmagSpecRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagSpecRec] [ literal[string] ]= identifier[hpars] [ literal[string] ] identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] 
[ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] + identifier[hpars] [ literal[string] ]+ literal[string] + identifier[hpars] [ literal[string] ] keyword[if] identifier[hpars] [ literal[string] ]> identifier[hpars] [ literal[string] ]: identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) keyword[else] : identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) keyword[if] identifier[hpars] [ literal[string] ]> identifier[hpars] [ literal[string] ]: identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) 
identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) keyword[else] : identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[RmagResRec] [ literal[string] ]= literal[string] identifier[RmagResRec] [ literal[string] ]= literal[string] identifier[RmagResRec] [ literal[string] ]= literal[string] identifier[RmagSpecRec] [ literal[string] ]= literal[string] identifier[RmagResRec] [ literal[string] ]= identifier[pmag] . identifier[get_version] () identifier[RmagSpecRec] [ literal[string] ]= identifier[pmag] . identifier[get_version] () identifier[RmagSpecRecs] . identifier[append] ( identifier[RmagSpecRec] ) identifier[RmagResRecs] . identifier[append] ( identifier[RmagResRec] ) identifier[specimen] += literal[int] keyword[if] identifier[data_model_num] == literal[int] : identifier[SpecRec] = identifier[RmagResRec] . identifier[copy] () identifier[SpecRec] . identifier[update] ( identifier[RmagSpecRec] ) identifier[SpecRecs] . identifier[append] ( identifier[SpecRec] ) keyword[if] identifier[data_model_num] == literal[int] : keyword[for] identifier[rec] keyword[in] identifier[SpecRecs] : identifier[rec3] = identifier[map_magic] . identifier[convert_aniso] ( literal[string] , identifier[rec] ) identifier[SpecRecs3] . identifier[append] ( identifier[rec3] ) identifier[pmag] . identifier[magic_write] ( identifier[output_spec_file] , identifier[SpecRecs3] , literal[string] ) identifier[print] ( literal[string] . identifier[format] ( identifier[output_spec_file] )) keyword[return] keyword[True] , identifier[output_spec_file] keyword[else] : identifier[pmag] . identifier[magic_write] ( identifier[rmag_anis] , identifier[RmagSpecRecs] , literal[string] ) identifier[print] ( literal[string] , identifier[rmag_anis] ) identifier[pmag] . 
identifier[magic_write] ( identifier[rmag_res] , identifier[RmagResRecs] , literal[string] ) identifier[print] ( literal[string] , identifier[rmag_res] ) keyword[return] keyword[True] , identifier[rmag_anis]
def atrm_magic(meas_file, dir_path='.', input_dir_path='', input_spec_file='specimens.txt', output_spec_file='specimens.txt', data_model_num=3):
    """
    Converts ATRM data to best-fit tensor (6 elements plus sigma)

    Parameters
    ----------
    meas_file : str
        input measurement file
    dir_path : str
        output directory, default "."
    input_dir_path : str
        input file directory IF different from dir_path, default ""
    input_spec_file : str
        input specimen file name, default "specimens.txt"
    output_spec_file : str
        output specimen file name, default "specimens.txt"
    data_model_num : number
        MagIC data model [2, 3], default 3

    Returns
    ---------
    Tuple : (True or False indicating if conversion was successful, output file name written)
    """
    # fix up file names
    (input_dir_path, dir_path) = pmag.fix_directories(input_dir_path, dir_path)
    meas_file = pmag.resolve_file_name(meas_file, input_dir_path)
    rmag_anis = os.path.join(dir_path, 'rmag_anisotropy.txt')
    rmag_res = os.path.join(dir_path, 'rmag_results.txt')
    input_spec_file = pmag.resolve_file_name(input_spec_file, input_dir_path)
    output_spec_file = pmag.resolve_file_name(output_spec_file, dir_path)
    # read in data
    if data_model_num == 3:
        meas_data = []
        (meas_data3, file_type) = pmag.magic_read(meas_file)
        if file_type != 'measurements':
            print('-E- {} is not a valid measurements file, {}'.format(meas_file, file_type))
            return False # depends on [control=['if'], data=['file_type']]
        # convert meas_data to 2.5
        for rec in meas_data3:
            meas_map = map_magic.meas_magic3_2_magic2_map
            meas_data.append(map_magic.mapping(rec, meas_map)) # depends on [control=['for'], data=['rec']]
        (old_spec_recs, file_type) = pmag.magic_read(input_spec_file)
        if file_type != 'specimens':
            print('-W- {} is not a valid specimens file '.format(input_spec_file))
            old_spec_recs = [] # depends on [control=['if'], data=[]]
        spec_recs = []
        for rec in old_spec_recs:
            spec_map = map_magic.spec_magic3_2_magic2_map
            spec_recs.append(map_magic.mapping(rec, spec_map)) # depends on [control=['for'], data=['rec']] # depends on [control=['if'], data=[]]
    else:
        (meas_data, file_type) = pmag.magic_read(meas_file)
        if file_type != 'magic_measurements':
            print('-E- {} is not a valid magic_measurements file '.format(file_type))
            return (False, '{} is not a valid magic_measurements file, {}'.format(meas_file, file_type)) # depends on [control=['if'], data=['file_type']]
    meas_data = pmag.get_dictitem(meas_data, 'magic_method_codes', 'LP-AN-TRM', 'has')
    if not len(meas_data):
        print('-E- No measurement records found with code LP-AN-TRM')
        return (False, 'No measurement records found with code LP-AN-TRM') # depends on [control=['if'], data=[]]
    #
    #
    # get sorted list of unique specimen names
    ssort = []
    for rec in meas_data:
        spec = rec['er_specimen_name']
        if spec not in ssort:
            ssort.append(spec) # depends on [control=['if'], data=['spec', 'ssort']] # depends on [control=['for'], data=['rec']]
    sids = sorted(ssort)
    #
    #
    # work on each specimen
    #
    (specimen, npos) = (0, 6)
    (RmagSpecRecs, RmagResRecs) = ([], [])
    (SpecRecs, SpecRecs3) = ([], [])
    while specimen < len(sids):
        nmeas = 0
        s = sids[specimen]
        RmagSpecRec = {}
        RmagResRec = {}
        # get old specrec here if applicable
        if data_model_num == 3:
            if spec_recs:
                try:
                    RmagResRec = pmag.get_dictitem(spec_recs, 'er_specimen_name', s, 'T')[0]
                    RmagSpecRec = pmag.get_dictitem(spec_recs, 'er_specimen_name', s, 'T')[0] # depends on [control=['try'], data=[]]
                except IndexError:
                    pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
        (BX, X) = ([],
[]) method_codes = [] Spec0 = '' # # find the data from the meas_data file for this sample # and get dec, inc, int and convert to x,y,z # # fish out data for this specimen name data = pmag.get_dictitem(meas_data, 'er_specimen_name', s, 'T') if len(data) > 5: RmagSpecRec['rmag_anisotropy_name'] = data[0]['er_specimen_name'] RmagSpecRec['er_location_name'] = data[0].get('er_location_name', '') RmagSpecRec['er_specimen_name'] = data[0]['er_specimen_name'] if not 'er_sample_name' in RmagSpecRec: RmagSpecRec['er_sample_name'] = data[0].get('er_sample_name', '') # depends on [control=['if'], data=[]] RmagSpecRec['er_site_name'] = data[0].get('er_site_name', '') RmagSpecRec['magic_experiment_names'] = RmagSpecRec['rmag_anisotropy_name'] + ':ATRM' RmagSpecRec['er_citation_names'] = 'This study' RmagResRec['rmag_result_name'] = data[0]['er_specimen_name'] + ':ATRM' RmagResRec['er_location_names'] = data[0].get('er_location_names', '') RmagResRec['er_specimen_names'] = data[0]['er_specimen_name'] if data_model_num == 2: RmagResRec['er_sample_names'] = data[0].get('er_sample_name', '') # depends on [control=['if'], data=[]] RmagResRec['er_site_names'] = data[0].get('er_site_name', '') RmagResRec['magic_experiment_names'] = RmagSpecRec['rmag_anisotropy_name'] + ':ATRM' RmagResRec['er_citation_names'] = 'This study' RmagSpecRec['anisotropy_type'] = 'ATRM' if 'magic_instrument_codes' in list(data[0].keys()): RmagSpecRec['magic_instrument_codes'] = data[0]['magic_instrument_codes'] # depends on [control=['if'], data=[]] else: RmagSpecRec['magic_instrument_codes'] = '' RmagSpecRec['anisotropy_description'] = 'Hext statistics adapted to ATRM' for rec in data: meths = rec['magic_method_codes'].strip().split(':') Dir = [] Dir.append(float(rec['measurement_dec'])) Dir.append(float(rec['measurement_inc'])) Dir.append(float(rec['measurement_magn_moment'])) if 'LT-T-Z' in meths: BX.append(pmag.dir2cart(Dir)) # append baseline steps # depends on [control=['if'], data=[]] elif 'LT-T-I' in meths: X.append(pmag.dir2cart(Dir)) nmeas += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rec']] # depends on [control=['if'], data=[]] # if len(BX) == 1: for i in range(len(X) - 1): BX.append(BX[0]) # assume first 0 field step as baseline # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif len(BX) == 0: # assume baseline is zero for i in range(len(X)): BX.append([0.0, 0.0, 0.0]) # assume baseline of 0 # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif len(BX) != len(X): # if BX isn't just one measurement or one in between every infield step, just assume it is zero print('something odd about the baselines - just assuming zero') for i in range(len(X)): BX.append([0.0, 0.0, 0.0]) # assume baseline of 0 # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] if nmeas < 6: # must have at least 6 measurements right now - print('skipping specimen ', s, ' too few measurements') specimen += 1 # depends on [control=['if'], data=[]] else: # B matrix made from design matrix for positions (B, H, tmpH) = pmag.designATRM(npos) # # subtract optional baseline and put in a work array # work = np.zeros((nmeas, 3), 'f') for i in range(nmeas): for j in range(3): # subtract baseline, if available work[i][j] = X[i][j] - BX[i][j] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] # # calculate tensor elements # first put ARM components in w vector # w = np.zeros(npos * 3, 'f') index = 0 for i in 
range(npos): for j in range(3): w[index] = work[i][j] index += 1 # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] s = np.zeros(6, 'f') # initialize the s matrix for i in range(6): for j in range(len(w)): s[i] += B[i][j] * w[j] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] trace = s[0] + s[1] + s[2] # normalize by the trace for i in range(6): s[i] = s[i] / trace # depends on [control=['for'], data=['i']] a = pmag.s2a(s) # ------------------------------------------------------------ # Calculating dels is different than in the Kappabridge # routine. Use trace normalized tensor (a) and the applied # unit field directions (tmpH) to generate model X,Y,Z # components. Then compare these with the measured values. # ------------------------------------------------------------ S = 0.0 comp = np.zeros(npos * 3, 'f') for i in range(npos): for j in range(3): index = i * 3 + j compare = a[j][0] * tmpH[i][0] + a[j][1] * tmpH[i][1] + a[j][2] * tmpH[i][2] comp[index] = compare # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] for i in range(npos * 3): d = w[i] / trace - comp[i] # del values S += d * d # depends on [control=['for'], data=['i']] nf = float(npos * 3.0 - 6.0) # number of degrees of freedom if S > 0: sigma = np.sqrt(S / nf) # depends on [control=['if'], data=['S']] else: sigma = 0 hpars = pmag.dohext(nf, sigma, s) # # prepare for output # RmagSpecRec['anisotropy_s1'] = '%8.6f' % s[0] RmagSpecRec['anisotropy_s2'] = '%8.6f' % s[1] RmagSpecRec['anisotropy_s3'] = '%8.6f' % s[2] RmagSpecRec['anisotropy_s4'] = '%8.6f' % s[3] RmagSpecRec['anisotropy_s5'] = '%8.6f' % s[4] RmagSpecRec['anisotropy_s6'] = '%8.6f' % s[5] RmagSpecRec['anisotropy_mean'] = '%8.3e' % (trace / 3) RmagSpecRec['anisotropy_sigma'] = '%8.6f' % sigma RmagSpecRec['anisotropy_unit'] = 'Am^2' RmagSpecRec['anisotropy_n'] = '%i' % npos RmagSpecRec['anisotropy_tilt_correction'] = '-1' # used by thellier_gui - must be taken out for uploading RmagSpecRec['anisotropy_F'] = '%7.1f ' % hpars['F'] # used by thellier_gui - must be taken out for uploading RmagSpecRec['anisotropy_F_crit'] = hpars['F_crit'] RmagResRec['anisotropy_t1'] = '%8.6f ' % hpars['t1'] RmagResRec['anisotropy_t2'] = '%8.6f ' % hpars['t2'] RmagResRec['anisotropy_t3'] = '%8.6f ' % hpars['t3'] RmagResRec['anisotropy_v1_dec'] = '%7.1f ' % hpars['v1_dec'] RmagResRec['anisotropy_v2_dec'] = '%7.1f ' % hpars['v2_dec'] RmagResRec['anisotropy_v3_dec'] = '%7.1f ' % hpars['v3_dec'] RmagResRec['anisotropy_v1_inc'] = '%7.1f ' % hpars['v1_inc'] RmagResRec['anisotropy_v2_inc'] = '%7.1f ' % hpars['v2_inc'] RmagResRec['anisotropy_v3_inc'] = '%7.1f ' % hpars['v3_inc'] RmagResRec['anisotropy_ftest'] = '%7.1f ' % hpars['F'] RmagResRec['anisotropy_ftest12'] = '%7.1f ' % hpars['F12'] RmagResRec['anisotropy_ftest23'] = '%7.1f ' % hpars['F23'] RmagResRec['result_description'] = 'Critical F: ' + hpars['F_crit'] + ';Critical F12/F13: ' + hpars['F12_crit'] if hpars['e12'] > hpars['e13']: RmagResRec['anisotropy_v1_zeta_semi_angle'] = '%7.1f ' % hpars['e12'] RmagResRec['anisotropy_v1_zeta_dec'] = '%7.1f ' % hpars['v2_dec'] RmagResRec['anisotropy_v1_zeta_inc'] = '%7.1f ' % hpars['v2_inc'] RmagResRec['anisotropy_v2_zeta_semi_angle'] = '%7.1f ' % hpars['e12'] RmagResRec['anisotropy_v2_zeta_dec'] = '%7.1f ' % hpars['v1_dec'] RmagResRec['anisotropy_v2_zeta_inc'] = '%7.1f ' % hpars['v1_inc'] RmagResRec['anisotropy_v1_eta_semi_angle'] = '%7.1f ' % hpars['e13'] RmagResRec['anisotropy_v1_eta_dec'] = 
'%7.1f ' % hpars['v3_dec'] RmagResRec['anisotropy_v1_eta_inc'] = '%7.1f ' % hpars['v3_inc'] RmagResRec['anisotropy_v3_eta_semi_angle'] = '%7.1f ' % hpars['e13'] RmagResRec['anisotropy_v3_eta_dec'] = '%7.1f ' % hpars['v1_dec'] RmagResRec['anisotropy_v3_eta_inc'] = '%7.1f ' % hpars['v1_inc'] # depends on [control=['if'], data=[]] else: RmagResRec['anisotropy_v1_zeta_semi_angle'] = '%7.1f ' % hpars['e13'] RmagResRec['anisotropy_v1_zeta_dec'] = '%7.1f ' % hpars['v3_dec'] RmagResRec['anisotropy_v1_zeta_inc'] = '%7.1f ' % hpars['v3_inc'] RmagResRec['anisotropy_v3_zeta_semi_angle'] = '%7.1f ' % hpars['e13'] RmagResRec['anisotropy_v3_zeta_dec'] = '%7.1f ' % hpars['v1_dec'] RmagResRec['anisotropy_v3_zeta_inc'] = '%7.1f ' % hpars['v1_inc'] RmagResRec['anisotropy_v1_eta_semi_angle'] = '%7.1f ' % hpars['e12'] RmagResRec['anisotropy_v1_eta_dec'] = '%7.1f ' % hpars['v2_dec'] RmagResRec['anisotropy_v1_eta_inc'] = '%7.1f ' % hpars['v2_inc'] RmagResRec['anisotropy_v2_eta_semi_angle'] = '%7.1f ' % hpars['e12'] RmagResRec['anisotropy_v2_eta_dec'] = '%7.1f ' % hpars['v1_dec'] RmagResRec['anisotropy_v2_eta_inc'] = '%7.1f ' % hpars['v1_inc'] if hpars['e23'] > hpars['e12']: RmagResRec['anisotropy_v2_zeta_semi_angle'] = '%7.1f ' % hpars['e23'] RmagResRec['anisotropy_v2_zeta_dec'] = '%7.1f ' % hpars['v3_dec'] RmagResRec['anisotropy_v2_zeta_inc'] = '%7.1f ' % hpars['v3_inc'] RmagResRec['anisotropy_v3_zeta_semi_angle'] = '%7.1f ' % hpars['e23'] RmagResRec['anisotropy_v3_zeta_dec'] = '%7.1f ' % hpars['v2_dec'] RmagResRec['anisotropy_v3_zeta_inc'] = '%7.1f ' % hpars['v2_inc'] RmagResRec['anisotropy_v3_eta_semi_angle'] = '%7.1f ' % hpars['e13'] RmagResRec['anisotropy_v3_eta_dec'] = '%7.1f ' % hpars['v1_dec'] RmagResRec['anisotropy_v3_eta_inc'] = '%7.1f ' % hpars['v1_inc'] RmagResRec['anisotropy_v2_eta_semi_angle'] = '%7.1f ' % hpars['e12'] RmagResRec['anisotropy_v2_eta_dec'] = '%7.1f ' % hpars['v1_dec'] RmagResRec['anisotropy_v2_eta_inc'] = '%7.1f ' % hpars['v1_inc'] # depends on [control=['if'], data=[]] else: RmagResRec['anisotropy_v2_zeta_semi_angle'] = '%7.1f ' % hpars['e12'] RmagResRec['anisotropy_v2_zeta_dec'] = '%7.1f ' % hpars['v1_dec'] RmagResRec['anisotropy_v2_zeta_inc'] = '%7.1f ' % hpars['v1_inc'] RmagResRec['anisotropy_v3_eta_semi_angle'] = '%7.1f ' % hpars['e23'] RmagResRec['anisotropy_v3_eta_dec'] = '%7.1f ' % hpars['v2_dec'] RmagResRec['anisotropy_v3_eta_inc'] = '%7.1f ' % hpars['v2_inc'] RmagResRec['anisotropy_v3_zeta_semi_angle'] = '%7.1f ' % hpars['e13'] RmagResRec['anisotropy_v3_zeta_dec'] = '%7.1f ' % hpars['v1_dec'] RmagResRec['anisotropy_v3_zeta_inc'] = '%7.1f ' % hpars['v1_inc'] RmagResRec['anisotropy_v2_eta_semi_angle'] = '%7.1f ' % hpars['e23'] RmagResRec['anisotropy_v2_eta_dec'] = '%7.1f ' % hpars['v3_dec'] RmagResRec['anisotropy_v2_eta_inc'] = '%7.1f ' % hpars['v3_inc'] RmagResRec['tilt_correction'] = '-1' RmagResRec['anisotropy_type'] = 'ATRM' RmagResRec['magic_method_codes'] = 'LP-AN-TRM:AE-H' RmagSpecRec['magic_method_codes'] = 'LP-AN-TRM:AE-H' RmagResRec['magic_software_packages'] = pmag.get_version() RmagSpecRec['magic_software_packages'] = pmag.get_version() RmagSpecRecs.append(RmagSpecRec) RmagResRecs.append(RmagResRec) specimen += 1 if data_model_num == 3: SpecRec = RmagResRec.copy() SpecRec.update(RmagSpecRec) SpecRecs.append(SpecRec) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['specimen']] # finished iterating through specimens, # now we need to write out the data to files if data_model_num == 3: # translate records for rec in SpecRecs: rec3 = 
map_magic.convert_aniso('magic3', rec) SpecRecs3.append(rec3) # depends on [control=['for'], data=['rec']] # write output to 3.0 specimens file pmag.magic_write(output_spec_file, SpecRecs3, 'specimens') print('specimen data stored in {}'.format(output_spec_file)) return (True, output_spec_file) # depends on [control=['if'], data=[]] else: # write output to 2.5 rmag_ files pmag.magic_write(rmag_anis, RmagSpecRecs, 'rmag_anisotropy') print('specimen tensor elements stored in ', rmag_anis) pmag.magic_write(rmag_res, RmagResRecs, 'rmag_results') print('specimen statistics and eigenparameters stored in ', rmag_res) return (True, rmag_anis)
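The accumulation loops in `atrm_magic` above (filling `s` from the design matrix `B` and the flattened measurement vector `w`, then dividing by the trace) reduce to a matrix-vector product. A minimal numpy sketch, with toy random values standing in for `pmag.designATRM` output and real measurements:

import numpy as np

# Toy stand-ins: B maps the 18 measurement components (6 positions x 3 axes)
# onto the 6 anisotropy tensor elements; values here are random, not real data.
rng = np.random.default_rng(0)
npos = 6
B = rng.normal(size=(6, npos * 3))
w = rng.normal(size=npos * 3)

s = B @ w                      # equivalent to the nested s[i] += B[i][j] * w[j] loops
trace = s[0] + s[1] + s[2]
s = s / trace                  # trace-normalize, as the function does
print(s)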
def encode(self, data, content_encoding="aes128gcm"): """Encrypt the data. :param data: A serialized block of byte data (String, JSON, bit array, etc.) Make sure that whatever you send, your client knows how to understand it. :type data: str :param content_encoding: The content_encoding type to use to encrypt the data. Defaults to RFC8188 "aes128gcm". The previous draft-01 is "aesgcm", however this format is now deprecated. :type content_encoding: enum("aesgcm", "aes128gcm") """ # Salt is a random 16 byte array. if not data: return if not self.auth_key or not self.receiver_key: raise WebPushException("No keys specified in subscription info") salt = None if content_encoding not in self.valid_encodings: raise WebPushException("Invalid content encoding specified. " "Select from " + json.dumps(self.valid_encodings)) if content_encoding == "aesgcm": salt = os.urandom(16) # The server key is an ephemeral ECDH key used only for this # transaction server_key = ec.generate_private_key(ec.SECP256R1, default_backend()) crypto_key = server_key.public_key().public_bytes( encoding=serialization.Encoding.X962, format=serialization.PublicFormat.UncompressedPoint ) if isinstance(data, six.string_types): data = bytes(data.encode('utf8')) if content_encoding == "aes128gcm": encrypted = http_ece.encrypt( data, salt=salt, private_key=server_key, dh=self.receiver_key, auth_secret=self.auth_key, version=content_encoding) reply = CaseInsensitiveDict({ 'body': encrypted }) else: crypto_key = base64.urlsafe_b64encode(crypto_key).strip(b'=') encrypted = http_ece.encrypt( data, salt=salt, private_key=server_key, keyid=crypto_key.decode(), dh=self.receiver_key, auth_secret=self.auth_key, version=content_encoding) reply = CaseInsensitiveDict({ 'crypto_key': crypto_key, 'body': encrypted, }) if salt: reply['salt'] = base64.urlsafe_b64encode(salt).strip(b'=') return reply
def function[encode, parameter[self, data, content_encoding]]: constant[Encrypt the data. :param data: A serialized block of byte data (String, JSON, bit array, etc.) Make sure that whatever you send, your client knows how to understand it. :type data: str :param content_encoding: The content_encoding type to use to encrypt the data. Defaults to RFC8188 "aes128gcm". The previous draft-01 is "aesgcm", however this format is now deprecated. :type content_encoding: enum("aesgcm", "aes128gcm") ] if <ast.UnaryOp object at 0x7da1b12c4be0> begin[:] return[None] if <ast.BoolOp object at 0x7da1b12c54b0> begin[:] <ast.Raise object at 0x7da1b12c5fc0> variable[salt] assign[=] constant[None] if compare[name[content_encoding] <ast.NotIn object at 0x7da2590d7190> name[self].valid_encodings] begin[:] <ast.Raise object at 0x7da1b12c4790> if compare[name[content_encoding] equal[==] constant[aesgcm]] begin[:] variable[salt] assign[=] call[name[os].urandom, parameter[constant[16]]] variable[server_key] assign[=] call[name[ec].generate_private_key, parameter[name[ec].SECP256R1, call[name[default_backend], parameter[]]]] variable[crypto_key] assign[=] call[call[name[server_key].public_key, parameter[]].public_bytes, parameter[]] if call[name[isinstance], parameter[name[data], name[six].string_types]] begin[:] variable[data] assign[=] call[name[bytes], parameter[call[name[data].encode, parameter[constant[utf8]]]]] if compare[name[content_encoding] equal[==] constant[aes128gcm]] begin[:] variable[encrypted] assign[=] call[name[http_ece].encrypt, parameter[name[data]]] variable[reply] assign[=] call[name[CaseInsensitiveDict], parameter[dictionary[[<ast.Constant object at 0x7da1b1251ab0>], [<ast.Name object at 0x7da1b1253190>]]]] return[name[reply]]
keyword[def] identifier[encode] ( identifier[self] , identifier[data] , identifier[content_encoding] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[data] : keyword[return] keyword[if] keyword[not] identifier[self] . identifier[auth_key] keyword[or] keyword[not] identifier[self] . identifier[receiver_key] : keyword[raise] identifier[WebPushException] ( literal[string] ) identifier[salt] = keyword[None] keyword[if] identifier[content_encoding] keyword[not] keyword[in] identifier[self] . identifier[valid_encodings] : keyword[raise] identifier[WebPushException] ( literal[string] literal[string] + identifier[json] . identifier[dumps] ( identifier[self] . identifier[valid_encodings] )) keyword[if] identifier[content_encoding] == literal[string] : identifier[salt] = identifier[os] . identifier[urandom] ( literal[int] ) identifier[server_key] = identifier[ec] . identifier[generate_private_key] ( identifier[ec] . identifier[SECP256R1] , identifier[default_backend] ()) identifier[crypto_key] = identifier[server_key] . identifier[public_key] (). identifier[public_bytes] ( identifier[encoding] = identifier[serialization] . identifier[Encoding] . identifier[X962] , identifier[format] = identifier[serialization] . identifier[PublicFormat] . identifier[UncompressedPoint] ) keyword[if] identifier[isinstance] ( identifier[data] , identifier[six] . identifier[string_types] ): identifier[data] = identifier[bytes] ( identifier[data] . identifier[encode] ( literal[string] )) keyword[if] identifier[content_encoding] == literal[string] : identifier[encrypted] = identifier[http_ece] . identifier[encrypt] ( identifier[data] , identifier[salt] = identifier[salt] , identifier[private_key] = identifier[server_key] , identifier[dh] = identifier[self] . identifier[receiver_key] , identifier[auth_secret] = identifier[self] . identifier[auth_key] , identifier[version] = identifier[content_encoding] ) identifier[reply] = identifier[CaseInsensitiveDict] ({ literal[string] : identifier[encrypted] }) keyword[else] : identifier[crypto_key] = identifier[base64] . identifier[urlsafe_b64encode] ( identifier[crypto_key] ). identifier[strip] ( literal[string] ) identifier[encrypted] = identifier[http_ece] . identifier[encrypt] ( identifier[data] , identifier[salt] = identifier[salt] , identifier[private_key] = identifier[server_key] , identifier[keyid] = identifier[crypto_key] . identifier[decode] (), identifier[dh] = identifier[self] . identifier[receiver_key] , identifier[auth_secret] = identifier[self] . identifier[auth_key] , identifier[version] = identifier[content_encoding] ) identifier[reply] = identifier[CaseInsensitiveDict] ({ literal[string] : identifier[crypto_key] , literal[string] : identifier[encrypted] , }) keyword[if] identifier[salt] : identifier[reply] [ literal[string] ]= identifier[base64] . identifier[urlsafe_b64encode] ( identifier[salt] ). identifier[strip] ( literal[string] ) keyword[return] identifier[reply]
def encode(self, data, content_encoding='aes128gcm'): """Encrypt the data. :param data: A serialized block of byte data (String, JSON, bit array, etc.) Make sure that whatever you send, your client knows how to understand it. :type data: str :param content_encoding: The content_encoding type to use to encrypt the data. Defaults to RFC8188 "aes128gcm". The previous draft-01 is "aesgcm", however this format is now deprecated. :type content_encoding: enum("aesgcm", "aes128gcm") """ # Salt is a random 16 byte array. if not data: return # depends on [control=['if'], data=[]] if not self.auth_key or not self.receiver_key: raise WebPushException('No keys specified in subscription info') # depends on [control=['if'], data=[]] salt = None if content_encoding not in self.valid_encodings: raise WebPushException('Invalid content encoding specified. Select from ' + json.dumps(self.valid_encodings)) # depends on [control=['if'], data=[]] if content_encoding == 'aesgcm': salt = os.urandom(16) # depends on [control=['if'], data=[]] # The server key is an ephemeral ECDH key used only for this # transaction server_key = ec.generate_private_key(ec.SECP256R1, default_backend()) crypto_key = server_key.public_key().public_bytes(encoding=serialization.Encoding.X962, format=serialization.PublicFormat.UncompressedPoint) if isinstance(data, six.string_types): data = bytes(data.encode('utf8')) # depends on [control=['if'], data=[]] if content_encoding == 'aes128gcm': encrypted = http_ece.encrypt(data, salt=salt, private_key=server_key, dh=self.receiver_key, auth_secret=self.auth_key, version=content_encoding) reply = CaseInsensitiveDict({'body': encrypted}) # depends on [control=['if'], data=['content_encoding']] else: crypto_key = base64.urlsafe_b64encode(crypto_key).strip(b'=') encrypted = http_ece.encrypt(data, salt=salt, private_key=server_key, keyid=crypto_key.decode(), dh=self.receiver_key, auth_secret=self.auth_key, version=content_encoding) reply = CaseInsensitiveDict({'crypto_key': crypto_key, 'body': encrypted}) if salt: reply['salt'] = base64.urlsafe_b64encode(salt).strip(b'=') # depends on [control=['if'], data=[]] return reply
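The ephemeral-key step inside `encode` can be exercised on its own with the `cryptography` package; a minimal sketch (the assertion just documents the X9.62 uncompressed-point layout this method serializes):

import base64

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import ec

# One ephemeral ECDH key per transaction, as in the method above.
server_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
crypto_key = server_key.public_key().public_bytes(
    encoding=serialization.Encoding.X962,
    format=serialization.PublicFormat.UncompressedPoint,
)
assert len(crypto_key) == 65 and crypto_key[0] == 0x04  # uncompressed P-256 point
# The "aesgcm" branch ships this base64url form (padding stripped) as a key id.
print(base64.urlsafe_b64encode(crypto_key).strip(b"="))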
def get_anchor_diff(anchor):
    """Get the diff between an anchor and the current state of its source.

    Returns: A tuple of diff lines. If there is no difference, then this
        returns an empty tuple.

    """
    new_anchor = make_anchor(
        file_path=anchor.file_path,
        offset=anchor.context.offset,
        width=len(anchor.context.topic),
        context_width=anchor.context.width,
        metadata=anchor.metadata)

    assert anchor.file_path == new_anchor.file_path
    assert anchor.context.offset == new_anchor.context.offset
    assert len(anchor.context.topic) == len(new_anchor.context.topic)
    assert anchor.metadata == new_anchor.metadata

    return tuple(
        _context_diff(
            anchor.file_path,
            anchor.context,
            new_anchor.context))
def function[get_anchor_diff, parameter[anchor]]:
    constant[Get the diff between an anchor and the current state of its source.

    Returns: A tuple of diff lines. If there is no difference, then this
        returns an empty tuple.

    ]
    variable[new_anchor] assign[=] call[name[make_anchor], parameter[]]
    assert[compare[name[anchor].file_path equal[==] name[new_anchor].file_path]]
    assert[compare[name[anchor].context.offset equal[==] name[new_anchor].context.offset]]
    assert[compare[call[name[len], parameter[name[anchor].context.topic]] equal[==] call[name[len], parameter[name[new_anchor].context.topic]]]]
    assert[compare[name[anchor].metadata equal[==] name[new_anchor].metadata]]
    return[call[name[tuple], parameter[call[name[_context_diff], parameter[name[anchor].file_path, name[anchor].context, name[new_anchor].context]]]]]
keyword[def] identifier[get_anchor_diff] ( identifier[anchor] ): literal[string] identifier[new_anchor] = identifier[make_anchor] ( identifier[file_path] = identifier[anchor] . identifier[file_path] , identifier[offset] = identifier[anchor] . identifier[context] . identifier[offset] , identifier[width] = identifier[len] ( identifier[anchor] . identifier[context] . identifier[topic] ), identifier[context_width] = identifier[anchor] . identifier[context] . identifier[width] , identifier[metadata] = identifier[anchor] . identifier[metadata] ) keyword[assert] identifier[anchor] . identifier[file_path] == identifier[new_anchor] . identifier[file_path] keyword[assert] identifier[anchor] . identifier[context] . identifier[offset] == identifier[new_anchor] . identifier[context] . identifier[offset] keyword[assert] identifier[len] ( identifier[anchor] . identifier[context] . identifier[topic] )== identifier[len] ( identifier[new_anchor] . identifier[context] . identifier[topic] ) keyword[assert] identifier[anchor] . identifier[metadata] == identifier[new_anchor] . identifier[metadata] keyword[return] identifier[tuple] ( identifier[_context_diff] ( identifier[anchor] . identifier[file_path] , identifier[anchor] . identifier[context] , identifier[new_anchor] . identifier[context] ))
def get_anchor_diff(anchor):
    """Get the diff between an anchor and the current state of its source.

    Returns: A tuple of diff lines. If there is no difference, then this
        returns an empty tuple.

    """
    new_anchor = make_anchor(file_path=anchor.file_path, offset=anchor.context.offset, width=len(anchor.context.topic), context_width=anchor.context.width, metadata=anchor.metadata)
    assert anchor.file_path == new_anchor.file_path
    assert anchor.context.offset == new_anchor.context.offset
    assert len(anchor.context.topic) == len(new_anchor.context.topic)
    assert anchor.metadata == new_anchor.metadata
    return tuple(_context_diff(anchor.file_path, anchor.context, new_anchor.context))
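`make_anchor` and `_context_diff` are private helpers that are not shown here; as a hedged stand-in for the comparison step, the standard library's context diff over the anchored and current context text behaves the same way, returning an empty tuple when nothing changed:

import difflib

# Illustrative analogue only; names and file contents are invented.
def context_diff_lines(file_path, anchored, current):
    return tuple(difflib.context_diff(
        anchored.splitlines(keepends=True),
        current.splitlines(keepends=True),
        fromfile='%s (anchored)' % file_path,
        tofile='%s (current)' % file_path))

print(context_diff_lines('spam.py', 'def f():\n', 'def f():\n'))  # ()
print(''.join(context_diff_lines('spam.py', 'def f():\n', 'def g():\n')))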
def broadcast_change():
    '''
    Refresh the windows environment.

    .. note::
        This will only affect new processes and windows. Services will not see
        the change until the system restarts.

    Returns:
        bool: True if successful, otherwise False

    Usage:

    .. code-block:: python

        import salt.utils.win_reg as winreg
        winreg.broadcast_change()
    '''
    # https://msdn.microsoft.com/en-us/library/windows/desktop/ms644952(v=vs.85).aspx
    _, res = win32gui.SendMessageTimeout(
        win32con.HWND_BROADCAST,
        win32con.WM_SETTINGCHANGE,
        0, 0, win32con.SMTO_ABORTIFHUNG, 5000)
    return not bool(res)
def function[broadcast_change, parameter[]]:
    constant[
    Refresh the windows environment.

    .. note::
        This will only affect new processes and windows. Services will not see
        the change until the system restarts.

    Returns:
        bool: True if successful, otherwise False

    Usage:

    .. code-block:: python

        import salt.utils.win_reg as winreg
        winreg.broadcast_change()
    ]
    <ast.Tuple object at 0x7da1b200a1a0> assign[=] call[name[win32gui].SendMessageTimeout, parameter[name[win32con].HWND_BROADCAST, name[win32con].WM_SETTINGCHANGE, constant[0], constant[0], name[win32con].SMTO_ABORTIFHUNG, constant[5000]]]
    return[<ast.UnaryOp object at 0x7da1b20089d0>]
keyword[def] identifier[broadcast_change] (): literal[string] identifier[_] , identifier[res] = identifier[win32gui] . identifier[SendMessageTimeout] ( identifier[win32con] . identifier[HWND_BROADCAST] , identifier[win32con] . identifier[WM_SETTINGCHANGE] , literal[int] , literal[int] , identifier[win32con] . identifier[SMTO_ABORTIFHUNG] , literal[int] ) keyword[return] keyword[not] identifier[bool] ( identifier[res] )
def broadcast_change():
    """
    Refresh the windows environment.

    .. note::
        This will only affect new processes and windows. Services will not see
        the change until the system restarts.

    Returns:
        bool: True if successful, otherwise False

    Usage:

    .. code-block:: python

        import salt.utils.win_reg as winreg
        winreg.broadcast_change()
    """
    # https://msdn.microsoft.com/en-us/library/windows/desktop/ms644952(v=vs.85).aspx
    (_, res) = win32gui.SendMessageTimeout(win32con.HWND_BROADCAST, win32con.WM_SETTINGCHANGE, 0, 0, win32con.SMTO_ABORTIFHUNG, 5000)
    return not bool(res)
def p_expr_assign_op(p): '''expr : variable PLUS_EQUAL expr | variable MINUS_EQUAL expr | variable MUL_EQUAL expr | variable DIV_EQUAL expr | variable CONCAT_EQUAL expr | variable MOD_EQUAL expr | variable AND_EQUAL expr | variable OR_EQUAL expr | variable XOR_EQUAL expr | variable SL_EQUAL expr | variable SR_EQUAL expr''' p[0] = ast.AssignOp(p[2], p[1], p[3], lineno=p.lineno(2))
def function[p_expr_assign_op, parameter[p]]: constant[expr : variable PLUS_EQUAL expr | variable MINUS_EQUAL expr | variable MUL_EQUAL expr | variable DIV_EQUAL expr | variable CONCAT_EQUAL expr | variable MOD_EQUAL expr | variable AND_EQUAL expr | variable OR_EQUAL expr | variable XOR_EQUAL expr | variable SL_EQUAL expr | variable SR_EQUAL expr] call[name[p]][constant[0]] assign[=] call[name[ast].AssignOp, parameter[call[name[p]][constant[2]], call[name[p]][constant[1]], call[name[p]][constant[3]]]]
keyword[def] identifier[p_expr_assign_op] ( identifier[p] ): literal[string] identifier[p] [ literal[int] ]= identifier[ast] . identifier[AssignOp] ( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ], identifier[p] [ literal[int] ], identifier[lineno] = identifier[p] . identifier[lineno] ( literal[int] ))
def p_expr_assign_op(p): """expr : variable PLUS_EQUAL expr | variable MINUS_EQUAL expr | variable MUL_EQUAL expr | variable DIV_EQUAL expr | variable CONCAT_EQUAL expr | variable MOD_EQUAL expr | variable AND_EQUAL expr | variable OR_EQUAL expr | variable XOR_EQUAL expr | variable SL_EQUAL expr | variable SR_EQUAL expr""" p[0] = ast.AssignOp(p[2], p[1], p[3], lineno=p.lineno(2))
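This is a PLY grammar rule: the docstring is the grammar, `p[i]` indexes the matched symbols, and `ast.AssignOp` is the host project's node class (phply-style). A self-contained PLY sketch with invented token definitions and a plain tuple standing in for the real AST node:

import ply.lex as lex
import ply.yacc as yacc

tokens = ('VARIABLE', 'PLUS_EQUAL', 'NUMBER')

t_PLUS_EQUAL = r'\+='
t_VARIABLE = r'\$[A-Za-z_]\w*'
t_NUMBER = r'\d+'
t_ignore = ' \t'

def t_error(t):
    t.lexer.skip(1)

def p_expr_assign_op(p):
    """expr : VARIABLE PLUS_EQUAL NUMBER"""
    # Same shape as the rule above: operator, target, value, line number.
    p[0] = ('AssignOp', p[2], p[1], p[3], p.lineno(2))

def p_error(p):
    pass

lexer = lex.lex()
parser = yacc.yacc(write_tables=False, debug=False)
print(parser.parse('$total += 42', lexer=lexer))  # ('AssignOp', '+=', '$total', '42', 1)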
def certs(self, entity_id, descriptor, use="signing"): ''' Returns certificates for the given Entity ''' ent = self[entity_id] def extract_certs(srvs): res = [] for srv in srvs: if "key_descriptor" in srv: for key in srv["key_descriptor"]: if "use" in key and key["use"] == use: for dat in key["key_info"]["x509_data"]: cert = repack_cert( dat["x509_certificate"]["text"]) if cert not in res: res.append(cert) elif not "use" in key: for dat in key["key_info"]["x509_data"]: cert = repack_cert( dat["x509_certificate"]["text"]) if cert not in res: res.append(cert) return res if descriptor == "any": res = [] for descr in ["spsso", "idpsso", "role", "authn_authority", "attribute_authority", "pdp"]: try: srvs = ent["%s_descriptor" % descr] except KeyError: continue res.extend(extract_certs(srvs)) else: srvs = ent["%s_descriptor" % descriptor] res = extract_certs(srvs) return res
def function[certs, parameter[self, entity_id, descriptor, use]]: constant[ Returns certificates for the given Entity ] variable[ent] assign[=] call[name[self]][name[entity_id]] def function[extract_certs, parameter[srvs]]: variable[res] assign[=] list[[]] for taget[name[srv]] in starred[name[srvs]] begin[:] if compare[constant[key_descriptor] in name[srv]] begin[:] for taget[name[key]] in starred[call[name[srv]][constant[key_descriptor]]] begin[:] if <ast.BoolOp object at 0x7da2041d9b10> begin[:] for taget[name[dat]] in starred[call[call[name[key]][constant[key_info]]][constant[x509_data]]] begin[:] variable[cert] assign[=] call[name[repack_cert], parameter[call[call[name[dat]][constant[x509_certificate]]][constant[text]]]] if compare[name[cert] <ast.NotIn object at 0x7da2590d7190> name[res]] begin[:] call[name[res].append, parameter[name[cert]]] return[name[res]] if compare[name[descriptor] equal[==] constant[any]] begin[:] variable[res] assign[=] list[[]] for taget[name[descr]] in starred[list[[<ast.Constant object at 0x7da2041d9d80>, <ast.Constant object at 0x7da20c993e20>, <ast.Constant object at 0x7da20c991de0>, <ast.Constant object at 0x7da20c990a60>, <ast.Constant object at 0x7da20c9909a0>, <ast.Constant object at 0x7da20c9906d0>]]] begin[:] <ast.Try object at 0x7da20c991ea0> call[name[res].extend, parameter[call[name[extract_certs], parameter[name[srvs]]]]] return[name[res]]
keyword[def] identifier[certs] ( identifier[self] , identifier[entity_id] , identifier[descriptor] , identifier[use] = literal[string] ): literal[string] identifier[ent] = identifier[self] [ identifier[entity_id] ] keyword[def] identifier[extract_certs] ( identifier[srvs] ): identifier[res] =[] keyword[for] identifier[srv] keyword[in] identifier[srvs] : keyword[if] literal[string] keyword[in] identifier[srv] : keyword[for] identifier[key] keyword[in] identifier[srv] [ literal[string] ]: keyword[if] literal[string] keyword[in] identifier[key] keyword[and] identifier[key] [ literal[string] ]== identifier[use] : keyword[for] identifier[dat] keyword[in] identifier[key] [ literal[string] ][ literal[string] ]: identifier[cert] = identifier[repack_cert] ( identifier[dat] [ literal[string] ][ literal[string] ]) keyword[if] identifier[cert] keyword[not] keyword[in] identifier[res] : identifier[res] . identifier[append] ( identifier[cert] ) keyword[elif] keyword[not] literal[string] keyword[in] identifier[key] : keyword[for] identifier[dat] keyword[in] identifier[key] [ literal[string] ][ literal[string] ]: identifier[cert] = identifier[repack_cert] ( identifier[dat] [ literal[string] ][ literal[string] ]) keyword[if] identifier[cert] keyword[not] keyword[in] identifier[res] : identifier[res] . identifier[append] ( identifier[cert] ) keyword[return] identifier[res] keyword[if] identifier[descriptor] == literal[string] : identifier[res] =[] keyword[for] identifier[descr] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: keyword[try] : identifier[srvs] = identifier[ent] [ literal[string] % identifier[descr] ] keyword[except] identifier[KeyError] : keyword[continue] identifier[res] . identifier[extend] ( identifier[extract_certs] ( identifier[srvs] )) keyword[else] : identifier[srvs] = identifier[ent] [ literal[string] % identifier[descriptor] ] identifier[res] = identifier[extract_certs] ( identifier[srvs] ) keyword[return] identifier[res]
def certs(self, entity_id, descriptor, use='signing'): """ Returns certificates for the given Entity """ ent = self[entity_id] def extract_certs(srvs): res = [] for srv in srvs: if 'key_descriptor' in srv: for key in srv['key_descriptor']: if 'use' in key and key['use'] == use: for dat in key['key_info']['x509_data']: cert = repack_cert(dat['x509_certificate']['text']) if cert not in res: res.append(cert) # depends on [control=['if'], data=['cert', 'res']] # depends on [control=['for'], data=['dat']] # depends on [control=['if'], data=[]] elif not 'use' in key: for dat in key['key_info']['x509_data']: cert = repack_cert(dat['x509_certificate']['text']) if cert not in res: res.append(cert) # depends on [control=['if'], data=['cert', 'res']] # depends on [control=['for'], data=['dat']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=['srv']] # depends on [control=['for'], data=['srv']] return res if descriptor == 'any': res = [] for descr in ['spsso', 'idpsso', 'role', 'authn_authority', 'attribute_authority', 'pdp']: try: srvs = ent['%s_descriptor' % descr] # depends on [control=['try'], data=[]] except KeyError: continue # depends on [control=['except'], data=[]] res.extend(extract_certs(srvs)) # depends on [control=['for'], data=['descr']] # depends on [control=['if'], data=[]] else: srvs = ent['%s_descriptor' % descriptor] res = extract_certs(srvs) return res
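The walk inside `extract_certs` is plain dict/list traversal and can be run against a toy metadata fragment; here `repack_cert` is dropped (it isn't shown above) and the certificate text is a placeholder:

srvs = [{
    'key_descriptor': [
        {'use': 'signing',
         'key_info': {'x509_data': [{'x509_certificate': {'text': 'MIIB...SIGN'}}]}},
        {'key_info': {'x509_data': [{'x509_certificate': {'text': 'MIIB...ANY'}}]}},
        {'use': 'encryption',
         'key_info': {'x509_data': [{'x509_certificate': {'text': 'MIIB...ENC'}}]}},
    ]
}]

res = []
for srv in srvs:
    for key in srv.get('key_descriptor', []):
        # A key counts if its "use" matches, or if it declares no "use" at all.
        if key.get('use', 'signing') == 'signing':
            for dat in key['key_info']['x509_data']:
                cert = dat['x509_certificate']['text']
                if cert not in res:
                    res.append(cert)
print(res)  # ['MIIB...SIGN', 'MIIB...ANY']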
def cb(self, min_volume=0):
    """Return QDII data as a dict.

    :param min_volume: minimum trading volume, in units of 10,000 CNY
    """
    # add the current ctime to the URL
    self.__cb_url = self.__cb_url.format(ctime=int(time.time()))
    # request the data
    rep = requests.get(self.__cb_url)
    # load the returned JSON string
    fundjson = json.loads(rep.text)
    # reformat the returned JSON
    data = self.formatjisilujson(fundjson)
    # drop entries whose trading volume is below the threshold
    if min_volume:
        data = {
            k: data[k]
            for k in data
            if float(data[k]["volume"]) > min_volume
        }
    self.__cb = data
    return self.__cb
def function[cb, parameter[self, min_volume]]:
    constant[Return QDII data as a dict.
    :param min_volume: minimum trading volume, in units of 10,000 CNY
    ]
    name[self].__cb_url assign[=] call[name[self].__cb_url.format, parameter[]]
    variable[rep] assign[=] call[name[requests].get, parameter[name[self].__cb_url]]
    variable[fundjson] assign[=] call[name[json].loads, parameter[name[rep].text]]
    variable[data] assign[=] call[name[self].formatjisilujson, parameter[name[fundjson]]]
    if name[min_volume] begin[:]
        variable[data] assign[=] <ast.DictComp object at 0x7da18bc70f40>
    name[self].__cb assign[=] name[data]
    return[name[self].__cb]
keyword[def] identifier[cb] ( identifier[self] , identifier[min_volume] = literal[int] ): literal[string] identifier[self] . identifier[__cb_url] = identifier[self] . identifier[__cb_url] . identifier[format] ( identifier[ctime] = identifier[int] ( identifier[time] . identifier[time] ())) identifier[rep] = identifier[requests] . identifier[get] ( identifier[self] . identifier[__cb_url] ) identifier[fundjson] = identifier[json] . identifier[loads] ( identifier[rep] . identifier[text] ) identifier[data] = identifier[self] . identifier[formatjisilujson] ( identifier[fundjson] ) keyword[if] identifier[min_volume] : identifier[data] ={ identifier[k] : identifier[data] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[data] keyword[if] identifier[float] ( identifier[data] [ identifier[k] ][ literal[string] ])> identifier[min_volume] } identifier[self] . identifier[__cb] = identifier[data] keyword[return] identifier[self] . identifier[__cb]
def cb(self, min_volume=0):
    """Return QDII data as a dict.

    :param min_volume: minimum trading volume, in units of 10,000 CNY
    """
    # add the current ctime to the URL
    self.__cb_url = self.__cb_url.format(ctime=int(time.time()))
    # request the data
    rep = requests.get(self.__cb_url)
    # load the returned JSON string
    fundjson = json.loads(rep.text)
    # reformat the returned JSON
    data = self.formatjisilujson(fundjson)
    # drop entries whose trading volume is below the threshold
    if min_volume:
        data = {k: data[k] for k in data if float(data[k]['volume']) > min_volume} # depends on [control=['if'], data=[]]
    self.__cb = data
    return self.__cb
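The volume filter at the end of `cb` is a single dict comprehension; a toy run with invented rows (volumes arrive as strings in the payload, hence the float() cast):

data = {
    '110030': {'volume': '2500.0'},
    '113009': {'volume': '80.5'},
}
min_volume = 100
filtered = {k: data[k] for k in data if float(data[k]['volume']) > min_volume}
print(filtered)  # {'110030': {'volume': '2500.0'}}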
def _executor_script(self):
    """Create a shell script in charge of executing the benchmark
    and return its path.
    """
    fd, path = tempfile.mkstemp(suffix='.sh', dir=os.getcwd())
    os.close(fd)
    with open(path, 'w') as ostr:
        self._write_executor_script(ostr)
    mode = os.stat(path).st_mode
    os.chmod(path, mode | stat.S_IEXEC | stat.S_IRGRP | stat.S_IRUSR)
    return path
def function[_executor_script, parameter[self]]:
    constant[Create a shell script in charge of executing the benchmark
    and return its path.
    ]
    <ast.Tuple object at 0x7da1b26add20> assign[=] call[name[tempfile].mkstemp, parameter[]]
    call[name[os].close, parameter[name[fd]]]
    with call[name[open], parameter[name[path], constant[w]]] begin[:]
        call[name[self]._write_executor_script, parameter[name[ostr]]]
    variable[mode] assign[=] call[name[os].stat, parameter[name[path]]].st_mode
    call[name[os].chmod, parameter[name[path], binary_operation[binary_operation[binary_operation[name[mode] <ast.BitOr object at 0x7da2590d6aa0> name[stat].S_IEXEC] <ast.BitOr object at 0x7da2590d6aa0> name[stat].S_IRGRP] <ast.BitOr object at 0x7da2590d6aa0> name[stat].S_IRUSR]]]
    return[name[path]]
keyword[def] identifier[_executor_script] ( identifier[self] ): literal[string] identifier[fd] , identifier[path] = identifier[tempfile] . identifier[mkstemp] ( identifier[suffix] = literal[string] , identifier[dir] = identifier[os] . identifier[getcwd] ()) identifier[os] . identifier[close] ( identifier[fd] ) keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[ostr] : identifier[self] . identifier[_write_executor_script] ( identifier[ostr] ) identifier[mode] = identifier[os] . identifier[stat] ( identifier[path] ). identifier[st_mode] identifier[os] . identifier[chmod] ( identifier[path] , identifier[mode] | identifier[stat] . identifier[S_IEXEC] | identifier[stat] . identifier[S_IRGRP] | identifier[stat] . identifier[S_IRUSR] ) keyword[return] identifier[path]
def _executor_script(self):
    """Create a shell script in charge of executing the benchmark
    and return its path.
    """
    (fd, path) = tempfile.mkstemp(suffix='.sh', dir=os.getcwd())
    os.close(fd)
    with open(path, 'w') as ostr:
        self._write_executor_script(ostr) # depends on [control=['with'], data=['ostr']]
    mode = os.stat(path).st_mode
    os.chmod(path, mode | stat.S_IEXEC | stat.S_IRGRP | stat.S_IRUSR)
    return path
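The create-then-mark-executable pattern in `_executor_script` is small enough to demo directly; the script body below is a trivial stand-in for whatever `_write_executor_script` emits:

import os
import stat
import tempfile

fd, path = tempfile.mkstemp(suffix='.sh')
os.close(fd)  # mkstemp hands back an open descriptor; close it before re-opening
with open(path, 'w') as ostr:
    ostr.write('#!/bin/sh\necho benchmark\n')

mode = os.stat(path).st_mode
os.chmod(path, mode | stat.S_IEXEC | stat.S_IRGRP | stat.S_IRUSR)
# mkstemp creates the file 0o600; adding owner-execute and group-read gives 0o740.
print(oct(os.stat(path).st_mode & 0o777))
os.remove(path)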
def wvalue(wave, indep_var): r""" Return the dependent variable value at a given independent variable point. If the independent variable point is not in the independent variable vector the dependent variable value is obtained by linear interpolation :param wave: Waveform :type wave: :py:class:`peng.eng.Waveform` :param indep_var: Independent variable point for which the dependent variable is to be obtained :type indep_var: integer or float :rtype: :py:class:`peng.eng.Waveform` .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]] .. Auto-generated exceptions documentation for .. peng.wave_functions.wvalue :raises: * RuntimeError (Argument \`indep_var\` is not valid) * RuntimeError (Argument \`wave\` is not valid) * ValueError (Argument \`indep_var\` is not in the independent variable vector range) .. [[[end]]] """ close_min = np.isclose(indep_var, wave._indep_vector[0], FP_RTOL, FP_ATOL) close_max = np.isclose(indep_var, wave._indep_vector[-1], FP_RTOL, FP_ATOL) pexdoc.exh.addex( ValueError, "Argument `indep_var` is not in the independent variable vector range", bool( ((indep_var < wave._indep_vector[0]) and (not close_min)) or ((indep_var > wave._indep_vector[-1]) and (not close_max)) ), ) if close_min: return wave._dep_vector[0] if close_max: return wave._dep_vector[-1] idx = np.searchsorted(wave._indep_vector, indep_var) xdelta = wave._indep_vector[idx] - wave._indep_vector[idx - 1] ydelta = wave._dep_vector[idx] - wave._dep_vector[idx - 1] slope = ydelta / float(xdelta) return wave._dep_vector[idx - 1] + slope * (indep_var - wave._indep_vector[idx - 1])
def function[wvalue, parameter[wave, indep_var]]: constant[ Return the dependent variable value at a given independent variable point. If the independent variable point is not in the independent variable vector the dependent variable value is obtained by linear interpolation :param wave: Waveform :type wave: :py:class:`peng.eng.Waveform` :param indep_var: Independent variable point for which the dependent variable is to be obtained :type indep_var: integer or float :rtype: :py:class:`peng.eng.Waveform` .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]] .. Auto-generated exceptions documentation for .. peng.wave_functions.wvalue :raises: * RuntimeError (Argument \`indep_var\` is not valid) * RuntimeError (Argument \`wave\` is not valid) * ValueError (Argument \`indep_var\` is not in the independent variable vector range) .. [[[end]]] ] variable[close_min] assign[=] call[name[np].isclose, parameter[name[indep_var], call[name[wave]._indep_vector][constant[0]], name[FP_RTOL], name[FP_ATOL]]] variable[close_max] assign[=] call[name[np].isclose, parameter[name[indep_var], call[name[wave]._indep_vector][<ast.UnaryOp object at 0x7da18f813190>], name[FP_RTOL], name[FP_ATOL]]] call[name[pexdoc].exh.addex, parameter[name[ValueError], constant[Argument `indep_var` is not in the independent variable vector range], call[name[bool], parameter[<ast.BoolOp object at 0x7da18f811d80>]]]] if name[close_min] begin[:] return[call[name[wave]._dep_vector][constant[0]]] if name[close_max] begin[:] return[call[name[wave]._dep_vector][<ast.UnaryOp object at 0x7da1b0287100>]] variable[idx] assign[=] call[name[np].searchsorted, parameter[name[wave]._indep_vector, name[indep_var]]] variable[xdelta] assign[=] binary_operation[call[name[wave]._indep_vector][name[idx]] - call[name[wave]._indep_vector][binary_operation[name[idx] - constant[1]]]] variable[ydelta] assign[=] binary_operation[call[name[wave]._dep_vector][name[idx]] - call[name[wave]._dep_vector][binary_operation[name[idx] - constant[1]]]] variable[slope] assign[=] binary_operation[name[ydelta] / call[name[float], parameter[name[xdelta]]]] return[binary_operation[call[name[wave]._dep_vector][binary_operation[name[idx] - constant[1]]] + binary_operation[name[slope] * binary_operation[name[indep_var] - call[name[wave]._indep_vector][binary_operation[name[idx] - constant[1]]]]]]]
keyword[def] identifier[wvalue] ( identifier[wave] , identifier[indep_var] ): literal[string] identifier[close_min] = identifier[np] . identifier[isclose] ( identifier[indep_var] , identifier[wave] . identifier[_indep_vector] [ literal[int] ], identifier[FP_RTOL] , identifier[FP_ATOL] ) identifier[close_max] = identifier[np] . identifier[isclose] ( identifier[indep_var] , identifier[wave] . identifier[_indep_vector] [- literal[int] ], identifier[FP_RTOL] , identifier[FP_ATOL] ) identifier[pexdoc] . identifier[exh] . identifier[addex] ( identifier[ValueError] , literal[string] , identifier[bool] ( (( identifier[indep_var] < identifier[wave] . identifier[_indep_vector] [ literal[int] ]) keyword[and] ( keyword[not] identifier[close_min] )) keyword[or] (( identifier[indep_var] > identifier[wave] . identifier[_indep_vector] [- literal[int] ]) keyword[and] ( keyword[not] identifier[close_max] )) ), ) keyword[if] identifier[close_min] : keyword[return] identifier[wave] . identifier[_dep_vector] [ literal[int] ] keyword[if] identifier[close_max] : keyword[return] identifier[wave] . identifier[_dep_vector] [- literal[int] ] identifier[idx] = identifier[np] . identifier[searchsorted] ( identifier[wave] . identifier[_indep_vector] , identifier[indep_var] ) identifier[xdelta] = identifier[wave] . identifier[_indep_vector] [ identifier[idx] ]- identifier[wave] . identifier[_indep_vector] [ identifier[idx] - literal[int] ] identifier[ydelta] = identifier[wave] . identifier[_dep_vector] [ identifier[idx] ]- identifier[wave] . identifier[_dep_vector] [ identifier[idx] - literal[int] ] identifier[slope] = identifier[ydelta] / identifier[float] ( identifier[xdelta] ) keyword[return] identifier[wave] . identifier[_dep_vector] [ identifier[idx] - literal[int] ]+ identifier[slope] *( identifier[indep_var] - identifier[wave] . identifier[_indep_vector] [ identifier[idx] - literal[int] ])
def wvalue(wave, indep_var): """ Return the dependent variable value at a given independent variable point. If the independent variable point is not in the independent variable vector the dependent variable value is obtained by linear interpolation :param wave: Waveform :type wave: :py:class:`peng.eng.Waveform` :param indep_var: Independent variable point for which the dependent variable is to be obtained :type indep_var: integer or float :rtype: :py:class:`peng.eng.Waveform` .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]] .. Auto-generated exceptions documentation for .. peng.wave_functions.wvalue :raises: * RuntimeError (Argument \\`indep_var\\` is not valid) * RuntimeError (Argument \\`wave\\` is not valid) * ValueError (Argument \\`indep_var\\` is not in the independent variable vector range) .. [[[end]]] """ close_min = np.isclose(indep_var, wave._indep_vector[0], FP_RTOL, FP_ATOL) close_max = np.isclose(indep_var, wave._indep_vector[-1], FP_RTOL, FP_ATOL) pexdoc.exh.addex(ValueError, 'Argument `indep_var` is not in the independent variable vector range', bool(indep_var < wave._indep_vector[0] and (not close_min) or (indep_var > wave._indep_vector[-1] and (not close_max)))) if close_min: return wave._dep_vector[0] # depends on [control=['if'], data=[]] if close_max: return wave._dep_vector[-1] # depends on [control=['if'], data=[]] idx = np.searchsorted(wave._indep_vector, indep_var) xdelta = wave._indep_vector[idx] - wave._indep_vector[idx - 1] ydelta = wave._dep_vector[idx] - wave._dep_vector[idx - 1] slope = ydelta / float(xdelta) return wave._dep_vector[idx - 1] + slope * (indep_var - wave._indep_vector[idx - 1])
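The core of wvalue is a searchsorted lookup followed by one linear-interpolation step. Below is a standalone sketch of just that arithmetic, using plain numpy arrays in place of the Waveform's _indep_vector and _dep_vector; the sample data is illustrative:

# Interpolation step of wvalue() in isolation, on plain numpy arrays.
import numpy as np

indep = np.array([0.0, 1.0, 2.0, 4.0])
dep = np.array([10.0, 20.0, 40.0, 80.0])

x = 3.0  # query point strictly inside the range, not on a grid point
idx = np.searchsorted(indep, x)  # first index with indep[idx] >= x -> 3
slope = (dep[idx] - dep[idx - 1]) / float(indep[idx] - indep[idx - 1])
print(dep[idx - 1] + slope * (x - indep[idx - 1]))  # 60.0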
def run_task(message): """Internal ``RUN_TASK`` consumer to run the task's callable""" task = Task.objects.get(pk=message['id']) if task.allow_overlap: task.run(message) else: if not task.running: task.running = True task.save() try: task.run(message) finally: task.running = False task.save()
def function[run_task, parameter[message]]: constant[Internal ``RUN_TASK`` consumer to run the task's callable] variable[task] assign[=] call[name[Task].objects.get, parameter[]] if name[task].allow_overlap begin[:] call[name[task].run, parameter[name[message]]]
keyword[def] identifier[run_task] ( identifier[message] ): literal[string] identifier[task] = identifier[Task] . identifier[objects] . identifier[get] ( identifier[pk] = identifier[message] [ literal[string] ]) keyword[if] identifier[task] . identifier[allow_overlap] : identifier[task] . identifier[run] ( identifier[message] ) keyword[else] : keyword[if] keyword[not] identifier[task] . identifier[running] : identifier[task] . identifier[running] = keyword[True] identifier[task] . identifier[save] () keyword[try] : identifier[task] . identifier[run] ( identifier[message] ) keyword[finally] : identifier[task] . identifier[running] = keyword[False] identifier[task] . identifier[save] ()
def run_task(message): """Internal ``RUN_TASK`` consumer to run the task's callable""" task = Task.objects.get(pk=message['id']) if task.allow_overlap: task.run(message) # depends on [control=['if'], data=[]] elif not task.running: task.running = True task.save() try: task.run(message) # depends on [control=['try'], data=[]] finally: task.running = False task.save() # depends on [control=['if'], data=[]]
def _intersperse_insertion_rows_and_columns(self, pairwise_pvals): """Return pvals matrix with inserted NaN rows and columns, as numpy.ndarray. Each insertion (a header or a subtotal) creates an offset in the calculated pvals. These need to be taken into account when converting each pval to a corresponding column letter. For this reason, we need to insert an all-NaN row and a column at the right indices. These are the inserted indices of each insertion, along respective dimensions. """ for i in self._insertion_indices: pairwise_pvals = np.insert(pairwise_pvals, i, np.nan, axis=0) pairwise_pvals = np.insert(pairwise_pvals, i, np.nan, axis=1) return pairwise_pvals
def function[_intersperse_insertion_rows_and_columns, parameter[self, pairwise_pvals]]: constant[Return pvals matrix with inserted NaN rows and columns, as numpy.ndarray. Each insertion (a header or a subtotal) creates an offset in the calculated pvals. These need to be taken into account when converting each pval to a corresponding column letter. For this reason, we need to insert an all-NaN row and a column at the right indices. These are the inserted indices of each insertion, along respective dimensions. ] for taget[name[i]] in starred[name[self]._insertion_indices] begin[:] variable[pairwise_pvals] assign[=] call[name[np].insert, parameter[name[pairwise_pvals], name[i], name[np].nan]] variable[pairwise_pvals] assign[=] call[name[np].insert, parameter[name[pairwise_pvals], name[i], name[np].nan]] return[name[pairwise_pvals]]
keyword[def] identifier[_intersperse_insertion_rows_and_columns] ( identifier[self] , identifier[pairwise_pvals] ): literal[string] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[_insertion_indices] : identifier[pairwise_pvals] = identifier[np] . identifier[insert] ( identifier[pairwise_pvals] , identifier[i] , identifier[np] . identifier[nan] , identifier[axis] = literal[int] ) identifier[pairwise_pvals] = identifier[np] . identifier[insert] ( identifier[pairwise_pvals] , identifier[i] , identifier[np] . identifier[nan] , identifier[axis] = literal[int] ) keyword[return] identifier[pairwise_pvals]
def _intersperse_insertion_rows_and_columns(self, pairwise_pvals): """Return pvals matrix with inserted NaN rows and columns, as numpy.ndarray. Each insertion (a header or a subtotal) creates an offset in the calculated pvals. These need to be taken into account when converting each pval to a corresponding column letter. For this reason, we need to insert an all-NaN row and a column at the right indices. These are the inserted indices of each insertion, along respective dimensions. """ for i in self._insertion_indices: pairwise_pvals = np.insert(pairwise_pvals, i, np.nan, axis=0) pairwise_pvals = np.insert(pairwise_pvals, i, np.nan, axis=1) # depends on [control=['for'], data=['i']] return pairwise_pvals
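Since np.insert with np.nan along both axes is the whole trick here, a short demo makes the behaviour concrete; index 1 stands in for a hypothetical insertion index:

# NaN row/column insertion as used by _intersperse_insertion_rows_and_columns.
import numpy as np

pvals = np.arange(4.0).reshape(2, 2)
for i in [1]:  # stand-in for self._insertion_indices
    pvals = np.insert(pvals, i, np.nan, axis=0)  # all-NaN row at index i
    pvals = np.insert(pvals, i, np.nan, axis=1)  # all-NaN column at index i
print(pvals)
# [[ 0. nan  1.]
#  [nan nan nan]
#  [ 2. nan  3.]]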
def sign_envelope(envelope, key_file): """Sign the given soap request with the given key""" doc = etree.fromstring(envelope) body = get_body(doc) queue = SignQueue() queue.push_and_mark(body) security_node = ensure_security_header(doc, queue) security_token_node = create_binary_security_token(key_file) signature_node = Signature( xmlsec.TransformExclC14N, xmlsec.TransformRsaSha1) security_node.append(security_token_node) security_node.append(signature_node) queue.insert_references(signature_node) key_info = create_key_info_node(security_token_node) signature_node.append(key_info) # Sign the generated xml xmlsec.addIDs(doc, ['Id']) dsigCtx = xmlsec.DSigCtx() dsigCtx.signKey = xmlsec.Key.load(key_file, xmlsec.KeyDataFormatPem, None) dsigCtx.sign(signature_node) return etree.tostring(doc)
def function[sign_envelope, parameter[envelope, key_file]]: constant[Sign the given soap request with the given key] variable[doc] assign[=] call[name[etree].fromstring, parameter[name[envelope]]] variable[body] assign[=] call[name[get_body], parameter[name[doc]]] variable[queue] assign[=] call[name[SignQueue], parameter[]] call[name[queue].push_and_mark, parameter[name[body]]] variable[security_node] assign[=] call[name[ensure_security_header], parameter[name[doc], name[queue]]] variable[security_token_node] assign[=] call[name[create_binary_security_token], parameter[name[key_file]]] variable[signature_node] assign[=] call[name[Signature], parameter[name[xmlsec].TransformExclC14N, name[xmlsec].TransformRsaSha1]] call[name[security_node].append, parameter[name[security_token_node]]] call[name[security_node].append, parameter[name[signature_node]]] call[name[queue].insert_references, parameter[name[signature_node]]] variable[key_info] assign[=] call[name[create_key_info_node], parameter[name[security_token_node]]] call[name[signature_node].append, parameter[name[key_info]]] call[name[xmlsec].addIDs, parameter[name[doc], list[[<ast.Constant object at 0x7da1b253dae0>]]]] variable[dsigCtx] assign[=] call[name[xmlsec].DSigCtx, parameter[]] name[dsigCtx].signKey assign[=] call[name[xmlsec].Key.load, parameter[name[key_file], name[xmlsec].KeyDataFormatPem, constant[None]]] call[name[dsigCtx].sign, parameter[name[signature_node]]] return[call[name[etree].tostring, parameter[name[doc]]]]
keyword[def] identifier[sign_envelope] ( identifier[envelope] , identifier[key_file] ): literal[string] identifier[doc] = identifier[etree] . identifier[fromstring] ( identifier[envelope] ) identifier[body] = identifier[get_body] ( identifier[doc] ) identifier[queue] = identifier[SignQueue] () identifier[queue] . identifier[push_and_mark] ( identifier[body] ) identifier[security_node] = identifier[ensure_security_header] ( identifier[doc] , identifier[queue] ) identifier[security_token_node] = identifier[create_binary_security_token] ( identifier[key_file] ) identifier[signature_node] = identifier[Signature] ( identifier[xmlsec] . identifier[TransformExclC14N] , identifier[xmlsec] . identifier[TransformRsaSha1] ) identifier[security_node] . identifier[append] ( identifier[security_token_node] ) identifier[security_node] . identifier[append] ( identifier[signature_node] ) identifier[queue] . identifier[insert_references] ( identifier[signature_node] ) identifier[key_info] = identifier[create_key_info_node] ( identifier[security_token_node] ) identifier[signature_node] . identifier[append] ( identifier[key_info] ) identifier[xmlsec] . identifier[addIDs] ( identifier[doc] ,[ literal[string] ]) identifier[dsigCtx] = identifier[xmlsec] . identifier[DSigCtx] () identifier[dsigCtx] . identifier[signKey] = identifier[xmlsec] . identifier[Key] . identifier[load] ( identifier[key_file] , identifier[xmlsec] . identifier[KeyDataFormatPem] , keyword[None] ) identifier[dsigCtx] . identifier[sign] ( identifier[signature_node] ) keyword[return] identifier[etree] . identifier[tostring] ( identifier[doc] )
def sign_envelope(envelope, key_file): """Sign the given soap request with the given key""" doc = etree.fromstring(envelope) body = get_body(doc) queue = SignQueue() queue.push_and_mark(body) security_node = ensure_security_header(doc, queue) security_token_node = create_binary_security_token(key_file) signature_node = Signature(xmlsec.TransformExclC14N, xmlsec.TransformRsaSha1) security_node.append(security_token_node) security_node.append(signature_node) queue.insert_references(signature_node) key_info = create_key_info_node(security_token_node) signature_node.append(key_info) # Sign the generated xml xmlsec.addIDs(doc, ['Id']) dsigCtx = xmlsec.DSigCtx() dsigCtx.signKey = xmlsec.Key.load(key_file, xmlsec.KeyDataFormatPem, None) dsigCtx.sign(signature_node) return etree.tostring(doc)
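sign_envelope leans on helpers such as get_body and ensure_security_header that are defined elsewhere in the module. The following is a minimal lxml sketch of the kind of lookup get_body presumably performs; the envelope string, namespace constant, and helper shape are assumptions, not taken from the source:

# Hypothetical sketch of locating the SOAP Body element that later gets signed.
from lxml import etree

SOAP_NS = 'http://schemas.xmlsoap.org/soap/envelope/'
envelope = ('<soap:Envelope xmlns:soap="%s">'
            '<soap:Body><Ping/></soap:Body>'
            '</soap:Envelope>' % SOAP_NS)

doc = etree.fromstring(envelope)
body = doc.find('{%s}Body' % SOAP_NS)  # Clark-notation namespaced lookup
print(etree.tostring(body))  # b'<soap:Body ...><Ping/></soap:Body>'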
def all(self): """ Gets all saved queries for a project from the Keen IO API. Master key must be set. """ response = self._get_json(HTTPMethods.GET, self.saved_query_url, self._get_master_key()) return response
def function[all, parameter[self]]: constant[ Gets all saved queries for a project from the Keen IO API. Master key must be set. ] variable[response] assign[=] call[name[self]._get_json, parameter[name[HTTPMethods].GET, name[self].saved_query_url, call[name[self]._get_master_key, parameter[]]]] return[name[response]]
keyword[def] identifier[all] ( identifier[self] ): literal[string] identifier[response] = identifier[self] . identifier[_get_json] ( identifier[HTTPMethods] . identifier[GET] , identifier[self] . identifier[saved_query_url] , identifier[self] . identifier[_get_master_key] ()) keyword[return] identifier[response]
def all(self): """ Gets all saved queries for a project from the Keen IO API. Master key must be set. """ response = self._get_json(HTTPMethods.GET, self.saved_query_url, self._get_master_key()) return response
def execute(self, request): '''Execute a new ``request``. ''' handle = None if request: request[0] = command = to_string(request[0]).lower() info = COMMANDS_INFO.get(command) if info: handle = getattr(self.store, info.method_name) # if self.channels or self.patterns: if command not in self.store.SUBSCRIBE_COMMANDS: return self.reply_error(self.store.PUBSUB_ONLY) if self.blocked: return self.reply_error('Blocked client cannot request') if self.transaction is not None and command not in 'exec': self.transaction.append((handle, request)) return self.connection.write(self.store.QUEUED) self.execute_command(handle, request)
def function[execute, parameter[self, request]]: constant[Execute a new ``request``. ] variable[handle] assign[=] constant[None] if name[request] begin[:] call[name[request]][constant[0]] assign[=] call[call[name[to_string], parameter[call[name[request]][constant[0]]]].lower, parameter[]] variable[info] assign[=] call[name[COMMANDS_INFO].get, parameter[name[command]]] if name[info] begin[:] variable[handle] assign[=] call[name[getattr], parameter[name[self].store, name[info].method_name]] if <ast.BoolOp object at 0x7da204567490> begin[:] if compare[name[command] <ast.NotIn object at 0x7da2590d7190> name[self].store.SUBSCRIBE_COMMANDS] begin[:] return[call[name[self].reply_error, parameter[name[self].store.PUBSUB_ONLY]]] if name[self].blocked begin[:] return[call[name[self].reply_error, parameter[constant[Blocked client cannot request]]]] if <ast.BoolOp object at 0x7da204564e50> begin[:] call[name[self].transaction.append, parameter[tuple[[<ast.Name object at 0x7da204565ed0>, <ast.Name object at 0x7da204567a60>]]]] return[call[name[self].connection.write, parameter[name[self].store.QUEUED]]] call[name[self].execute_command, parameter[name[handle], name[request]]]
keyword[def] identifier[execute] ( identifier[self] , identifier[request] ): literal[string] identifier[handle] = keyword[None] keyword[if] identifier[request] : identifier[request] [ literal[int] ]= identifier[command] = identifier[to_string] ( identifier[request] [ literal[int] ]). identifier[lower] () identifier[info] = identifier[COMMANDS_INFO] . identifier[get] ( identifier[command] ) keyword[if] identifier[info] : identifier[handle] = identifier[getattr] ( identifier[self] . identifier[store] , identifier[info] . identifier[method_name] ) keyword[if] identifier[self] . identifier[channels] keyword[or] identifier[self] . identifier[patterns] : keyword[if] identifier[command] keyword[not] keyword[in] identifier[self] . identifier[store] . identifier[SUBSCRIBE_COMMANDS] : keyword[return] identifier[self] . identifier[reply_error] ( identifier[self] . identifier[store] . identifier[PUBSUB_ONLY] ) keyword[if] identifier[self] . identifier[blocked] : keyword[return] identifier[self] . identifier[reply_error] ( literal[string] ) keyword[if] identifier[self] . identifier[transaction] keyword[is] keyword[not] keyword[None] keyword[and] identifier[command] keyword[not] keyword[in] literal[string] : identifier[self] . identifier[transaction] . identifier[append] (( identifier[handle] , identifier[request] )) keyword[return] identifier[self] . identifier[connection] . identifier[write] ( identifier[self] . identifier[store] . identifier[QUEUED] ) identifier[self] . identifier[execute_command] ( identifier[handle] , identifier[request] )
def execute(self, request): """Execute a new ``request``. """ handle = None if request: request[0] = command = to_string(request[0]).lower() info = COMMANDS_INFO.get(command) if info: handle = getattr(self.store, info.method_name) # depends on [control=['if'], data=[]] # if self.channels or self.patterns: if command not in self.store.SUBSCRIBE_COMMANDS: return self.reply_error(self.store.PUBSUB_ONLY) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.blocked: return self.reply_error('Blocked client cannot request') # depends on [control=['if'], data=[]] if self.transaction is not None and command not in 'exec': self.transaction.append((handle, request)) return self.connection.write(self.store.QUEUED) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self.execute_command(handle, request)
def _check_wait_input_flag(self): """ Returns a function to stop the search of the investigated node of the ArciDispatch algorithm. :return: A function to stop the search. :rtype: (bool, str) -> bool """ wf_pred = self._wf_pred # Namespace shortcuts. pred = {k: set(v).issubset for k, v in self._pred.items()} if self._wait_in: we = self._wait_in.get # Namespace shortcut. def check_wait_input_flag(wait_in, n_id): """ Stops the search of the investigated node of the ArciDispatch algorithm, until all inputs are satisfied. :param wait_in: If True the node is waiting input estimations. :type wait_in: bool :param n_id: Data or function node id. :type n_id: str :return: True if all node inputs are satisfied, otherwise False. :rtype: bool """ # Return true if the node inputs are satisfied. if we(n_id, wait_in): return not pred[n_id](wf_pred[n_id]) return False else: def check_wait_input_flag(wait_in, n_id): # Return true if the node inputs are satisfied. return wait_in and not pred[n_id](wf_pred[n_id]) return check_wait_input_flag
def function[_check_wait_input_flag, parameter[self]]: constant[ Returns a function to stop the search of the investigated node of the ArciDispatch algorithm. :return: A function to stop the search. :rtype: (bool, str) -> bool ] variable[wf_pred] assign[=] name[self]._wf_pred variable[pred] assign[=] <ast.DictComp object at 0x7da2049621d0> if name[self]._wait_in begin[:] variable[we] assign[=] name[self]._wait_in.get def function[check_wait_input_flag, parameter[wait_in, n_id]]: constant[ Stops the search of the investigated node of the ArciDispatch algorithm, until all inputs are satisfied. :param wait_in: If True the node is waiting input estimations. :type wait_in: bool :param n_id: Data or function node id. :type n_id: str :return: True if all node inputs are satisfied, otherwise False. :rtype: bool ] if call[name[we], parameter[name[n_id], name[wait_in]]] begin[:] return[<ast.UnaryOp object at 0x7da204962da0>] return[constant[False]] return[name[check_wait_input_flag]]
keyword[def] identifier[_check_wait_input_flag] ( identifier[self] ): literal[string] identifier[wf_pred] = identifier[self] . identifier[_wf_pred] identifier[pred] ={ identifier[k] : identifier[set] ( identifier[v] ). identifier[issubset] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[_pred] . identifier[items] ()} keyword[if] identifier[self] . identifier[_wait_in] : identifier[we] = identifier[self] . identifier[_wait_in] . identifier[get] keyword[def] identifier[check_wait_input_flag] ( identifier[wait_in] , identifier[n_id] ): literal[string] keyword[if] identifier[we] ( identifier[n_id] , identifier[wait_in] ): keyword[return] keyword[not] identifier[pred] [ identifier[n_id] ]( identifier[wf_pred] [ identifier[n_id] ]) keyword[return] keyword[False] keyword[else] : keyword[def] identifier[check_wait_input_flag] ( identifier[wait_in] , identifier[n_id] ): keyword[return] identifier[wait_in] keyword[and] keyword[not] identifier[pred] [ identifier[n_id] ]( identifier[wf_pred] [ identifier[n_id] ]) keyword[return] identifier[check_wait_input_flag]
def _check_wait_input_flag(self): """ Returns a function to stop the search of the investigated node of the ArciDispatch algorithm. :return: A function to stop the search. :rtype: (bool, str) -> bool """ wf_pred = self._wf_pred # Namespace shortcuts. pred = {k: set(v).issubset for (k, v) in self._pred.items()} if self._wait_in: we = self._wait_in.get # Namespace shortcut. def check_wait_input_flag(wait_in, n_id): """ Stops the search of the investigated node of the ArciDispatch algorithm, until all inputs are satisfied. :param wait_in: If True the node is waiting input estimations. :type wait_in: bool :param n_id: Data or function node id. :type n_id: str :return: True if all node inputs are satisfied, otherwise False. :rtype: bool """ # Return true if the node inputs are satisfied. if we(n_id, wait_in): return not pred[n_id](wf_pred[n_id]) # depends on [control=['if'], data=[]] return False # depends on [control=['if'], data=[]] else: def check_wait_input_flag(wait_in, n_id): # Return true if the node inputs are satisfied. return wait_in and (not pred[n_id](wf_pred[n_id])) return check_wait_input_flag
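The factory bakes precomputed lookup tables into one of two closures, so the hot path does no per-call configuration branching. The same pattern as a runnable toy; the table shapes below are assumed for illustration, not taken from the library:

# Closure-factory pattern from _check_wait_input_flag, with toy tables.
wf_pred = {'n1': {'a': 1}}           # estimations received so far, per node
pred = {'n1': {'a', 'b'}.issubset}   # required inputs, pre-bound as issubset

def make_check(wait_flags):
    if wait_flags:
        we = wait_flags.get
        def check(wait_in, n_id):
            # Keep waiting only if the node's flag is set and inputs are short.
            return bool(we(n_id, wait_in)) and not pred[n_id](wf_pred[n_id])
        return check
    def check(wait_in, n_id):
        return wait_in and not pred[n_id](wf_pred[n_id])
    return check

check = make_check({'n1': True})
print(check(False, 'n1'))  # True: input 'b' is still missing, so keep waiting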
def rownumbers(self, table=None): """Return a list containing the row numbers of this table. This method can be useful after a selection or a sort. It returns the row numbers of the rows in this table with respect to the given table. If no table is given, the original table is used. For example:: t = table('W53.MS') t1 = t.selectrows([1,3,5,7,9]) # select a few rows t1.rownumbers(t) # [1 3 5 7 9] t2 = t1.selectrows([2,5]) # select rows from the selection t2.rownumbers(t1) # [2 5] # rownrs of t2 in table t1 t2.rownumbers(t) # [3 9] # rownrs of t2 in t t2.rownumbers() # [3 9] The last statements show that the method returns the row numbers referring to the given table. Table t2 contains rows 2 and 5 in table t1, which are rows 3 and 9 in table t. """ if table is None: return self._rownumbers(Table()) return self._rownumbers(table)
def function[rownumbers, parameter[self, table]]: constant[Return a list containing the row numbers of this table. This method can be useful after a selection or a sort. It returns the row numbers of the rows in this table with respect to the given table. If no table is given, the original table is used. For example:: t = table('W53.MS') t1 = t.selectrows([1,3,5,7,9]) # select a few rows t1.rownumbers(t) # [1 3 5 7 9] t2 = t1.selectrows([2,5]) # select rows from the selection t2.rownumbers(t1) # [2 5] # rownrs of t2 in table t1 t2.rownumbers(t) # [3 9] # rownrs of t2 in t t2.rownumbers() # [3 9] The last statements show that the method returns the row numbers referring to the given table. Table t2 contains rows 2 and 5 in table t1, which are rows 3 and 9 in table t. ] if compare[name[table] is constant[None]] begin[:] return[call[name[self]._rownumbers, parameter[call[name[Table], parameter[]]]]] return[call[name[self]._rownumbers, parameter[name[table]]]]
keyword[def] identifier[rownumbers] ( identifier[self] , identifier[table] = keyword[None] ): literal[string] keyword[if] identifier[table] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[_rownumbers] ( identifier[Table] ()) keyword[return] identifier[self] . identifier[_rownumbers] ( identifier[table] )
def rownumbers(self, table=None): """Return a list containing the row numbers of this table. This method can be useful after a selection or a sort. It returns the row numbers of the rows in this table with respect to the given table. If no table is given, the original table is used. For example:: t = table('W53.MS') t1 = t.selectrows([1,3,5,7,9]) # select a few rows t1.rownumbers(t) # [1 3 5 7 9] t2 = t1.selectrows([2,5]) # select rows from the selection t2.rownumbers(t1) # [2 5] # rownrs of t2 in table t1 t2.rownumbers(t) # [3 9] # rownrs of t2 in t t2.rownumbers() # [3 9] The last statements show that the method returns the row numbers referring to the given table. Table t2 contains rows 2 and 5 in table t1, which are rows 3 and 9 in table t. """ if table is None: return self._rownumbers(Table()) # depends on [control=['if'], data=[]] return self._rownumbers(table)
def get_function_spec(name): """Return a dictionary with the specification of a function: parameter names and defaults (value, bounds, scale, etc.). Returns ------- par_names : list List of parameter names for this function. norm_par : str Name of normalization parameter. default : dict Parameter defaults dictionary. """ if not hasattr(get_function_spec, 'fndict'): modelfile = os.path.join('$FERMIPY_ROOT', 'data', 'models.yaml') modelfile = os.path.expandvars(modelfile) get_function_spec.fndict = yaml.load(open(modelfile)) if not name in get_function_spec.fndict.keys(): raise Exception('Invalid Function Name: %s' % name) return get_function_spec.fndict[name]
def function[get_function_spec, parameter[name]]: constant[Return a dictionary with the specification of a function: parameter names and defaults (value, bounds, scale, etc.). Returns ------- par_names : list List of parameter names for this function. norm_par : str Name of normalization parameter. default : dict Parameter defaults dictionary. ] if <ast.UnaryOp object at 0x7da18f58e920> begin[:] variable[modelfile] assign[=] call[name[os].path.join, parameter[constant[$FERMIPY_ROOT], constant[data], constant[models.yaml]]] variable[modelfile] assign[=] call[name[os].path.expandvars, parameter[name[modelfile]]] name[get_function_spec].fndict assign[=] call[name[yaml].load, parameter[call[name[open], parameter[name[modelfile]]]]] if <ast.UnaryOp object at 0x7da18f58db70> begin[:] <ast.Raise object at 0x7da18f58dc60> return[call[name[get_function_spec].fndict][name[name]]]
keyword[def] identifier[get_function_spec] ( identifier[name] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[get_function_spec] , literal[string] ): identifier[modelfile] = identifier[os] . identifier[path] . identifier[join] ( literal[string] , literal[string] , literal[string] ) identifier[modelfile] = identifier[os] . identifier[path] . identifier[expandvars] ( identifier[modelfile] ) identifier[get_function_spec] . identifier[fndict] = identifier[yaml] . identifier[load] ( identifier[open] ( identifier[modelfile] )) keyword[if] keyword[not] identifier[name] keyword[in] identifier[get_function_spec] . identifier[fndict] . identifier[keys] (): keyword[raise] identifier[Exception] ( literal[string] % identifier[name] ) keyword[return] identifier[get_function_spec] . identifier[fndict] [ identifier[name] ]
def get_function_spec(name): """Return a dictionary with the specification of a function: parameter names and defaults (value, bounds, scale, etc.). Returns ------- par_names : list List of parameter names for this function. norm_par : str Name of normalization parameter. default : dict Parameter defaults dictionary. """ if not hasattr(get_function_spec, 'fndict'): modelfile = os.path.join('$FERMIPY_ROOT', 'data', 'models.yaml') modelfile = os.path.expandvars(modelfile) get_function_spec.fndict = yaml.load(open(modelfile)) # depends on [control=['if'], data=[]] if not name in get_function_spec.fndict.keys(): raise Exception('Invalid Function Name: %s' % name) # depends on [control=['if'], data=[]] return get_function_spec.fndict[name]
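The hasattr guard caches the parsed YAML on an attribute of the function object itself, so the file is read at most once per process. The same memoization idiom with the file I/O stubbed out by an inline dict:

# Load-once-per-process caching on a function attribute, as in
# get_function_spec; the inline dict stands in for yaml.load(...).
def get_spec(name):
    if not hasattr(get_spec, 'fndict'):
        get_spec.fndict = {'PowerLaw': {'norm_par': 'Prefactor'}}
    if name not in get_spec.fndict:
        raise Exception('Invalid Function Name: %s' % name)
    return get_spec.fndict[name]

print(get_spec('PowerLaw'))  # {'norm_par': 'Prefactor'}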
def RunValidationOutputFromOptions(feed, options): """Validate feed, output results per options and return an exit code.""" if options.output.upper() == "CONSOLE": return RunValidationOutputToConsole(feed, options) else: return RunValidationOutputToFilename(feed, options, options.output)
def function[RunValidationOutputFromOptions, parameter[feed, options]]: constant[Validate feed, output results per options and return an exit code.] if compare[call[name[options].output.upper, parameter[]] equal[==] constant[CONSOLE]] begin[:] return[call[name[RunValidationOutputToConsole], parameter[name[feed], name[options]]]]
keyword[def] identifier[RunValidationOutputFromOptions] ( identifier[feed] , identifier[options] ): literal[string] keyword[if] identifier[options] . identifier[output] . identifier[upper] ()== literal[string] : keyword[return] identifier[RunValidationOutputToConsole] ( identifier[feed] , identifier[options] ) keyword[else] : keyword[return] identifier[RunValidationOutputToFilename] ( identifier[feed] , identifier[options] , identifier[options] . identifier[output] )
def RunValidationOutputFromOptions(feed, options): """Validate feed, output results per options and return an exit code.""" if options.output.upper() == 'CONSOLE': return RunValidationOutputToConsole(feed, options) # depends on [control=['if'], data=[]] else: return RunValidationOutputToFilename(feed, options, options.output)
def resolve_post(self, post):
    """Mark post as resolved

    :type post: dict|str|int
    :param post: Either the post dict returned by another API method,
        or the `cid` field of that post.
    :returns: True if it is successful. False otherwise
    """
    try:
        cid = post["id"]
    except (KeyError, TypeError):
        cid = post
    params = {
        "cid": cid,
        "resolved": "true"
    }
    return self._rpc.content_mark_resolved(params)
def function[resolve_post, parameter[self, post]]: constant[Mark post as resolved :type post: dict|str|int :param post: Either the post dict returned by another API method, or the `cid` field of that post. :returns: True if it is successful. False otherwise ] <ast.Try object at 0x7da2044c02e0> variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0fee020>, <ast.Constant object at 0x7da1b0fee6e0>], [<ast.Name object at 0x7da1b0fee6b0>, <ast.Constant object at 0x7da1b0fef9d0>]] return[call[name[self]._rpc.content_mark_resolved, parameter[name[params]]]]
keyword[def] identifier[resolve_post] ( identifier[self] , identifier[post] ): literal[string] keyword[try] : identifier[cid] = identifier[post] [ literal[string] ] keyword[except] ( identifier[KeyError] , identifier[TypeError] ): identifier[cid] = identifier[post] identifier[params] ={ literal[string] : identifier[cid] , literal[string] : literal[string] } keyword[return] identifier[self] . identifier[_rpc] . identifier[content_mark_resolved] ( identifier[params] )
def resolve_post(self, post): """Mark post as resolved :type post: dict|str|int :param post: Either the post dict returned by another API method, or the `cid` field of that post. :returns: True if it is successful. False otherwise """ try: cid = post['id'] # depends on [control=['try'], data=[]] except (KeyError, TypeError): cid = post # depends on [control=['except'], data=[]] params = {'cid': cid, 'resolved': 'true'} return self._rpc.content_mark_resolved(params)
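The try/except normalizer lets callers pass either a full post dict or a bare cid, as the docstring advertises; catching TypeError alongside KeyError covers the str/int case, since indexing an int or a str with 'id' raises TypeError. The idiom in isolation:

# "Dict or bare id" normalization as used by resolve_post.
def to_cid(post):
    try:
        return post['id']
    except (KeyError, TypeError):  # ints/strings are not subscriptable by 'id'
        return post

print(to_cid({'id': 42}), to_cid(42), to_cid('k3abc'))  # 42 42 k3abc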
def console_set_char_background( con: tcod.console.Console, x: int, y: int, col: Tuple[int, int, int], flag: int = BKGND_SET, ) -> None: """Change the background color of x,y to col using a blend mode. Args: con (Console): Any Console instance. x (int): Character x position from the left. y (int): Character y position from the top. col (Union[Tuple[int, int, int], Sequence[int]]): An (r, g, b) sequence or Color instance. flag (int): Blending mode to use, defaults to BKGND_SET. """ lib.TCOD_console_set_char_background(_console(con), x, y, col, flag)
def function[console_set_char_background, parameter[con, x, y, col, flag]]: constant[Change the background color of x,y to col using a blend mode. Args: con (Console): Any Console instance. x (int): Character x position from the left. y (int): Character y position from the top. col (Union[Tuple[int, int, int], Sequence[int]]): An (r, g, b) sequence or Color instance. flag (int): Blending mode to use, defaults to BKGND_SET. ] call[name[lib].TCOD_console_set_char_background, parameter[call[name[_console], parameter[name[con]]], name[x], name[y], name[col], name[flag]]]
keyword[def] identifier[console_set_char_background] ( identifier[con] : identifier[tcod] . identifier[console] . identifier[Console] , identifier[x] : identifier[int] , identifier[y] : identifier[int] , identifier[col] : identifier[Tuple] [ identifier[int] , identifier[int] , identifier[int] ], identifier[flag] : identifier[int] = identifier[BKGND_SET] , )-> keyword[None] : literal[string] identifier[lib] . identifier[TCOD_console_set_char_background] ( identifier[_console] ( identifier[con] ), identifier[x] , identifier[y] , identifier[col] , identifier[flag] )
def console_set_char_background(con: tcod.console.Console, x: int, y: int, col: Tuple[int, int, int], flag: int=BKGND_SET) -> None: """Change the background color of x,y to col using a blend mode. Args: con (Console): Any Console instance. x (int): Character x position from the left. y (int): Character y position from the top. col (Union[Tuple[int, int, int], Sequence[int]]): An (r, g, b) sequence or Color instance. flag (int): Blending mode to use, defaults to BKGND_SET. """ lib.TCOD_console_set_char_background(_console(con), x, y, col, flag)
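A hedged usage sketch follows. console_set_char_background is the function defined above (in python-tcod it lives in tcod.libtcodpy); the off-screen Console constructor and its .bg array are part of python-tcod, but indexing conventions have shifted across releases, so treat the access order as an assumption:

# Usage sketch: paint one cell's background on an off-screen console.
import tcod

con = tcod.console.Console(10, 5)
console_set_char_background(con, 2, 1, (255, 0, 0))  # flag defaults to BKGND_SET
print(con.bg[1, 2])  # recent python-tcod indexes .bg as [y, x]: [255 0 0]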
def parse_limits_list(path, limits): """Parse a structured list of flux limits as obtained from a YAML file Yields tuples of reaction ID, lower and upper flux bounds. Path can be given as a string or a context. """ context = FilePathContext(path) for limit_def in limits: if 'include' in limit_def: include_context = context.resolve(limit_def['include']) for limit in parse_limits_file(include_context): yield limit else: yield parse_limit(limit_def)
def function[parse_limits_list, parameter[path, limits]]: constant[Parse a structured list of flux limits as obtained from a YAML file Yields tuples of reaction ID, lower and upper flux bounds. Path can be given as a string or a context. ] variable[context] assign[=] call[name[FilePathContext], parameter[name[path]]] for taget[name[limit_def]] in starred[name[limits]] begin[:] if compare[constant[include] in name[limit_def]] begin[:] variable[include_context] assign[=] call[name[context].resolve, parameter[call[name[limit_def]][constant[include]]]] for taget[name[limit]] in starred[call[name[parse_limits_file], parameter[name[include_context]]]] begin[:] <ast.Yield object at 0x7da207f991e0>
keyword[def] identifier[parse_limits_list] ( identifier[path] , identifier[limits] ): literal[string] identifier[context] = identifier[FilePathContext] ( identifier[path] ) keyword[for] identifier[limit_def] keyword[in] identifier[limits] : keyword[if] literal[string] keyword[in] identifier[limit_def] : identifier[include_context] = identifier[context] . identifier[resolve] ( identifier[limit_def] [ literal[string] ]) keyword[for] identifier[limit] keyword[in] identifier[parse_limits_file] ( identifier[include_context] ): keyword[yield] identifier[limit] keyword[else] : keyword[yield] identifier[parse_limit] ( identifier[limit_def] )
def parse_limits_list(path, limits): """Parse a structured list of flux limits as obtained from a YAML file Yields tuples of reaction ID, lower and upper flux bounds. Path can be given as a string or a context. """ context = FilePathContext(path) for limit_def in limits: if 'include' in limit_def: include_context = context.resolve(limit_def['include']) for limit in parse_limits_file(include_context): yield limit # depends on [control=['for'], data=['limit']] # depends on [control=['if'], data=['limit_def']] else: yield parse_limit(limit_def) # depends on [control=['for'], data=['limit_def']]
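The interesting shape here is the generator recursion: yield your own entries and delegate 'include' entries to another parser. A self-contained toy with the file I/O replaced by a dict; all names are illustrative:

# Include-aware generator in the style of parse_limits_list, I/O stubbed out.
def parse_list(items, files):
    for item in items:
        if 'include' in item:
            # Delegate to the "file" named by the include and re-yield.
            for limit in parse_list(files[item['include']], files):
                yield limit
        else:
            yield (item['reaction'], item.get('lower'), item.get('upper'))

files = {'extra.yaml': [{'reaction': 'rxn_2', 'upper': 10}]}
items = [{'reaction': 'rxn_1', 'lower': 0}, {'include': 'extra.yaml'}]
print(list(parse_list(items, files)))
# [('rxn_1', 0, None), ('rxn_2', None, 10)]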
def find_schemas(self, schema_path, schema_type=SCHEMA_TYPE_PROCESS, verbosity=1): """Find schemas in packages that match filters.""" schema_matches = [] if not os.path.isdir(schema_path): if verbosity > 0: self.stdout.write("Invalid path {}".format(schema_path)) return if schema_type not in [SCHEMA_TYPE_PROCESS, SCHEMA_TYPE_DESCRIPTOR]: raise ValueError('Invalid schema type') for root, _, files in os.walk(schema_path): for schema_file in [os.path.join(root, fn) for fn in files]: schemas = None if schema_type == SCHEMA_TYPE_DESCRIPTOR: # Discover descriptors. schemas = self.find_descriptor_schemas(schema_file) elif schema_type == SCHEMA_TYPE_PROCESS: # Perform process discovery for all supported execution engines. schemas = [] for execution_engine in manager.execution_engines.values(): schemas.extend(execution_engine.discover_process(schema_file)) for schema in schemas: schema_matches.append(schema) return schema_matches
def function[find_schemas, parameter[self, schema_path, schema_type, verbosity]]: constant[Find schemas in packages that match filters.] variable[schema_matches] assign[=] list[[]] if <ast.UnaryOp object at 0x7da1b1b84bb0> begin[:] if compare[name[verbosity] greater[>] constant[0]] begin[:] call[name[self].stdout.write, parameter[call[constant[Invalid path {}].format, parameter[name[schema_path]]]]] return[None] if compare[name[schema_type] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Name object at 0x7da20e9b0400>, <ast.Name object at 0x7da20e9b1480>]]] begin[:] <ast.Raise object at 0x7da20e9b3fa0> for taget[tuple[[<ast.Name object at 0x7da20e9b38e0>, <ast.Name object at 0x7da20e9b3130>, <ast.Name object at 0x7da20e9b3d60>]]] in starred[call[name[os].walk, parameter[name[schema_path]]]] begin[:] for taget[name[schema_file]] in starred[<ast.ListComp object at 0x7da20e9b0d00>] begin[:] variable[schemas] assign[=] constant[None] if compare[name[schema_type] equal[==] name[SCHEMA_TYPE_DESCRIPTOR]] begin[:] variable[schemas] assign[=] call[name[self].find_descriptor_schemas, parameter[name[schema_file]]] for taget[name[schema]] in starred[name[schemas]] begin[:] call[name[schema_matches].append, parameter[name[schema]]] return[name[schema_matches]]
keyword[def] identifier[find_schemas] ( identifier[self] , identifier[schema_path] , identifier[schema_type] = identifier[SCHEMA_TYPE_PROCESS] , identifier[verbosity] = literal[int] ): literal[string] identifier[schema_matches] =[] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[schema_path] ): keyword[if] identifier[verbosity] > literal[int] : identifier[self] . identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( identifier[schema_path] )) keyword[return] keyword[if] identifier[schema_type] keyword[not] keyword[in] [ identifier[SCHEMA_TYPE_PROCESS] , identifier[SCHEMA_TYPE_DESCRIPTOR] ]: keyword[raise] identifier[ValueError] ( literal[string] ) keyword[for] identifier[root] , identifier[_] , identifier[files] keyword[in] identifier[os] . identifier[walk] ( identifier[schema_path] ): keyword[for] identifier[schema_file] keyword[in] [ identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[fn] ) keyword[for] identifier[fn] keyword[in] identifier[files] ]: identifier[schemas] = keyword[None] keyword[if] identifier[schema_type] == identifier[SCHEMA_TYPE_DESCRIPTOR] : identifier[schemas] = identifier[self] . identifier[find_descriptor_schemas] ( identifier[schema_file] ) keyword[elif] identifier[schema_type] == identifier[SCHEMA_TYPE_PROCESS] : identifier[schemas] =[] keyword[for] identifier[execution_engine] keyword[in] identifier[manager] . identifier[execution_engines] . identifier[values] (): identifier[schemas] . identifier[extend] ( identifier[execution_engine] . identifier[discover_process] ( identifier[schema_file] )) keyword[for] identifier[schema] keyword[in] identifier[schemas] : identifier[schema_matches] . identifier[append] ( identifier[schema] ) keyword[return] identifier[schema_matches]
def find_schemas(self, schema_path, schema_type=SCHEMA_TYPE_PROCESS, verbosity=1): """Find schemas in packages that match filters.""" schema_matches = [] if not os.path.isdir(schema_path): if verbosity > 0: self.stdout.write('Invalid path {}'.format(schema_path)) # depends on [control=['if'], data=[]] return # depends on [control=['if'], data=[]] if schema_type not in [SCHEMA_TYPE_PROCESS, SCHEMA_TYPE_DESCRIPTOR]: raise ValueError('Invalid schema type') # depends on [control=['if'], data=[]] for (root, _, files) in os.walk(schema_path): for schema_file in [os.path.join(root, fn) for fn in files]: schemas = None if schema_type == SCHEMA_TYPE_DESCRIPTOR: # Discover descriptors. schemas = self.find_descriptor_schemas(schema_file) # depends on [control=['if'], data=[]] elif schema_type == SCHEMA_TYPE_PROCESS: # Perform process discovery for all supported execution engines. schemas = [] for execution_engine in manager.execution_engines.values(): schemas.extend(execution_engine.discover_process(schema_file)) # depends on [control=['for'], data=['execution_engine']] # depends on [control=['if'], data=[]] for schema in schemas: schema_matches.append(schema) # depends on [control=['for'], data=['schema']] # depends on [control=['for'], data=['schema_file']] # depends on [control=['for'], data=[]] return schema_matches
def ar1_gen(rho, mu, sigma, size=1):
    r"""Create an autoregressive series of order one AR(1) generator.

    .. math::
        X_t = \mu_t + \rho (X_{t-1} - \mu_{t-1}) + \epsilon_t

    If mu is a sequence and size > len(mu), the algorithm loops through
    mu.

    :Stochastics:
      rho : scalar in [0,1]
      mu : scalar or sequence
      sigma : scalar > 0
      size : integer
    """
    mu = np.asarray(mu, float)
    mu = np.resize(mu, size)
    r = mu.copy()
    r += np.random.randn(size) * sigma
    r[0] = np.random.randn(1) * sigma / np.sqrt(1 - rho ** 2)
    i = 0
    while True:
        yield r[i]
        i += 1
        if i == size:
            break
        r[i] += rho * (r[i - 1] - mu[i - 1])
def function[ar1_gen, parameter[rho, mu, sigma, size]]: constant[Create an autoregressive series of order one AR(1) generator. .. math:: X_t = \mu_t + \rho (X_{t-1} - \mu_{t-1}) + \epsilon_t If mu is a sequence and size > len(mu), the algorithm loops through mu. :Stochastics: rho : scalar in [0,1] mu : scalar or sequence sigma : scalar > 0 size : integer ] variable[mu] assign[=] call[name[np].asarray, parameter[name[mu], name[float]]] variable[mu] assign[=] call[name[np].resize, parameter[name[mu], name[size]]] variable[r] assign[=] call[name[mu].copy, parameter[]] <ast.AugAssign object at 0x7da20c7ca290> call[name[r]][constant[0]] assign[=] binary_operation[binary_operation[call[name[np].random.randn, parameter[constant[1]]] * name[sigma]] / call[name[np].sqrt, parameter[binary_operation[constant[1] - binary_operation[name[rho] ** constant[2]]]]]] variable[i] assign[=] constant[0] while constant[True] begin[:] <ast.Yield object at 0x7da20c7cb5b0> <ast.AugAssign object at 0x7da20c7cb400> if compare[name[i] equal[==] name[size]] begin[:] break <ast.AugAssign object at 0x7da20c7c9ff0>
keyword[def] identifier[ar1_gen] ( identifier[rho] , identifier[mu] , identifier[sigma] , identifier[size] = literal[int] ): literal[string] identifier[mu] = identifier[np] . identifier[asarray] ( identifier[mu] , identifier[float] ) identifier[mu] = identifier[np] . identifier[resize] ( identifier[mu] , identifier[size] ) identifier[r] = identifier[mu] . identifier[copy] () identifier[r] += identifier[np] . identifier[random] . identifier[randn] ( identifier[size] )* identifier[sigma] identifier[r] [ literal[int] ]= identifier[np] . identifier[random] . identifier[randn] ( literal[int] )* identifier[sigma] / identifier[np] . identifier[sqrt] ( literal[int] - identifier[rho] ** literal[int] ) identifier[i] = literal[int] keyword[while] keyword[True] : keyword[yield] identifier[r] [ identifier[i] ] identifier[i] += literal[int] keyword[if] identifier[i] == identifier[size] : keyword[break] identifier[r] [ identifier[i] ]+= identifier[rho] *( identifier[r] [ identifier[i] - literal[int] ]- identifier[mu] [ identifier[i] - literal[int] ])
def ar1_gen(rho, mu, sigma, size=1): """Create an autoregressive series of order one AR(1) generator. .. math:: X_t = \\mu_t + \\rho (X_{t-1} - \\mu_{t-1}) + \\epsilon_t If mu is a sequence and size > len(mu), the algorithm loops through mu. :Stochastics: rho : scalar in [0,1] mu : scalar or sequence sigma : scalar > 0 size : integer """ mu = np.asarray(mu, float) mu = np.resize(mu, size) r = mu.copy() r += np.random.randn(size) * sigma r[0] = np.random.randn(1) * sigma / np.sqrt(1 - rho ** 2) i = 0 while True: yield r[i] i += 1 if i == size: break # depends on [control=['if'], data=[]] r[i] += rho * (r[i - 1] - mu[i - 1]) # depends on [control=['while'], data=[]]
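Two details are worth spelling out: the r[0] line seeds the chain from the stationary distribution, whose variance for an AR(1) process with noise variance sigma^2 is sigma^2 / (1 - rho^2), hence the 1 / sqrt(1 - rho^2) factor; and the lag-1 autocorrelation of the output should approach rho. A quick empirical check, calling the generator defined above:

# Empirical sanity check of ar1_gen: sample lag-1 autocorrelation ~ rho.
import numpy as np

np.random.seed(0)
x = np.fromiter(ar1_gen(rho=0.8, mu=0.0, sigma=1.0, size=200000), dtype=float)
print(np.corrcoef(x[:-1], x[1:])[0, 1])  # close to 0.8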
def wherefunc(self, func, fieldname=None, negate=False):
    """
    .wherefunc(func, fieldname=None, negate=False)

    Applies a function to an entire row and filters the rows based on the
    boolean output of that function. Optionally, if you pass in a fieldname,
    the function will simply take the value at that fieldname rather than
    the whole row.
    """
    if fieldname is not None:
        if negate:
            return self.mask([not func(value) for value in self[fieldname]])
        else:
            return self.mask([func(value) for value in self[fieldname]])
    else:
        if negate:
            return self.mask([not func(row) for row in self])
        else:
            return self.mask([func(row) for row in self])
def function[wherefunc, parameter[self, func, fieldname, negate]]: constant[ .wherefunc(func, fieldname=None, negate=False) Applies a function to an entire row and filters the rows based on the boolean output of that function. Optionally, if you pass in a fieldname, the function will simply take the value at that fieldname rather than the whole row. ] if compare[name[fieldname] is_not constant[None]] begin[:] if name[negate] begin[:] return[call[name[self].mask, parameter[<ast.ListComp object at 0x7da1b13d6200>]]]
keyword[def] identifier[wherefunc] ( identifier[self] , identifier[func] , identifier[fieldname] = keyword[None] , identifier[negate] = keyword[False] ): literal[string] keyword[if] identifier[fieldname] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[negate] : keyword[return] identifier[self] . identifier[mask] ([ keyword[not] identifier[func] ( identifier[value] ) keyword[for] identifier[value] keyword[in] identifier[self] [ identifier[fieldname] ]]) keyword[else] : keyword[return] identifier[self] . identifier[mask] ([ identifier[func] ( identifier[value] ) keyword[for] identifier[value] keyword[in] identifier[self] [ identifier[fieldname] ]]) keyword[else] : keyword[if] identifier[negate] : keyword[return] identifier[self] . identifier[mask] ([ keyword[not] identifier[func] ( identifier[row] ) keyword[for] identifier[row] keyword[in] identifier[self] ]) keyword[else] : keyword[return] identifier[self] . identifier[mask] ([ identifier[func] ( identifier[row] ) keyword[for] identifier[row] keyword[in] identifier[self] ])
def wherefunc(self, func, fieldname=None, negate=False): """ .wherefunc(func, fieldname=None, negate=False) Applies a function to an entire row and filters the rows based on the boolean output of that function. Optionally, if you pass in a fieldname, the function will simply take the value at that fieldname rather than the whole row. """ if fieldname is not None: if negate: return self.mask([not func(value) for value in self[fieldname]]) # depends on [control=['if'], data=[]] else: return self.mask([func(value) for value in self[fieldname]]) # depends on [control=['if'], data=['fieldname']] elif negate: return self.mask([not func(row) for row in self]) # depends on [control=['if'], data=[]] else: return self.mask([func(row) for row in self])
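Stripped of the table class, wherefunc just builds a boolean mask from a per-value predicate, optionally negated, and hands it to self.mask. The same computation over plain lists:

# Mask construction behind wherefunc, on plain Python lists.
rows = [3, 8, 1, 9]
func = lambda v: v > 5
negate = False

mask = [func(v) != negate for v in rows]  # '!=' flips each bit when negate=True
print([r for r, keep in zip(rows, mask) if keep])  # [8, 9]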
def bootstrap(version='develop',
              script=None,
              hosts='',
              script_args='',
              roster='flat',
              ssh_user=None,
              ssh_password=None,
              ssh_priv_key=None,
              tmp_dir='/tmp/.bootstrap',
              http_backend='tornado'):
    '''
    Bootstrap minions with salt-bootstrap

    version : develop
        Git tag of version to install

    script : https://bootstrap.saltstack.com
        URL containing the script to execute

    hosts
        Comma-separated hosts [example: hosts='host1.local,host2.local']. These
        hosts need to exist in the specified roster.

    script_args
        Any additional arguments that you want to pass to the script.

        .. versionadded:: 2016.11.0

    roster : flat
        The roster to use for Salt SSH. More information about roster files can
        be found in :ref:`Salt's Roster Documentation <ssh-roster>`.

        For a full list of roster types, see the
        :ref:`builtin roster modules <all-salt.roster>` documentation.

        .. versionadded:: 2016.11.0

    ssh_user
        If ``user`` isn't found in the ``roster``, a default SSH user can be set
        here. Keep in mind that ``ssh_user`` will not override the roster
        ``user`` value if it is already defined.

        .. versionadded:: 2016.11.0

    ssh_password
        If ``passwd`` isn't found in the ``roster``, a default SSH password can
        be set here. Keep in mind that ``ssh_password`` will not override the
        roster ``passwd`` value if it is already defined.

        .. versionadded:: 2016.11.0

    ssh_priv_key
        If ``priv`` isn't found in the ``roster``, a default SSH private key
        can be set here. Keep in mind that ``ssh_priv_key`` will not override
        the roster ``priv`` value if it is already defined.

        .. versionadded:: 2016.11.0

    tmp_dir : /tmp/.bootstrap
        The temporary directory to download the bootstrap script in. This
        directory will have ``-<uuid4>`` appended to it. For example:
        ``/tmp/.bootstrap-a19a728e-d40a-4801-aba9-d00655c143a7/``

        .. versionadded:: 2016.11.0

    http_backend : tornado
        The backend library to use to download the script. If you need to use
        a ``file:///`` URL, then you should set this to ``urllib2``.

        .. versionadded:: 2016.11.0

    CLI Example:

    .. code-block:: bash

        salt-run manage.bootstrap hosts='host1,host2'
        salt-run manage.bootstrap hosts='host1,host2' version='v0.17'
        salt-run manage.bootstrap hosts='host1,host2' version='v0.17' \
            script='https://bootstrap.saltstack.com/develop'
    '''
    if script is None:
        script = 'https://bootstrap.saltstack.com'

    client_opts = __opts__.copy()
    if roster is not None:
        client_opts['roster'] = roster

    if ssh_user is not None:
        client_opts['ssh_user'] = ssh_user

    if ssh_password is not None:
        client_opts['ssh_passwd'] = ssh_password

    if ssh_priv_key is not None:
        client_opts['ssh_priv'] = ssh_priv_key

    for host in hosts.split(','):
        client_opts['tgt'] = host
        client_opts['selected_target_option'] = 'glob'
        tmp_dir = '{0}-{1}/'.format(tmp_dir.rstrip('/'), uuid.uuid4())
        deploy_command = os.path.join(tmp_dir, 'deploy.sh')
        try:
            client_opts['argv'] = ['file.makedirs', tmp_dir, 'mode=0700']
            salt.client.ssh.SSH(client_opts).run()
            client_opts['argv'] = [
                'http.query',
                script,
                'backend={0}'.format(http_backend),
                'text_out={0}'.format(deploy_command)
            ]
            client = salt.client.ssh.SSH(client_opts).run()
            client_opts['argv'] = [
                'cmd.run',
                ' '.join(['sh', deploy_command, script_args]),
                'python_shell=False'
            ]
            salt.client.ssh.SSH(client_opts).run()
            client_opts['argv'] = ['file.remove', tmp_dir]
            salt.client.ssh.SSH(client_opts).run()
        except SaltSystemExit as exc:
            log.error(six.text_type(exc))
def function[bootstrap, parameter[version, script, hosts, script_args, roster, ssh_user, ssh_password, ssh_priv_key, tmp_dir, http_backend]]: constant[ Bootstrap minions with salt-bootstrap version : develop Git tag of version to install script : https://bootstrap.saltstack.com URL containing the script to execute hosts Comma-separated hosts [example: hosts='host1.local,host2.local']. These hosts need to exist in the specified roster. script_args Any additional arguments that you want to pass to the script. .. versionadded:: 2016.11.0 roster : flat The roster to use for Salt SSH. More information about roster files can be found in :ref:`Salt's Roster Documentation <ssh-roster>`. For a full list of roster types, see the :ref:`builtin roster modules <all-salt.roster>` documentation. .. versionadded:: 2016.11.0 ssh_user If ``user`` isn't found in the ``roster``, a default SSH user can be set here. Keep in mind that ``ssh_user`` will not override the roster ``user`` value if it is already defined. .. versionadded:: 2016.11.0 ssh_password If ``passwd`` isn't found in the ``roster``, a default SSH password can be set here. Keep in mind that ``ssh_password`` will not override the roster ``passwd`` value if it is already defined. .. versionadded:: 2016.11.0 ssh_priv_key If ``priv`` isn't found in the ``roster``, a default SSH private key can be set here. Keep in mind that ``ssh_priv_key`` will not override the roster ``priv`` value if it is already defined. .. versionadded:: 2016.11.0 tmp_dir : /tmp/.bootstrap The temporary directory to download the bootstrap script in. This directory will have ``-<uuid4>`` appended to it. For example: ``/tmp/.bootstrap-a19a728e-d40a-4801-aba9-d00655c143a7/`` .. versionadded:: 2016.11.0 http_backend : tornado The backend library to use to download the script. If you need to use a ``file:///`` URL, then you should set this to ``urllib2``. .. versionadded:: 2016.11.0 CLI Example: .. code-block:: bash salt-run manage.bootstrap hosts='host1,host2' salt-run manage.bootstrap hosts='host1,host2' version='v0.17' salt-run manage.bootstrap hosts='host1,host2' version='v0.17' script='https://bootstrap.saltstack.com/develop' ] if compare[name[script] is constant[None]] begin[:] variable[script] assign[=] constant[https://bootstrap.saltstack.com] variable[client_opts] assign[=] call[name[__opts__].copy, parameter[]] if compare[name[roster] is_not constant[None]] begin[:] call[name[client_opts]][constant[roster]] assign[=] name[roster] if compare[name[ssh_user] is_not constant[None]] begin[:] call[name[client_opts]][constant[ssh_user]] assign[=] name[ssh_user] if compare[name[ssh_password] is_not constant[None]] begin[:] call[name[client_opts]][constant[ssh_passwd]] assign[=] name[ssh_password] if compare[name[ssh_priv_key] is_not constant[None]] begin[:] call[name[client_opts]][constant[ssh_priv]] assign[=] name[ssh_priv_key] for taget[name[host]] in starred[call[name[hosts].split, parameter[constant[,]]]] begin[:] call[name[client_opts]][constant[tgt]] assign[=] name[host] call[name[client_opts]][constant[selected_target_option]] assign[=] constant[glob] variable[tmp_dir] assign[=] call[constant[{0}-{1}/].format, parameter[call[name[tmp_dir].rstrip, parameter[constant[/]]], call[name[uuid].uuid4, parameter[]]]] variable[deploy_command] assign[=] call[name[os].path.join, parameter[name[tmp_dir], constant[deploy.sh]]] <ast.Try object at 0x7da1b2108dc0>
keyword[def] identifier[bootstrap] ( identifier[version] = literal[string] , identifier[script] = keyword[None] , identifier[hosts] = literal[string] , identifier[script_args] = literal[string] , identifier[roster] = literal[string] , identifier[ssh_user] = keyword[None] , identifier[ssh_password] = keyword[None] , identifier[ssh_priv_key] = keyword[None] , identifier[tmp_dir] = literal[string] , identifier[http_backend] = literal[string] ): literal[string] keyword[if] identifier[script] keyword[is] keyword[None] : identifier[script] = literal[string] identifier[client_opts] = identifier[__opts__] . identifier[copy] () keyword[if] identifier[roster] keyword[is] keyword[not] keyword[None] : identifier[client_opts] [ literal[string] ]= identifier[roster] keyword[if] identifier[ssh_user] keyword[is] keyword[not] keyword[None] : identifier[client_opts] [ literal[string] ]= identifier[ssh_user] keyword[if] identifier[ssh_password] keyword[is] keyword[not] keyword[None] : identifier[client_opts] [ literal[string] ]= identifier[ssh_password] keyword[if] identifier[ssh_priv_key] keyword[is] keyword[not] keyword[None] : identifier[client_opts] [ literal[string] ]= identifier[ssh_priv_key] keyword[for] identifier[host] keyword[in] identifier[hosts] . identifier[split] ( literal[string] ): identifier[client_opts] [ literal[string] ]= identifier[host] identifier[client_opts] [ literal[string] ]= literal[string] identifier[tmp_dir] = literal[string] . identifier[format] ( identifier[tmp_dir] . identifier[rstrip] ( literal[string] ), identifier[uuid] . identifier[uuid4] ()) identifier[deploy_command] = identifier[os] . identifier[path] . identifier[join] ( identifier[tmp_dir] , literal[string] ) keyword[try] : identifier[client_opts] [ literal[string] ]=[ literal[string] , identifier[tmp_dir] , literal[string] ] identifier[salt] . identifier[client] . identifier[ssh] . identifier[SSH] ( identifier[client_opts] ). identifier[run] () identifier[client_opts] [ literal[string] ]=[ literal[string] , identifier[script] , literal[string] . identifier[format] ( identifier[http_backend] ), literal[string] . identifier[format] ( identifier[deploy_command] ) ] identifier[client] = identifier[salt] . identifier[client] . identifier[ssh] . identifier[SSH] ( identifier[client_opts] ). identifier[run] () identifier[client_opts] [ literal[string] ]=[ literal[string] , literal[string] . identifier[join] ([ literal[string] , identifier[deploy_command] , identifier[script_args] ]), literal[string] ] identifier[salt] . identifier[client] . identifier[ssh] . identifier[SSH] ( identifier[client_opts] ). identifier[run] () identifier[client_opts] [ literal[string] ]=[ literal[string] , identifier[tmp_dir] ] identifier[salt] . identifier[client] . identifier[ssh] . identifier[SSH] ( identifier[client_opts] ). identifier[run] () keyword[except] identifier[SaltSystemExit] keyword[as] identifier[exc] : identifier[log] . identifier[error] ( identifier[six] . identifier[text_type] ( identifier[exc] ))
def bootstrap(version='develop', script=None, hosts='', script_args='', roster='flat', ssh_user=None, ssh_password=None, ssh_priv_key=None, tmp_dir='/tmp/.bootstrap', http_backend='tornado'): """ Bootstrap minions with salt-bootstrap version : develop Git tag of version to install script : https://bootstrap.saltstack.com URL containing the script to execute hosts Comma-separated hosts [example: hosts='host1.local,host2.local']. These hosts need to exist in the specified roster. script_args Any additional arguments that you want to pass to the script. .. versionadded:: 2016.11.0 roster : flat The roster to use for Salt SSH. More information about roster files can be found in :ref:`Salt's Roster Documentation <ssh-roster>`. A full list of roster types, see the :ref:`builtin roster modules <all-salt.roster>` documentation. .. versionadded:: 2016.11.0 ssh_user If ``user`` isn't found in the ``roster``, a default SSH user can be set here. Keep in mind that ``ssh_user`` will not override the roster ``user`` value if it is already defined. .. versionadded:: 2016.11.0 ssh_password If ``passwd`` isn't found in the ``roster``, a default SSH password can be set here. Keep in mind that ``ssh_password`` will not override the roster ``passwd`` value if it is already defined. .. versionadded:: 2016.11.0 ssh_privkey If ``priv`` isn't found in the ``roster``, a default SSH private key can be set here. Keep in mind that ``ssh_password`` will not override the roster ``passwd`` value if it is already defined. .. versionadded:: 2016.11.0 tmp_dir : /tmp/.bootstrap The temporary directory to download the bootstrap script in. This directory will have ``-<uuid4>`` appended to it. For example: ``/tmp/.bootstrap-a19a728e-d40a-4801-aba9-d00655c143a7/`` .. versionadded:: 2016.11.0 http_backend : tornado The backend library to use to download the script. If you need to use a ``file:///`` URL, then you should set this to ``urllib2``. .. versionadded:: 2016.11.0 CLI Example: .. 
code-block:: bash salt-run manage.bootstrap hosts='host1,host2' salt-run manage.bootstrap hosts='host1,host2' version='v0.17' salt-run manage.bootstrap hosts='host1,host2' version='v0.17' script='https://bootstrap.saltstack.com/develop' """ if script is None: script = 'https://bootstrap.saltstack.com' # depends on [control=['if'], data=['script']] client_opts = __opts__.copy() if roster is not None: client_opts['roster'] = roster # depends on [control=['if'], data=['roster']] if ssh_user is not None: client_opts['ssh_user'] = ssh_user # depends on [control=['if'], data=['ssh_user']] if ssh_password is not None: client_opts['ssh_passwd'] = ssh_password # depends on [control=['if'], data=['ssh_password']] if ssh_priv_key is not None: client_opts['ssh_priv'] = ssh_priv_key # depends on [control=['if'], data=['ssh_priv_key']] for host in hosts.split(','): client_opts['tgt'] = host client_opts['selected_target_option'] = 'glob' tmp_dir = '{0}-{1}/'.format(tmp_dir.rstrip('/'), uuid.uuid4()) deploy_command = os.path.join(tmp_dir, 'deploy.sh') try: client_opts['argv'] = ['file.makedirs', tmp_dir, 'mode=0700'] salt.client.ssh.SSH(client_opts).run() client_opts['argv'] = ['http.query', script, 'backend={0}'.format(http_backend), 'text_out={0}'.format(deploy_command)] client = salt.client.ssh.SSH(client_opts).run() client_opts['argv'] = ['cmd.run', ' '.join(['sh', deploy_command, script_args]), 'python_shell=False'] salt.client.ssh.SSH(client_opts).run() client_opts['argv'] = ['file.remove', tmp_dir] salt.client.ssh.SSH(client_opts).run() # depends on [control=['try'], data=[]] except SaltSystemExit as exc: log.error(six.text_type(exc)) # depends on [control=['except'], data=['exc']] # depends on [control=['for'], data=['host']]
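A minimal, self-contained sketch of the per-host flow above, with the salt-ssh call replaced by a stub run() so it executes anywhere; the host names and the stub are illustrative, not part of the Salt API.

import os
import uuid

def run(argv):
    # stands in for salt.client.ssh.SSH(client_opts).run()
    print('would run:', argv)

for host in 'host1.local,host2.local'.split(','):
    tmp_dir = '/tmp/.bootstrap-{0}/'.format(uuid.uuid4())
    deploy_command = os.path.join(tmp_dir, 'deploy.sh')
    run(['file.makedirs', tmp_dir, 'mode=0700'])
    run(['http.query', 'https://bootstrap.saltstack.com',
         'backend=tornado', 'text_out={0}'.format(deploy_command)])
    run(['cmd.run', ' '.join(['sh', deploy_command, '']).strip(), 'python_shell=False'])
    run(['file.remove', tmp_dir])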
def directory_listing_content_check(directory_path, listing):
    """
    Checks if a given listing is present under the given directory path.

    :param directory_path: The path to the base directory
    :param listing: The listing to check
    :return: None if no errors could be found, otherwise a string describing the error
    """
    if listing:
        for sub in listing:
            path = os.path.join(directory_path, sub['basename'])
            if sub['class'] == 'File':
                if not os.path.isfile(path):
                    return 'listing contains "{}" but this file could not be found on disk.'.format(path)
            elif sub['class'] == 'Directory':
                if not os.path.isdir(path):
                    return 'listing contains "{}" but this directory could not be found on disk.'.format(path)
                sub_listing = sub.get('listing')
                if sub_listing:
                    # Recurse, but keep checking the remaining entries when
                    # the sub-listing reports no error.
                    error = ConnectorManager.directory_listing_content_check(path, sub_listing)
                    if error:
                        return error
    return None
def function[directory_listing_content_check, parameter[directory_path, listing]]: constant[ Checks if a given listing is present under the given directory path. :param directory_path: The path to the base directory :param listing: The listing to check :return: None if no errors could be found, otherwise a string describing the error ] if name[listing] begin[:] for taget[name[sub]] in starred[name[listing]] begin[:] variable[path] assign[=] call[name[os].path.join, parameter[name[directory_path], call[name[sub]][constant[basename]]]] if compare[call[name[sub]][constant[class]] equal[==] constant[File]] begin[:] if <ast.UnaryOp object at 0x7da1b107b790> begin[:] return[call[constant[listing contains "{}" but this file could not be found on disk.].format, parameter[name[path]]]] return[constant[None]]
keyword[def] identifier[directory_listing_content_check] ( identifier[directory_path] , identifier[listing] ): literal[string] keyword[if] identifier[listing] : keyword[for] identifier[sub] keyword[in] identifier[listing] : identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[directory_path] , identifier[sub] [ literal[string] ]) keyword[if] identifier[sub] [ literal[string] ]== literal[string] : keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ): keyword[return] literal[string] . identifier[format] ( identifier[path] ) keyword[elif] identifier[sub] [ literal[string] ]== literal[string] : keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ): keyword[return] literal[string] . identifier[format] ( identifier[path] ) identifier[listing] = identifier[sub] . identifier[get] ( literal[string] ) keyword[if] identifier[listing] : keyword[return] identifier[ConnectorManager] . identifier[directory_listing_content_check] ( identifier[path] , identifier[listing] ) keyword[return] keyword[None]
def directory_listing_content_check(directory_path, listing): """ Checks if a given listing is present under the given directory path. :param directory_path: The path to the base directory :param listing: The listing to check :return: None if no errors could be found, otherwise a string describing the error """ if listing: for sub in listing: path = os.path.join(directory_path, sub['basename']) if sub['class'] == 'File': if not os.path.isfile(path): return 'listing contains "{}" but this file could not be found on disk.'.format(path) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif sub['class'] == 'Directory': if not os.path.isdir(path): return 'listing contains "{}" but this directory could not be found on disk'.format(path) # depends on [control=['if'], data=[]] listing = sub.get('listing') if listing: return ConnectorManager.directory_listing_content_check(path, listing) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sub']] # depends on [control=['if'], data=[]] return None
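A runnable sketch of the check, building a tiny tree in a temporary directory; it assumes ConnectorManager (the class this method appears to live on) is importable from the defining module.

import os
import tempfile

root = tempfile.mkdtemp()
os.mkdir(os.path.join(root, 'data'))
open(os.path.join(root, 'data', 'a.txt'), 'w').close()

listing = [{'class': 'Directory', 'basename': 'data',
            'listing': [{'class': 'File', 'basename': 'a.txt'}]}]
print(ConnectorManager.directory_listing_content_check(root, listing))   # None

listing[0]['listing'].append({'class': 'File', 'basename': 'missing.txt'})
print(ConnectorManager.directory_listing_content_check(root, listing))   # error string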
def inverse(self):
    """Inverse of the quaternion object, encapsulated in a new instance.

    For a unit quaternion, this is the inverse rotation, i.e. when combined
    with the original rotation, will result in the null rotation.

    Returns:
        A new Quaternion object representing the inverse of this object
    """
    ss = self._sum_of_squares()
    if ss > 0:
        return self.__class__(array=(self._vector_conjugate() / ss))
    else:
        raise ZeroDivisionError("a zero quaternion (0 + 0i + 0j + 0k) cannot be inverted")
def function[inverse, parameter[self]]: constant[Inverse of the quaternion object, encapsulated in a new instance. For a unit quaternion, this is the inverse rotation, i.e. when combined with the original rotation, will result in the null rotation. Returns: A new Quaternion object representing the inverse of this object ] variable[ss] assign[=] call[name[self]._sum_of_squares, parameter[]] if compare[name[ss] greater[>] constant[0]] begin[:] return[call[name[self].__class__, parameter[]]]
keyword[def] identifier[inverse] ( identifier[self] ): literal[string] identifier[ss] = identifier[self] . identifier[_sum_of_squares] () keyword[if] identifier[ss] > literal[int] : keyword[return] identifier[self] . identifier[__class__] ( identifier[array] =( identifier[self] . identifier[_vector_conjugate] ()/ identifier[ss] )) keyword[else] : keyword[raise] identifier[ZeroDivisionError] ( literal[string] )
def inverse(self): """Inverse of the quaternion object, encapsulated in a new instance. For a unit quaternion, this is the inverse rotation, i.e. when combined with the original rotation, will result in the null rotation. Returns: A new Quaternion object representing the inverse of this object """ ss = self._sum_of_squares() if ss > 0: return self.__class__(array=self._vector_conjugate() / ss) # depends on [control=['if'], data=['ss']] else: raise ZeroDivisionError('a zero quaternion (0 + 0i + 0j + 0k) cannot be inverted')
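A quick numerical check, assuming the pyquaternion-style Quaternion class this method belongs to (there the inverse is exposed as a property; call q.inverse() instead if your copy keeps it a plain method): composing a rotation with its inverse yields the null rotation, and a zero quaternion must raise.

from pyquaternion import Quaternion

q = Quaternion(axis=[0, 0, 1], angle=1.2)
print(q * q.inverse)                  # ~ Quaternion(1.0, 0.0, 0.0, 0.0)

try:
    Quaternion(0, 0, 0, 0).inverse    # zero quaternion has no inverse
except ZeroDivisionError as exc:
    print(exc)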
def user_exists(username, token_manager=None, app_url=defaults.APP_URL):
    """
    check if the user exists with the specified username
    """
    headers = token_manager.get_access_token_headers()
    auth_url = environment.get_auth_url(app_url=app_url)
    url = "%s/api/v1/accounts?username=%s" % (auth_url, username)
    response = requests.get(url, headers=headers)

    if response.status_code == 404:
        return False
    elif response.status_code == 200:
        return True
    else:
        raise JutException('Error %s: %s' % (response.status_code, response.text))
def function[user_exists, parameter[username, token_manager, app_url]]: constant[ check if the user exists with the specified username ] variable[headers] assign[=] call[name[token_manager].get_access_token_headers, parameter[]] variable[auth_url] assign[=] call[name[environment].get_auth_url, parameter[]] variable[url] assign[=] binary_operation[constant[%s/api/v1/accounts?username=%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1316b60>, <ast.Name object at 0x7da1b1316fe0>]]] variable[response] assign[=] call[name[requests].get, parameter[name[url]]] if compare[name[response].status_code equal[==] constant[404]] begin[:] return[constant[False]]
keyword[def] identifier[user_exists] ( identifier[username] , identifier[token_manager] = keyword[None] , identifier[app_url] = identifier[defaults] . identifier[APP_URL] ): literal[string] identifier[headers] = identifier[token_manager] . identifier[get_access_token_headers] () identifier[auth_url] = identifier[environment] . identifier[get_auth_url] ( identifier[app_url] = identifier[app_url] ) identifier[url] = literal[string] %( identifier[auth_url] , identifier[username] ) identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[headers] ) keyword[if] identifier[response] . identifier[status_code] == literal[int] : keyword[return] keyword[False] keyword[elif] identifier[response] . identifier[status_code] == literal[int] : keyword[return] keyword[True] keyword[else] : keyword[raise] identifier[JutException] ( literal[string] %( identifier[response] . identifier[status_code] , identifier[response] . identifier[text] ))
def user_exists(username, token_manager=None, app_url=defaults.APP_URL): """ check if the user exists with the specified username """ headers = token_manager.get_access_token_headers() auth_url = environment.get_auth_url(app_url=app_url) url = '%s/api/v1/accounts?username=%s' % (auth_url, username) response = requests.get(url, headers=headers) if response.status_code == 404: return False # depends on [control=['if'], data=[]] elif response.status_code == 200: return True # depends on [control=['if'], data=[]] else: raise JutException('Error %s: %s' % (response.status_code, response.text))
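A hypothetical call sketch; DummyTokenManager is a stand-in for whatever object the surrounding client library uses to hold the access token, and only the user_exists() signature comes from the code above.

class DummyTokenManager(object):
    def get_access_token_headers(self):
        return {'Authorization': 'Bearer <token>'}

# user_exists('alice', token_manager=DummyTokenManager()) issues
#   GET <auth_url>/api/v1/accounts?username=alice
# and maps 404 -> False, 200 -> True, anything else -> JutException.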
def run_wrap(self, args):
    """ Wrap some standard protocol around a command's run method.

    This wrapper should generally never capture exceptions.  It can look at
    them and do things but prerun and postrun should always be symmetric.
    Any exception suppression should happen in the `session.execute`. """
    self.fire_event('prerun', args)
    self.prerun(args)
    try:
        if self.session.allow_pager and self.use_pager:
            desc = 'Command\\: %s' % '-'.join(self.prog.split())
            with paging.pager_redirect(desc, **self.get_pager_spec()):
                result = self.run(args)
        else:
            result = self.run(args)
    except (SystemExit, Exception) as e:
        self.postrun(args, exc=e)
        self.fire_event('postrun', args, exc=e)
        raise e
    else:
        self.postrun(args, result=result)
        self.fire_event('postrun', args, result=result)
        return result
def function[run_wrap, parameter[self, args]]: constant[ Wrap some standard protocol around a command's run method. This wrapper should generally never capture exceptions. It can look at them and do things but prerun and postrun should always be symmetric. Any exception suppression should happen in the `session.execute`. ] call[name[self].fire_event, parameter[constant[prerun], name[args]]] call[name[self].prerun, parameter[name[args]]] <ast.Try object at 0x7da18eb55b70>
keyword[def] identifier[run_wrap] ( identifier[self] , identifier[args] ): literal[string] identifier[self] . identifier[fire_event] ( literal[string] , identifier[args] ) identifier[self] . identifier[prerun] ( identifier[args] ) keyword[try] : keyword[if] identifier[self] . identifier[session] . identifier[allow_pager] keyword[and] identifier[self] . identifier[use_pager] : identifier[desc] = literal[string] % literal[string] . identifier[join] ( identifier[self] . identifier[prog] . identifier[split] ()) keyword[with] identifier[paging] . identifier[pager_redirect] ( identifier[desc] ,** identifier[self] . identifier[get_pager_spec] ()): identifier[result] = identifier[self] . identifier[run] ( identifier[args] ) keyword[else] : identifier[result] = identifier[self] . identifier[run] ( identifier[args] ) keyword[except] ( identifier[SystemExit] , identifier[Exception] ) keyword[as] identifier[e] : identifier[self] . identifier[postrun] ( identifier[args] , identifier[exc] = identifier[e] ) identifier[self] . identifier[fire_event] ( literal[string] , identifier[args] , identifier[exc] = identifier[e] ) keyword[raise] identifier[e] keyword[else] : identifier[self] . identifier[postrun] ( identifier[args] , identifier[result] = identifier[result] ) identifier[self] . identifier[fire_event] ( literal[string] , identifier[args] , identifier[result] = identifier[result] ) keyword[return] identifier[result]
def run_wrap(self, args): """ Wrap some standard protocol around a command's run method. This wrapper should generally never capture exceptions. It can look at them and do things but prerun and postrun should always be symmetric. Any exception suppression should happen in the `session.execute`. """ self.fire_event('prerun', args) self.prerun(args) try: if self.session.allow_pager and self.use_pager: desc = 'Command\\: %s' % '-'.join(self.prog.split()) with paging.pager_redirect(desc, **self.get_pager_spec()): result = self.run(args) # depends on [control=['with'], data=[]] # depends on [control=['if'], data=[]] else: result = self.run(args) # depends on [control=['try'], data=[]] except (SystemExit, Exception) as e: self.postrun(args, exc=e) self.fire_event('postrun', args, exc=e) raise e # depends on [control=['except'], data=['e']] else: self.postrun(args, result=result) self.fire_event('postrun', args, result=result) return result
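A self-contained illustration of the pre/post symmetry the wrapper enforces (a sketch, not the real command class): postrun fires exactly once whether run() returns or raises, mirroring the try/except/else above.

def wrapped(prerun, run, postrun, args):
    prerun(args)
    try:
        result = run(args)
    except Exception as e:
        postrun(args, exc=e)
        raise
    else:
        postrun(args, result=result)
        return result

print(wrapped(print, lambda a: a.upper(), lambda a, **kw: print('post', kw), 'ok'))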
def remap_variables(fn):
    """
    Use fn to map the output of any variable getter.

    Args:
        fn (tf.Variable -> tf.Tensor)

    Returns:
        The current variable scope with a custom_getter that maps
        all the variables by fn.

    Example:
        .. code-block:: python

            with varreplace.remap_variables(lambda var: quantize(var)):
                x = FullyConnected('fc', x, 1000)   # fc/{W,b} will be quantized
    """
    def custom_getter(getter, *args, **kwargs):
        v = getter(*args, **kwargs)
        return fn(v)
    return custom_getter_scope(custom_getter)
def function[remap_variables, parameter[fn]]: constant[ Use fn to map the output of any variable getter. Args: fn (tf.Variable -> tf.Tensor) Returns: The current variable scope with a custom_getter that maps all the variables by fn. Example: .. code-block:: python with varreplace.remap_variables(lambda var: quantize(var)): x = FullyConnected('fc', x, 1000) # fc/{W,b} will be quantized ] def function[custom_getter, parameter[getter]]: variable[v] assign[=] call[name[getter], parameter[<ast.Starred object at 0x7da18bcc86a0>]] return[call[name[fn], parameter[name[v]]]] return[call[name[custom_getter_scope], parameter[name[custom_getter]]]]
keyword[def] identifier[remap_variables] ( identifier[fn] ): literal[string] keyword[def] identifier[custom_getter] ( identifier[getter] ,* identifier[args] ,** identifier[kwargs] ): identifier[v] = identifier[getter] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[fn] ( identifier[v] ) keyword[return] identifier[custom_getter_scope] ( identifier[custom_getter] )
def remap_variables(fn): """ Use fn to map the output of any variable getter. Args: fn (tf.Variable -> tf.Tensor) Returns: The current variable scope with a custom_getter that maps all the variables by fn. Example: .. code-block:: python with varreplace.remap_variables(lambda var: quantize(var)): x = FullyConnected('fc', x, 1000) # fc/{W,b} will be quantized """ def custom_getter(getter, *args, **kwargs): v = getter(*args, **kwargs) return fn(v) return custom_getter_scope(custom_getter)
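A hedged usage sketch in TF1 graph mode (under TF2 substitute tf.compat.v1); fake_quantize is a placeholder for any Variable -> Tensor map, and custom_getter_scope is assumed to come from the same tensorpack module as the function above.

import tensorflow as tf

def fake_quantize(v):
    return tf.round(v * 16.) / 16.    # illustrative rounding only

with remap_variables(fake_quantize):
    x = tf.get_variable('w', shape=[3])   # reads of 'w' now pass through fake_quantize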
def y(self, y):
    """Project y as x"""
    if y is None:
        return None
    if self._force_vertical:
        return super(HorizontalView, self).y(y)
    return super(HorizontalView, self).x(y)
def function[y, parameter[self, y]]: constant[Project y as x] if compare[name[y] is constant[None]] begin[:] return[constant[None]] if name[self]._force_vertical begin[:] return[call[call[name[super], parameter[name[HorizontalView], name[self]]].y, parameter[name[y]]]] return[call[call[name[super], parameter[name[HorizontalView], name[self]]].x, parameter[name[y]]]]
keyword[def] identifier[y] ( identifier[self] , identifier[y] ): literal[string] keyword[if] identifier[y] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] identifier[self] . identifier[_force_vertical] : keyword[return] identifier[super] ( identifier[HorizontalView] , identifier[self] ). identifier[y] ( identifier[y] ) keyword[return] identifier[super] ( identifier[HorizontalView] , identifier[self] ). identifier[x] ( identifier[y] )
def y(self, y): """Project y as x""" if y is None: return None # depends on [control=['if'], data=[]] if self._force_vertical: return super(HorizontalView, self).y(y) # depends on [control=['if'], data=[]] return super(HorizontalView, self).x(y)
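A toy sketch of the axis swap (this looks like pygal's HorizontalView; the stub base class below is illustrative only): in horizontal mode a y value is projected with the parent's x projector unless _force_vertical is set.

class View(object):
    def x(self, v): return ('x-axis', v)
    def y(self, v): return ('y-axis', v)

class HorizontalView(View):
    _force_vertical = False
    def y(self, y):
        if y is None:
            return None
        if self._force_vertical:
            return super(HorizontalView, self).y(y)
        return super(HorizontalView, self).x(y)

print(HorizontalView().y(3))   # ('x-axis', 3)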
def set_misc(self):
    '''Set more parameters, after the tank is better defined than in the
    __init__ function.

    Notes
    -----
    Two of D, L, and L_over_D must be known when this function runs.
    The other one is set from the other two first thing in this function.

    a_ratio parameters are used to calculate a values for the heads here,
    if applicable.

    Radius is calculated here.

    Maximum tank height is calculated here.

    V_total is calculated here.
    '''
    if self.D and self.L:
        # If L and D are known, get L_over_D
        self.L_over_D = self.L/self.D
    elif self.D and self.L_over_D:
        # Otherwise, if L_over_D and D are provided, get L
        self.L = self.D*self.L_over_D
    elif self.L and self.L_over_D:
        # Otherwise, if L_over_D and L are provided, get D
        self.D = self.L/self.L_over_D

    # Calculate radius
    self.R = self.D/2.

    # If a_ratio is provided for either heads, use it.
    if self.sideA and self.D:
        if not self.sideA_a and self.sideA in ['conical', 'ellipsoidal', 'guppy', 'spherical']:
            self.sideA_a = self.D*self.sideA_a_ratio
    if self.sideB and self.D:
        if not self.sideB_a and self.sideB in ['conical', 'ellipsoidal', 'guppy', 'spherical']:
            self.sideB_a = self.D*self.sideB_a_ratio

    # Calculate a for torispherical heads
    if self.sideA == 'torispherical' and self.sideA_f and self.sideA_k:
        self.sideA_a = a_torispherical(self.D, self.sideA_f, self.sideA_k)
    if self.sideB == 'torispherical' and self.sideB_f and self.sideB_k:
        self.sideB_a = a_torispherical(self.D, self.sideB_f, self.sideB_k)

    # Calculate maximum tank height, h_max
    if self.horizontal:
        self.h_max = self.D
    else:
        self.h_max = self.L
        if self.sideA_a:
            self.h_max += self.sideA_a
        if self.sideB_a:
            self.h_max += self.sideB_a

    # Set total volume at maximum height
    self.V_total = self.V_from_h(self.h_max)

    # Set surface areas
    self.A, (self.A_sideA, self.A_sideB, self.A_lateral) = SA_tank(
        D=self.D, L=self.L, sideA=self.sideA, sideB=self.sideB,
        sideA_a=self.sideA_a, sideB_a=self.sideB_a, sideA_f=self.sideA_f,
        sideA_k=self.sideA_k, sideB_f=self.sideB_f, sideB_k=self.sideB_k,
        full_output=True)
def function[set_misc, parameter[self]]: constant[Set more parameters, after the tank is better defined than in the __init__ function. Notes ----- Two of D, L, and L_over_D must be known when this function runs. The other one is set from the other two first thing in this function. a_ratio parameters are used to calculate a values for the heads here, if applicable. Radius is calculated here. Maximum tank height is calculated here. V_total is calculated here. ] if <ast.BoolOp object at 0x7da1b1264880> begin[:] name[self].L_over_D assign[=] binary_operation[name[self].L / name[self].D] name[self].R assign[=] binary_operation[name[self].D / constant[2.0]] if <ast.BoolOp object at 0x7da1b1264070> begin[:] if <ast.BoolOp object at 0x7da1b1264df0> begin[:] name[self].sideA_a assign[=] binary_operation[name[self].D * name[self].sideA_a_ratio] if <ast.BoolOp object at 0x7da1b12ca680> begin[:] if <ast.BoolOp object at 0x7da1b12c8d00> begin[:] name[self].sideB_a assign[=] binary_operation[name[self].D * name[self].sideB_a_ratio] if <ast.BoolOp object at 0x7da1b12c8340> begin[:] name[self].sideA_a assign[=] call[name[a_torispherical], parameter[name[self].D, name[self].sideA_f, name[self].sideA_k]] if <ast.BoolOp object at 0x7da1b12c95d0> begin[:] name[self].sideB_a assign[=] call[name[a_torispherical], parameter[name[self].D, name[self].sideB_f, name[self].sideB_k]] if name[self].horizontal begin[:] name[self].h_max assign[=] name[self].D name[self].V_total assign[=] call[name[self].V_from_h, parameter[name[self].h_max]] <ast.Tuple object at 0x7da1b12cbf40> assign[=] call[name[SA_tank], parameter[]]
keyword[def] identifier[set_misc] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[D] keyword[and] identifier[self] . identifier[L] : identifier[self] . identifier[L_over_D] = identifier[self] . identifier[L] / identifier[self] . identifier[D] keyword[elif] identifier[self] . identifier[D] keyword[and] identifier[self] . identifier[L_over_D] : identifier[self] . identifier[L] = identifier[self] . identifier[D] * identifier[self] . identifier[L_over_D] keyword[elif] identifier[self] . identifier[L] keyword[and] identifier[self] . identifier[L_over_D] : identifier[self] . identifier[D] = identifier[self] . identifier[L] / identifier[self] . identifier[L_over_D] identifier[self] . identifier[R] = identifier[self] . identifier[D] / literal[int] keyword[if] identifier[self] . identifier[sideA] keyword[and] identifier[self] . identifier[D] : keyword[if] keyword[not] identifier[self] . identifier[sideA_a] keyword[and] identifier[self] . identifier[sideA] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[self] . identifier[sideA_a] = identifier[self] . identifier[D] * identifier[self] . identifier[sideA_a_ratio] keyword[if] identifier[self] . identifier[sideB] keyword[and] identifier[self] . identifier[D] : keyword[if] keyword[not] identifier[self] . identifier[sideB_a] keyword[and] identifier[self] . identifier[sideB] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[self] . identifier[sideB_a] = identifier[self] . identifier[D] * identifier[self] . identifier[sideB_a_ratio] keyword[if] identifier[self] . identifier[sideA] == literal[string] keyword[and] identifier[self] . identifier[sideA_f] keyword[and] identifier[self] . identifier[sideA_k] : identifier[self] . identifier[sideA_a] = identifier[a_torispherical] ( identifier[self] . identifier[D] , identifier[self] . identifier[sideA_f] , identifier[self] . identifier[sideA_k] ) keyword[if] identifier[self] . identifier[sideB] == literal[string] keyword[and] identifier[self] . identifier[sideB_f] keyword[and] identifier[self] . identifier[sideB_k] : identifier[self] . identifier[sideB_a] = identifier[a_torispherical] ( identifier[self] . identifier[D] , identifier[self] . identifier[sideB_f] , identifier[self] . identifier[sideB_k] ) keyword[if] identifier[self] . identifier[horizontal] : identifier[self] . identifier[h_max] = identifier[self] . identifier[D] keyword[else] : identifier[self] . identifier[h_max] = identifier[self] . identifier[L] keyword[if] identifier[self] . identifier[sideA_a] : identifier[self] . identifier[h_max] += identifier[self] . identifier[sideA_a] keyword[if] identifier[self] . identifier[sideB_a] : identifier[self] . identifier[h_max] += identifier[self] . identifier[sideB_a] identifier[self] . identifier[V_total] = identifier[self] . identifier[V_from_h] ( identifier[self] . identifier[h_max] ) identifier[self] . identifier[A] ,( identifier[self] . identifier[A_sideA] , identifier[self] . identifier[A_sideB] , identifier[self] . identifier[A_lateral] )= identifier[SA_tank] ( identifier[D] = identifier[self] . identifier[D] , identifier[L] = identifier[self] . identifier[L] , identifier[sideA] = identifier[self] . identifier[sideA] , identifier[sideB] = identifier[self] . identifier[sideB] , identifier[sideA_a] = identifier[self] . identifier[sideA_a] , identifier[sideB_a] = identifier[self] . identifier[sideB_a] , identifier[sideA_f] = identifier[self] . 
identifier[sideA_f] , identifier[sideA_k] = identifier[self] . identifier[sideA_k] , identifier[sideB_f] = identifier[self] . identifier[sideB_f] , identifier[sideB_k] = identifier[self] . identifier[sideB_k] , identifier[full_output] = keyword[True] )
def set_misc(self): """Set more parameters, after the tank is better defined than in the __init__ function. Notes ----- Two of D, L, and L_over_D must be known when this function runs. The other one is set from the other two first thing in this function. a_ratio parameters are used to calculate a values for the heads here, if applicable. Radius is calculated here. Maximum tank height is calculated here. V_total is calculated here. """ if self.D and self.L: # If L and D are known, get L_over_D self.L_over_D = self.L / self.D # depends on [control=['if'], data=[]] elif self.D and self.L_over_D: # Otherwise, if L_over_D and D are provided, get L self.L = self.D * self.L_over_D # depends on [control=['if'], data=[]] elif self.L and self.L_over_D: # Otherwise, if L_over_D and L are provided, get D self.D = self.L / self.L_over_D # depends on [control=['if'], data=[]] # Calculate diameter self.R = self.D / 2.0 # If a_ratio is provided for either heads, use it. if self.sideA and self.D: if not self.sideA_a and self.sideA in ['conical', 'ellipsoidal', 'guppy', 'spherical']: self.sideA_a = self.D * self.sideA_a_ratio # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if self.sideB and self.D: if not self.sideB_a and self.sideB in ['conical', 'ellipsoidal', 'guppy', 'spherical']: self.sideB_a = self.D * self.sideB_a_ratio # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Calculate a for torispherical heads if self.sideA == 'torispherical' and self.sideA_f and self.sideA_k: self.sideA_a = a_torispherical(self.D, self.sideA_f, self.sideA_k) # depends on [control=['if'], data=[]] if self.sideB == 'torispherical' and self.sideB_f and self.sideB_k: self.sideB_a = a_torispherical(self.D, self.sideB_f, self.sideB_k) # depends on [control=['if'], data=[]] # Calculate maximum tank height, h_max if self.horizontal: self.h_max = self.D # depends on [control=['if'], data=[]] else: self.h_max = self.L if self.sideA_a: self.h_max += self.sideA_a # depends on [control=['if'], data=[]] if self.sideB_a: self.h_max += self.sideB_a # depends on [control=['if'], data=[]] # Set maximum height self.V_total = self.V_from_h(self.h_max) # Set surface areas (self.A, (self.A_sideA, self.A_sideB, self.A_lateral)) = SA_tank(D=self.D, L=self.L, sideA=self.sideA, sideB=self.sideB, sideA_a=self.sideA_a, sideB_a=self.sideB_a, sideA_f=self.sideA_f, sideA_k=self.sideA_k, sideB_f=self.sideB_f, sideB_k=self.sideB_k, full_output=True)
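A plausible construction sketch, assuming this is the TANK geometry class from the fluids package (its __init__ calls set_misc() once two of D, L and L_over_D are known); the dimensions are placeholders.

from fluids.geometry import TANK

t = TANK(D=1.2, L=4.0, horizontal=False,
         sideA='torispherical', sideB='torispherical',
         sideA_f=1.0, sideA_k=0.06, sideB_f=1.0, sideB_k=0.06)
print(t.h_max, t.V_total)   # populated by set_misc()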
def delete_queue(self, queue_name, fail_not_exist=False):
    """Delete a queue entity.

    :param queue_name: The name of the queue to delete.
    :type queue_name: str
    :param fail_not_exist: Whether to raise an exception if the named queue is not
     found. If set to True, a ServiceBusResourceNotFound will be raised.
     Default value is False.
    :type fail_not_exist: bool
    :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the
     namespace is not found.
    :raises: ~azure.servicebus.common.errors.ServiceBusResourceNotFound if the
     queue is not found and `fail_not_exist` is set to True.
    """
    try:
        return self.mgmt_client.delete_queue(queue_name, fail_not_exist=fail_not_exist)
    except requests.exceptions.ConnectionError as e:
        raise ServiceBusConnectionError("Namespace: {} not found".format(self.service_namespace), e)
    except azure.common.AzureMissingResourceHttpError as e:
        raise ServiceBusResourceNotFound("Specified queue '{}' does not exist.".format(queue_name), e)
def function[delete_queue, parameter[self, queue_name, fail_not_exist]]: constant[Delete a queue entity. :param queue_name: The name of the queue to delete. :type queue_name: str :param fail_not_exist: Whether to raise an exception if the named queue is not found. If set to True, a ServiceBusResourceNotFound will be raised. Default value is False. :type fail_not_exist: bool :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namesapce is not found. :raises: ~azure.servicebus.common.errors.ServiceBusResourceNotFound if the queue is not found and `fail_not_exist` is set to True. ] <ast.Try object at 0x7da1b0356b00>
keyword[def] identifier[delete_queue] ( identifier[self] , identifier[queue_name] , identifier[fail_not_exist] = keyword[False] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[mgmt_client] . identifier[delete_queue] ( identifier[queue_name] , identifier[fail_not_exist] = identifier[fail_not_exist] ) keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] keyword[as] identifier[e] : keyword[raise] identifier[ServiceBusConnectionError] ( literal[string] . identifier[format] ( identifier[self] . identifier[service_namespace] ), identifier[e] ) keyword[except] identifier[azure] . identifier[common] . identifier[AzureMissingResourceHttpError] keyword[as] identifier[e] : keyword[raise] identifier[ServiceBusResourceNotFound] ( literal[string] . identifier[format] ( identifier[queue_name] ), identifier[e] )
def delete_queue(self, queue_name, fail_not_exist=False): """Delete a queue entity. :param queue_name: The name of the queue to delete. :type queue_name: str :param fail_not_exist: Whether to raise an exception if the named queue is not found. If set to True, a ServiceBusResourceNotFound will be raised. Default value is False. :type fail_not_exist: bool :raises: ~azure.servicebus.common.errors.ServiceBusConnectionError if the namesapce is not found. :raises: ~azure.servicebus.common.errors.ServiceBusResourceNotFound if the queue is not found and `fail_not_exist` is set to True. """ try: return self.mgmt_client.delete_queue(queue_name, fail_not_exist=fail_not_exist) # depends on [control=['try'], data=[]] except requests.exceptions.ConnectionError as e: raise ServiceBusConnectionError('Namespace: {} not found'.format(self.service_namespace), e) # depends on [control=['except'], data=['e']] except azure.common.AzureMissingResourceHttpError as e: raise ServiceBusResourceNotFound("Specificed queue '{}' does not exist.".format(queue_name), e) # depends on [control=['except'], data=['e']]
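A hedged example against the azure-servicebus 0.50-era ServiceBusClient this method appears to belong to; the connection string and queue name are placeholders.

from azure.servicebus import ServiceBusClient

client = ServiceBusClient.from_connection_string('<connection-string>')
client.delete_queue('myqueue')                        # silently ignores a missing queue
client.delete_queue('myqueue', fail_not_exist=True)   # raises ServiceBusResourceNotFound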
def get_include():
    """
    Returns a list of header include paths (for lxml itself, libxml2
    and libxslt) needed to compile C code against lxml if it was built
    with statically linked libraries.
    """
    import os
    lxml_path = __path__[0]
    include_path = os.path.join(lxml_path, 'includes')
    includes = [include_path, lxml_path]

    for name in os.listdir(include_path):
        path = os.path.join(include_path, name)
        if os.path.isdir(path):
            includes.append(path)

    return includes
def function[get_include, parameter[]]: constant[ Returns a list of header include paths (for lxml itself, libxml2 and libxslt) needed to compile C code against lxml if it was built with statically linked libraries. ] import module[os] variable[lxml_path] assign[=] call[name[__path__]][constant[0]] variable[include_path] assign[=] call[name[os].path.join, parameter[name[lxml_path], constant[includes]]] variable[includes] assign[=] list[[<ast.Name object at 0x7da18bc70cd0>, <ast.Name object at 0x7da18bc71810>]] for taget[name[name]] in starred[call[name[os].listdir, parameter[name[include_path]]]] begin[:] variable[path] assign[=] call[name[os].path.join, parameter[name[include_path], name[name]]] if call[name[os].path.isdir, parameter[name[path]]] begin[:] call[name[includes].append, parameter[name[path]]] return[name[includes]]
keyword[def] identifier[get_include] (): literal[string] keyword[import] identifier[os] identifier[lxml_path] = identifier[__path__] [ literal[int] ] identifier[include_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[lxml_path] , literal[string] ) identifier[includes] =[ identifier[include_path] , identifier[lxml_path] ] keyword[for] identifier[name] keyword[in] identifier[os] . identifier[listdir] ( identifier[include_path] ): identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[include_path] , identifier[name] ) keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ): identifier[includes] . identifier[append] ( identifier[path] ) keyword[return] identifier[includes]
def get_include(): """ Returns a list of header include paths (for lxml itself, libxml2 and libxslt) needed to compile C code against lxml if it was built with statically linked libraries. """ import os lxml_path = __path__[0] include_path = os.path.join(lxml_path, 'includes') includes = [include_path, lxml_path] for name in os.listdir(include_path): path = os.path.join(include_path, name) if os.path.isdir(path): includes.append(path) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']] return includes
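Typical use in a setup.py that compiles a C extension against a statically built lxml; the module name and source file are placeholders.

from setuptools import Extension
import lxml

ext = Extension('mymod', sources=['mymod.c'],
                include_dirs=lxml.get_include())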
def create_db():
    """
    Create all your database objects (SQLAlchemy specific).
    """
    from flask_appbuilder.models.sqla import Model

    engine = current_app.appbuilder.get_session.get_bind(mapper=None, clause=None)
    Model.metadata.create_all(engine)
    click.echo(click.style("DB objects created", fg="green"))
def function[create_db, parameter[]]: constant[ Create all your database objects (SQLAlchemy specific). ] from relative_module[flask_appbuilder.models.sqla] import module[Model] variable[engine] assign[=] call[name[current_app].appbuilder.get_session.get_bind, parameter[]] call[name[Model].metadata.create_all, parameter[name[engine]]] call[name[click].echo, parameter[call[name[click].style, parameter[constant[DB objects created]]]]]
keyword[def] identifier[create_db] (): literal[string] keyword[from] identifier[flask_appbuilder] . identifier[models] . identifier[sqla] keyword[import] identifier[Model] identifier[engine] = identifier[current_app] . identifier[appbuilder] . identifier[get_session] . identifier[get_bind] ( identifier[mapper] = keyword[None] , identifier[clause] = keyword[None] ) identifier[Model] . identifier[metadata] . identifier[create_all] ( identifier[engine] ) identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( literal[string] , identifier[fg] = literal[string] ))
def create_db(): """ Create all your database objects (SQLAlchemy specific). """ from flask_appbuilder.models.sqla import Model engine = current_app.appbuilder.get_session.get_bind(mapper=None, clause=None) Model.metadata.create_all(engine) click.echo(click.style('DB objects created', fg='green'))
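This is the body of Flask-AppBuilder's create-db CLI command; a minimal sketch of invoking the same logic programmatically, assuming `app` is your configured Flask application.

with app.app_context():
    create_db()   # runs Model.metadata.create_all() against the app's engine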
def get_script_property(value, is_bytes=False):
    """Get `SC` property."""

    obj = unidata.ascii_scripts if is_bytes else unidata.unicode_scripts

    if value.startswith('^'):
        negated = value[1:]
        value = '^' + unidata.unicode_alias['script'].get(negated, negated)
    else:
        value = unidata.unicode_alias['script'].get(value, value)

    return obj[value]
def function[get_script_property, parameter[value, is_bytes]]: constant[Get `SC` property.] variable[obj] assign[=] <ast.IfExp object at 0x7da1b032f1c0> if call[name[value].startswith, parameter[constant[^]]] begin[:] variable[negated] assign[=] call[name[value]][<ast.Slice object at 0x7da1b032e950>] variable[value] assign[=] binary_operation[constant[^] + call[call[name[unidata].unicode_alias][constant[script]].get, parameter[name[negated], name[negated]]]] return[call[name[obj]][name[value]]]
keyword[def] identifier[get_script_property] ( identifier[value] , identifier[is_bytes] = keyword[False] ): literal[string] identifier[obj] = identifier[unidata] . identifier[ascii_scripts] keyword[if] identifier[is_bytes] keyword[else] identifier[unidata] . identifier[unicode_scripts] keyword[if] identifier[value] . identifier[startswith] ( literal[string] ): identifier[negated] = identifier[value] [ literal[int] :] identifier[value] = literal[string] + identifier[unidata] . identifier[unicode_alias] [ literal[string] ]. identifier[get] ( identifier[negated] , identifier[negated] ) keyword[else] : identifier[value] = identifier[unidata] . identifier[unicode_alias] [ literal[string] ]. identifier[get] ( identifier[value] , identifier[value] ) keyword[return] identifier[obj] [ identifier[value] ]
def get_script_property(value, is_bytes=False): """Get `SC` property.""" obj = unidata.ascii_scripts if is_bytes else unidata.unicode_scripts if value.startswith('^'): negated = value[1:] value = '^' + unidata.unicode_alias['script'].get(negated, negated) # depends on [control=['if'], data=[]] else: value = unidata.unicode_alias['script'].get(value, value) return obj[value]
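A self-contained illustration of the alias-plus-negation lookup, with tiny stand-in tables replacing the generated unidata dictionaries.

scripts = {'latin': '[a-z]', '^latin': '[^a-z]'}   # stand-in property table
alias = {'latn': 'latin'}                          # stand-in alias table

def lookup(value):
    if value.startswith('^'):
        negated = value[1:]
        value = '^' + alias.get(negated, negated)
    else:
        value = alias.get(value, value)
    return scripts[value]

print(lookup('latn'), lookup('^latn'))   # [a-z] [^a-z]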
def list(self, request, *args, **kwargs):
    """
    To get a list of alerts, run **GET** against */api/alerts/* as authenticated user.

    Alert severity field can take one of these values: "Error", "Warning", "Info", "Debug".
    The scope field will contain a link to the object that caused the alert.
    Context is a dictionary that contains information about all objects related to the alert.

    Alerts can be filtered by:
     - ?severity=<severity> (can be list)
     - ?alert_type=<alert_type> (can be list)
     - ?scope=<url> concrete alert scope
     - ?scope_type=<string> name of scope type (Ex.: instance, service_project_link, project...)
       DEPRECATED use ?content_type instead
     - ?created_from=<timestamp>
     - ?created_to=<timestamp>
     - ?closed_from=<timestamp>
     - ?closed_to=<timestamp>
     - ?from=<timestamp> - filter alerts that were active from the given date
     - ?to=<timestamp> - filter alerts that were active to the given date
     - ?opened - if this argument is in the GET request the endpoint will return only alerts that are not closed
     - ?closed - if this argument is in the GET request the endpoint will return only alerts that are closed
     - ?aggregate=aggregate_model_name (default: 'customer'. Has to be from the list: 'customer', 'project')
     - ?uuid=uuid_of_aggregate_model_object (not required. If this parameter is defined,
       the result will contain only the object with the given uuid)
     - ?acknowledged=True|False - show only acknowledged (non-acknowledged) alerts
     - ?content_type=<string> name of scope content type in format <app_name>.<scope_type>
       (Ex.: structure.project, openstack.instance...)
     - ?exclude_features=<feature> (can be list) - exclude an alert from the output if its
       type corresponds to one of the given features

    Alerts can be ordered by:

     - ?o=severity - order by severity
     - ?o=created - order by creation time

    .. code-block:: http

        GET /api/alerts/
        Accept: application/json
        Content-Type: application/json
        Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4
        Host: example.com

        [
            {
                "url": "http://example.com/api/alerts/e80e48a4e58b48ff9a1320a0aa0d68ab/",
                "uuid": "e80e48a4e58b48ff9a1320a0aa0d68ab",
                "alert_type": "first_alert",
                "message": "message#1",
                "severity": "Debug",
                "scope": "http://example.com/api/instances/9d1d7e03b0d14fd0b42b5f649dfa3de5/",
                "created": "2015-05-29T14:24:27.342Z",
                "closed": null,
                "context": {
                    "customer_abbreviation": "customer_abbreviation",
                    "customer_contact_details": "customer details",
                    "customer_name": "Customer name",
                    "customer_uuid": "53c6e86406e349faa7924f4c865b15ab",
                    "quota_limit": "131072.0",
                    "quota_name": "ram",
                    "quota_usage": "131071",
                    "quota_uuid": "f6ae2f7ca86f4e2f9bb64de1015a2815",
                    "scope_name": "project X",
                    "scope_uuid": "0238d71ee1934bd2839d4e71e5f9b91a"
                },
                "acknowledged": true
            }
        ]
    """
    return super(AlertViewSet, self).list(request, *args, **kwargs)
def function[list, parameter[self, request]]: constant[ To get a list of alerts, run **GET** against */api/alerts/* as authenticated user. Alert severity field can take one of this values: "Error", "Warning", "Info", "Debug". Field scope will contain link to object that cause alert. Context - dictionary that contains information about all related to alert objects. Alerts can be filtered by: - ?severity=<severity> (can be list) - ?alert_type=<alert_type> (can be list) - ?scope=<url> concrete alert scope - ?scope_type=<string> name of scope type (Ex.: instance, service_project_link, project...) DEPRECATED use ?content_type instead - ?created_from=<timestamp> - ?created_to=<timestamp> - ?closed_from=<timestamp> - ?closed_to=<timestamp> - ?from=<timestamp> - filter alerts that was active from given date - ?to=<timestamp> - filter alerts that was active to given date - ?opened - if this argument is in GET request endpoint will return only alerts that are not closed - ?closed - if this argument is in GET request endpoint will return only alerts that are closed - ?aggregate=aggregate_model_name (default: 'customer'. Have to be from list: 'customer', project') - ?uuid=uuid_of_aggregate_model_object (not required. If this parameter will be defined - result ill contain only object with given uuid) - ?acknowledged=True|False - show only acknowledged (non-acknowledged) alerts - ?content_type=<string> name of scope content type in format <app_name>.<scope_type> (Ex.: structure.project, openstack.instance...) - ?exclude_features=<feature> (can be list) - exclude alert from output if it's type corresponds o one of given features Alerts can be ordered by: -?o=severity - order by severity -?o=created - order by creation time .. code-block:: http GET /api/alerts/ Accept: application/json Content-Type: application/json Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4 Host: example.com [ { "url": "http://example.com/api/alerts/e80e48a4e58b48ff9a1320a0aa0d68ab/", "uuid": "e80e48a4e58b48ff9a1320a0aa0d68ab", "alert_type": "first_alert", "message": "message#1", "severity": "Debug", "scope": "http://example.com/api/instances/9d1d7e03b0d14fd0b42b5f649dfa3de5/", "created": "2015-05-29T14:24:27.342Z", "closed": null, "context": { 'customer_abbreviation': 'customer_abbreviation', 'customer_contact_details': 'customer details', 'customer_name': 'Customer name', 'customer_uuid': '53c6e86406e349faa7924f4c865b15ab', 'quota_limit': '131072.0', 'quota_name': 'ram', 'quota_usage': '131071', 'quota_uuid': 'f6ae2f7ca86f4e2f9bb64de1015a2815', 'scope_name': 'project X', 'scope_uuid': '0238d71ee1934bd2839d4e71e5f9b91a' } "acknowledged": true, } ] ] return[call[call[name[super], parameter[name[AlertViewSet], name[self]]].list, parameter[name[request], <ast.Starred object at 0x7da1b0eda560>]]]
keyword[def] identifier[list] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[super] ( identifier[AlertViewSet] , identifier[self] ). identifier[list] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
def list(self, request, *args, **kwargs): """ To get a list of alerts, run **GET** against */api/alerts/* as authenticated user. Alert severity field can take one of this values: "Error", "Warning", "Info", "Debug". Field scope will contain link to object that cause alert. Context - dictionary that contains information about all related to alert objects. Alerts can be filtered by: - ?severity=<severity> (can be list) - ?alert_type=<alert_type> (can be list) - ?scope=<url> concrete alert scope - ?scope_type=<string> name of scope type (Ex.: instance, service_project_link, project...) DEPRECATED use ?content_type instead - ?created_from=<timestamp> - ?created_to=<timestamp> - ?closed_from=<timestamp> - ?closed_to=<timestamp> - ?from=<timestamp> - filter alerts that was active from given date - ?to=<timestamp> - filter alerts that was active to given date - ?opened - if this argument is in GET request endpoint will return only alerts that are not closed - ?closed - if this argument is in GET request endpoint will return only alerts that are closed - ?aggregate=aggregate_model_name (default: 'customer'. Have to be from list: 'customer', project') - ?uuid=uuid_of_aggregate_model_object (not required. If this parameter will be defined - result ill contain only object with given uuid) - ?acknowledged=True|False - show only acknowledged (non-acknowledged) alerts - ?content_type=<string> name of scope content type in format <app_name>.<scope_type> (Ex.: structure.project, openstack.instance...) - ?exclude_features=<feature> (can be list) - exclude alert from output if it's type corresponds o one of given features Alerts can be ordered by: -?o=severity - order by severity -?o=created - order by creation time .. code-block:: http GET /api/alerts/ Accept: application/json Content-Type: application/json Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4 Host: example.com [ { "url": "http://example.com/api/alerts/e80e48a4e58b48ff9a1320a0aa0d68ab/", "uuid": "e80e48a4e58b48ff9a1320a0aa0d68ab", "alert_type": "first_alert", "message": "message#1", "severity": "Debug", "scope": "http://example.com/api/instances/9d1d7e03b0d14fd0b42b5f649dfa3de5/", "created": "2015-05-29T14:24:27.342Z", "closed": null, "context": { 'customer_abbreviation': 'customer_abbreviation', 'customer_contact_details': 'customer details', 'customer_name': 'Customer name', 'customer_uuid': '53c6e86406e349faa7924f4c865b15ab', 'quota_limit': '131072.0', 'quota_name': 'ram', 'quota_usage': '131071', 'quota_uuid': 'f6ae2f7ca86f4e2f9bb64de1015a2815', 'scope_name': 'project X', 'scope_uuid': '0238d71ee1934bd2839d4e71e5f9b91a' } "acknowledged": true, } ] """ return super(AlertViewSet, self).list(request, *args, **kwargs)
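A hedged client-side sketch of the filters documented above; the host and token are placeholders taken from the example payload.

import requests

resp = requests.get(
    'https://example.com/api/alerts/',
    params={'severity': ['Error', 'Warning'], 'opened': '', 'o': 'created'},
    headers={'Authorization': 'Token c84d653b9ec92c6cbac41c706593e66f567a7fa4'})
for alert in resp.json():
    print(alert['severity'], alert['message'])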
def get_balance(self, address):
    """
    returns the balance object from blockcypher for address
    """
    url_append = "/balance?token=%s" % self.api_key
    url = self.base_url("addrs/%s" % (address + url_append))
    result = json.loads(urlopen(url).read().decode("utf8"))
    return result
def function[get_balance, parameter[self, address]]: constant[ returns the balance object from blockcypher for address ] variable[url_append] assign[=] binary_operation[constant[/balance?token=%s] <ast.Mod object at 0x7da2590d6920> name[self].api_key] variable[url] assign[=] call[name[self].base_url, parameter[binary_operation[constant[addrs/%s] <ast.Mod object at 0x7da2590d6920> binary_operation[name[address] + name[url_append]]]]] variable[result] assign[=] call[name[json].loads, parameter[call[call[call[name[urlopen], parameter[name[url]]].read, parameter[]].decode, parameter[constant[utf8]]]]] return[name[result]]
keyword[def] identifier[get_balance] ( identifier[self] , identifier[address] ): literal[string] identifier[url_append] = literal[string] % identifier[self] . identifier[api_key] identifier[url] = identifier[self] . identifier[base_url] ( literal[string] %( identifier[address] + identifier[url_append] )) identifier[result] = identifier[json] . identifier[loads] ( identifier[urlopen] ( identifier[url] ). identifier[read] (). identifier[decode] ( literal[string] )) keyword[return] identifier[result]
def get_balance(self, address): """ returns the balance object from blockcypher for address """ url_append = '/balance?token=%s' % self.api_key url = self.base_url('addrs/%s' % (address + url_append)) result = json.loads(urlopen(url).read().decode('utf8')) return result
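A sketch of the equivalent raw request; base_url() belongs to the surrounding class, so a typical BlockCypher prefix is inlined here, and both the address and token are placeholders.

import json
from urllib.request import urlopen

url = ('https://api.blockcypher.com/v1/btc/main/addrs/%s/balance?token=%s'
       % ('1DEP8i3QJCsomS4BSMY2RpU1upv62aGvhD', '<api-key>'))
print(json.loads(urlopen(url).read().decode('utf8'))['final_balance'])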
def take_bug_report(self,
                    test_name,
                    begin_time,
                    timeout=300,
                    destination=None):
    """Takes a bug report on the device and stores it in a file.

    Args:
        test_name: Name of the test method that triggered this bug report.
        begin_time: Timestamp of when the test started.
        timeout: float, the number of seconds to wait for bugreport to
            complete, default is 5min.
        destination: string, path to the directory where the bugreport
            should be saved.
    """
    new_br = True
    try:
        stdout = self.adb.shell('bugreportz -v').decode('utf-8')
        # This check is necessary for builds before N, where adb shell's ret
        # code and stderr are not propagated properly.
        if 'not found' in stdout:
            new_br = False
    except adb.AdbError:
        new_br = False

    if destination:
        br_path = utils.abs_path(destination)
    else:
        br_path = os.path.join(self.log_path, 'BugReports')
    utils.create_dir(br_path)
    base_name = ',%s,%s.txt' % (begin_time, self._normalized_serial)
    if new_br:
        base_name = base_name.replace('.txt', '.zip')
    test_name_len = utils.MAX_FILENAME_LEN - len(base_name)
    out_name = test_name[:test_name_len] + base_name
    full_out_path = os.path.join(br_path, out_name.replace(' ', r'\ '))
    # in case device restarted, wait for adb interface to return
    self.wait_for_boot_completion()
    self.log.info('Taking bugreport for %s.', test_name)
    if new_br:
        out = self.adb.shell('bugreportz', timeout=timeout).decode('utf-8')
        if not out.startswith('OK'):
            raise DeviceError(self, 'Failed to take bugreport: %s' % out)
        br_out_path = out.split(':')[1].strip()
        self.adb.pull([br_out_path, full_out_path])
    else:
        # shell=True as this command redirects the stdout to a local file
        # using shell redirection.
        self.adb.bugreport(
            ' > "%s"' % full_out_path, shell=True, timeout=timeout)
    self.log.info('Bugreport for %s taken at %s.', test_name, full_out_path)
def function[take_bug_report, parameter[self, test_name, begin_time, timeout, destination]]: constant[Takes a bug report on the device and stores it in a file. Args: test_name: Name of the test method that triggered this bug report. begin_time: Timestamp of when the test started. timeout: float, the number of seconds to wait for bugreport to complete, default is 5min. destination: string, path to the directory where the bugreport should be saved. ] variable[new_br] assign[=] constant[True] <ast.Try object at 0x7da1b08cbe50> if name[destination] begin[:] variable[br_path] assign[=] call[name[utils].abs_path, parameter[name[destination]]] call[name[utils].create_dir, parameter[name[br_path]]] variable[base_name] assign[=] binary_operation[constant[,%s,%s.txt] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b08555a0>, <ast.Attribute object at 0x7da1b08c9a20>]]] if name[new_br] begin[:] variable[base_name] assign[=] call[name[base_name].replace, parameter[constant[.txt], constant[.zip]]] variable[test_name_len] assign[=] binary_operation[name[utils].MAX_FILENAME_LEN - call[name[len], parameter[name[base_name]]]] variable[out_name] assign[=] binary_operation[call[name[test_name]][<ast.Slice object at 0x7da1b08c89d0>] + name[base_name]] variable[full_out_path] assign[=] call[name[os].path.join, parameter[name[br_path], call[name[out_name].replace, parameter[constant[ ], constant[\ ]]]]] call[name[self].wait_for_boot_completion, parameter[]] call[name[self].log.info, parameter[constant[Taking bugreport for %s.], name[test_name]]] if name[new_br] begin[:] variable[out] assign[=] call[call[name[self].adb.shell, parameter[constant[bugreportz]]].decode, parameter[constant[utf-8]]] if <ast.UnaryOp object at 0x7da1b08c9f30> begin[:] <ast.Raise object at 0x7da1b08c90c0> variable[br_out_path] assign[=] call[call[call[name[out].split, parameter[constant[:]]]][constant[1]].strip, parameter[]] call[name[self].adb.pull, parameter[list[[<ast.Name object at 0x7da1b0860e20>, <ast.Name object at 0x7da1b08619f0>]]]] call[name[self].log.info, parameter[constant[Bugreport for %s taken at %s.], name[test_name], name[full_out_path]]]
keyword[def] identifier[take_bug_report] ( identifier[self] , identifier[test_name] , identifier[begin_time] , identifier[timeout] = literal[int] , identifier[destination] = keyword[None] ): literal[string] identifier[new_br] = keyword[True] keyword[try] : identifier[stdout] = identifier[self] . identifier[adb] . identifier[shell] ( literal[string] ). identifier[decode] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[stdout] : identifier[new_br] = keyword[False] keyword[except] identifier[adb] . identifier[AdbError] : identifier[new_br] = keyword[False] keyword[if] identifier[destination] : identifier[br_path] = identifier[utils] . identifier[abs_path] ( identifier[destination] ) keyword[else] : identifier[br_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[log_path] , literal[string] ) identifier[utils] . identifier[create_dir] ( identifier[br_path] ) identifier[base_name] = literal[string] %( identifier[begin_time] , identifier[self] . identifier[_normalized_serial] ) keyword[if] identifier[new_br] : identifier[base_name] = identifier[base_name] . identifier[replace] ( literal[string] , literal[string] ) identifier[test_name_len] = identifier[utils] . identifier[MAX_FILENAME_LEN] - identifier[len] ( identifier[base_name] ) identifier[out_name] = identifier[test_name] [: identifier[test_name_len] ]+ identifier[base_name] identifier[full_out_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[br_path] , identifier[out_name] . identifier[replace] ( literal[string] , literal[string] )) identifier[self] . identifier[wait_for_boot_completion] () identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[test_name] ) keyword[if] identifier[new_br] : identifier[out] = identifier[self] . identifier[adb] . identifier[shell] ( literal[string] , identifier[timeout] = identifier[timeout] ). identifier[decode] ( literal[string] ) keyword[if] keyword[not] identifier[out] . identifier[startswith] ( literal[string] ): keyword[raise] identifier[DeviceError] ( identifier[self] , literal[string] % identifier[out] ) identifier[br_out_path] = identifier[out] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] () identifier[self] . identifier[adb] . identifier[pull] ([ identifier[br_out_path] , identifier[full_out_path] ]) keyword[else] : identifier[self] . identifier[adb] . identifier[bugreport] ( literal[string] % identifier[full_out_path] , identifier[shell] = keyword[True] , identifier[timeout] = identifier[timeout] ) identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[test_name] , identifier[full_out_path] )
def take_bug_report(self, test_name, begin_time, timeout=300, destination=None): """Takes a bug report on the device and stores it in a file. Args: test_name: Name of the test method that triggered this bug report. begin_time: Timestamp of when the test started. timeout: float, the number of seconds to wait for bugreport to complete, default is 5min. destination: string, path to the directory where the bugreport should be saved. """ new_br = True try: stdout = self.adb.shell('bugreportz -v').decode('utf-8') # This check is necessary for builds before N, where adb shell's ret # code and stderr are not propagated properly. if 'not found' in stdout: new_br = False # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except adb.AdbError: new_br = False # depends on [control=['except'], data=[]] if destination: br_path = utils.abs_path(destination) # depends on [control=['if'], data=[]] else: br_path = os.path.join(self.log_path, 'BugReports') utils.create_dir(br_path) base_name = ',%s,%s.txt' % (begin_time, self._normalized_serial) if new_br: base_name = base_name.replace('.txt', '.zip') # depends on [control=['if'], data=[]] test_name_len = utils.MAX_FILENAME_LEN - len(base_name) out_name = test_name[:test_name_len] + base_name full_out_path = os.path.join(br_path, out_name.replace(' ', '\\ ')) # in case device restarted, wait for adb interface to return self.wait_for_boot_completion() self.log.info('Taking bugreport for %s.', test_name) if new_br: out = self.adb.shell('bugreportz', timeout=timeout).decode('utf-8') if not out.startswith('OK'): raise DeviceError(self, 'Failed to take bugreport: %s' % out) # depends on [control=['if'], data=[]] br_out_path = out.split(':')[1].strip() self.adb.pull([br_out_path, full_out_path]) # depends on [control=['if'], data=[]] else: # shell=True as this command redirects the stdout to a local file # using shell redirection. self.adb.bugreport(' > "%s"' % full_out_path, shell=True, timeout=timeout) self.log.info('Bugreport for %s taken at %s.', test_name, full_out_path)
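A hedged usage sketch for the method above: `ad` stands in for a connected Android device controller instance exposing take_bug_report, and the timestamp value is illustrative rather than taken from the row.

begin_time = '02-15 10:30:00.000'  # hypothetical logline-style timestamp
ad.take_bug_report('test_wifi_connect', begin_time, timeout=600)
# Saving into a caller-chosen directory instead of the default log path:
ad.take_bug_report('test_wifi_connect', begin_time,
                   destination='/tmp/bugreports')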
def pong(self, message=None): '''Write a pong ``frame``. ''' return self.write(self.parser.pong(message), encode=False)
def function[pong, parameter[self, message]]: constant[Write a pong ``frame``. ] return[call[name[self].write, parameter[call[name[self].parser.pong, parameter[name[message]]]]]]
keyword[def] identifier[pong] ( identifier[self] , identifier[message] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[write] ( identifier[self] . identifier[parser] . identifier[pong] ( identifier[message] ), identifier[encode] = keyword[False] )
def pong(self, message=None): """Write a pong ``frame``. """ return self.write(self.parser.pong(message), encode=False)
def from_pb(cls, instance_pb, client):
    """Creates an instance from a protobuf.

    :type instance_pb:
        :class:`google.spanner.v2.spanner_instance_admin_pb2.Instance`
    :param instance_pb: An instance protobuf object.

    :type client: :class:`~google.cloud.spanner_v1.client.Client`
    :param client: The client that owns the instance.

    :rtype: :class:`Instance`
    :returns: The instance parsed from the protobuf response.
    :raises ValueError: if the instance name does not match
        ``projects/{project}/instances/{instance_id}`` or if the parsed
        project ID does not match the project ID on the client.
    """
    match = _INSTANCE_NAME_RE.match(instance_pb.name)
    if match is None:
        raise ValueError(
            "Instance protobuf name was not in the " "expected format.",
            instance_pb.name,
        )
    if match.group("project") != client.project:
        raise ValueError(
            "Project ID on instance does not match the "
            "project ID on the client"
        )
    instance_id = match.group("instance_id")
    configuration_name = instance_pb.config

    result = cls(instance_id, client, configuration_name)
    result._update_from_pb(instance_pb)
    return result
def function[from_pb, parameter[cls, instance_pb, client]]:
    constant[Creates an instance from a protobuf.

    :type instance_pb:
        :class:`google.spanner.v2.spanner_instance_admin_pb2.Instance`
    :param instance_pb: An instance protobuf object.

    :type client: :class:`~google.cloud.spanner_v1.client.Client`
    :param client: The client that owns the instance.

    :rtype: :class:`Instance`
    :returns: The instance parsed from the protobuf response.
    :raises ValueError: if the instance name does not match
        ``projects/{project}/instances/{instance_id}`` or if the parsed
        project ID does not match the project ID on the client.
    ]
    variable[match] assign[=] call[name[_INSTANCE_NAME_RE].match, parameter[name[instance_pb].name]]
    if compare[name[match] is constant[None]] begin[:]
        <ast.Raise object at 0x7da18bcca1d0>
    if compare[call[name[match].group, parameter[constant[project]]] not_equal[!=] name[client].project] begin[:]
        <ast.Raise object at 0x7da207f015a0>
    variable[instance_id] assign[=] call[name[match].group, parameter[constant[instance_id]]]
    variable[configuration_name] assign[=] name[instance_pb].config
    variable[result] assign[=] call[name[cls], parameter[name[instance_id], name[client], name[configuration_name]]]
    call[name[result]._update_from_pb, parameter[name[instance_pb]]]
    return[name[result]]
keyword[def] identifier[from_pb] ( identifier[cls] , identifier[instance_pb] , identifier[client] ): literal[string] identifier[match] = identifier[_INSTANCE_NAME_RE] . identifier[match] ( identifier[instance_pb] . identifier[name] ) keyword[if] identifier[match] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] , identifier[instance_pb] . identifier[name] , ) keyword[if] identifier[match] . identifier[group] ( literal[string] )!= identifier[client] . identifier[project] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[instance_id] = identifier[match] . identifier[group] ( literal[string] ) identifier[configuration_name] = identifier[instance_pb] . identifier[config] identifier[result] = identifier[cls] ( identifier[instance_id] , identifier[client] , identifier[configuration_name] ) identifier[result] . identifier[_update_from_pb] ( identifier[instance_pb] ) keyword[return] identifier[result]
def from_pb(cls, instance_pb, client):
    """Creates an instance from a protobuf.

    :type instance_pb:
        :class:`google.spanner.v2.spanner_instance_admin_pb2.Instance`
    :param instance_pb: An instance protobuf object.

    :type client: :class:`~google.cloud.spanner_v1.client.Client`
    :param client: The client that owns the instance.

    :rtype: :class:`Instance`
    :returns: The instance parsed from the protobuf response.
    :raises ValueError: if the instance name does not match
        ``projects/{project}/instances/{instance_id}`` or if the parsed
        project ID does not match the project ID on the client.
    """
    match = _INSTANCE_NAME_RE.match(instance_pb.name)
    if match is None:
        raise ValueError('Instance protobuf name was not in the expected format.', instance_pb.name) # depends on [control=['if'], data=[]]
    if match.group('project') != client.project:
        raise ValueError('Project ID on instance does not match the project ID on the client') # depends on [control=['if'], data=[]]
    instance_id = match.group('instance_id')
    configuration_name = instance_pb.config
    result = cls(instance_id, client, configuration_name)
    result._update_from_pb(instance_pb)
    return result
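A minimal sketch of calling the classmethod above; `instance_pb` and `client` are assumed to come from the google-cloud-spanner admin API and are not constructed here, and the resource name is illustrative.

# Assumption: instance_pb.name == 'projects/my-project/instances/my-instance'
# and client.project == 'my-project'.
instance = Instance.from_pb(instance_pb, client)
# Any other name format, or a project mismatch, raises ValueError.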
def verify(info, directory_path): """Return True if the checksum values in the torrent file match the computed checksum values of downloaded file(s) in the directory and if each file has the correct length as specified in the torrent file. """ base_path = os.path.join(directory_path, info['name']) if 'length' in info: if os.stat(base_path).st_size != info['length']: return False getfile = lambda: open(base_path, 'rb') else: assert 'files' in info, 'invalid torrent file' for f in info['files']: p = os.path.join(base_path, *f['path']) if os.stat(p).st_size != f['length']: return False getfile = lambda: ConcatenatedFile(base_path, info['files']) with getfile() as f: return compare_checksum(info, f)
def function[verify, parameter[info, directory_path]]: constant[Return True if the checksum values in the torrent file match the computed checksum values of downloaded file(s) in the directory and if each file has the correct length as specified in the torrent file. ] variable[base_path] assign[=] call[name[os].path.join, parameter[name[directory_path], call[name[info]][constant[name]]]] if compare[constant[length] in name[info]] begin[:] if compare[call[name[os].stat, parameter[name[base_path]]].st_size not_equal[!=] call[name[info]][constant[length]]] begin[:] return[constant[False]] variable[getfile] assign[=] <ast.Lambda object at 0x7da18bcc85b0> with call[name[getfile], parameter[]] begin[:] return[call[name[compare_checksum], parameter[name[info], name[f]]]]
keyword[def] identifier[verify] ( identifier[info] , identifier[directory_path] ): literal[string] identifier[base_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[directory_path] , identifier[info] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[info] : keyword[if] identifier[os] . identifier[stat] ( identifier[base_path] ). identifier[st_size] != identifier[info] [ literal[string] ]: keyword[return] keyword[False] identifier[getfile] = keyword[lambda] : identifier[open] ( identifier[base_path] , literal[string] ) keyword[else] : keyword[assert] literal[string] keyword[in] identifier[info] , literal[string] keyword[for] identifier[f] keyword[in] identifier[info] [ literal[string] ]: identifier[p] = identifier[os] . identifier[path] . identifier[join] ( identifier[base_path] ,* identifier[f] [ literal[string] ]) keyword[if] identifier[os] . identifier[stat] ( identifier[p] ). identifier[st_size] != identifier[f] [ literal[string] ]: keyword[return] keyword[False] identifier[getfile] = keyword[lambda] : identifier[ConcatenatedFile] ( identifier[base_path] , identifier[info] [ literal[string] ]) keyword[with] identifier[getfile] () keyword[as] identifier[f] : keyword[return] identifier[compare_checksum] ( identifier[info] , identifier[f] )
def verify(info, directory_path): """Return True if the checksum values in the torrent file match the computed checksum values of downloaded file(s) in the directory and if each file has the correct length as specified in the torrent file. """ base_path = os.path.join(directory_path, info['name']) if 'length' in info: if os.stat(base_path).st_size != info['length']: return False # depends on [control=['if'], data=[]] getfile = lambda : open(base_path, 'rb') # depends on [control=['if'], data=['info']] else: assert 'files' in info, 'invalid torrent file' for f in info['files']: p = os.path.join(base_path, *f['path']) if os.stat(p).st_size != f['length']: return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] getfile = lambda : ConcatenatedFile(base_path, info['files']) with getfile() as f: return compare_checksum(info, f) # depends on [control=['with'], data=['f']]
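A hedged sketch of calling verify on a single-file torrent; it assumes verify and its helpers (compare_checksum, ConcatenatedFile) are importable from this module, and the checksum keys of the info dict are elided.

# `info` mirrors the decoded "info" dict of a single-file torrent; the
# piece data ('pieces', 'piece length') is elided here for brevity.
info = {'name': 'ubuntu.iso', 'length': 987654321}
if verify(info, '/downloads'):
    print('download matches the torrent metadata')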
def create_all(cls, list_of_kwargs):
    """Batch method for creating a list of instances

    Args:

        list_of_kwargs(list of dicts): A list of dicts where each dict
            denotes the keyword args that you would pass to the create
            method separately

    Examples:

        >>> Customer.create_all([
        ... {'name': 'Vicky', 'age': 34, 'user_id': 1},
        ... {'name': 'Ron', 'age': 40, 'user_id': 1, 'gender': 'Male'}])
    """
    try:
        return cls.add_all([
            cls.new(**kwargs) if kwargs is not None else None
            for kwargs in list_of_kwargs])
    except:
        cls.session.rollback()
        raise
def function[create_all, parameter[cls, list_of_kwargs]]:
    constant[Batch method for creating a list of instances

    Args:

        list_of_kwargs(list of dicts): A list of dicts where each dict
            denotes the keyword args that you would pass to the create
            method separately

    Examples:

        >>> Customer.create_all([
        ... {'name': 'Vicky', 'age': 34, 'user_id': 1},
        ... {'name': 'Ron', 'age': 40, 'user_id': 1, 'gender': 'Male'}])
    ]
    <ast.Try object at 0x7da1b242e590>
keyword[def] identifier[create_all] ( identifier[cls] , identifier[list_of_kwargs] ): literal[string] keyword[try] : keyword[return] identifier[cls] . identifier[add_all] ([ identifier[cls] . identifier[new] (** identifier[kwargs] ) keyword[if] identifier[kwargs] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None] keyword[for] identifier[kwargs] keyword[in] identifier[list_of_kwargs] ]) keyword[except] : identifier[cls] . identifier[session] . identifier[rollback] () keyword[raise]
def create_all(cls, list_of_kwargs):
    """Batch method for creating a list of instances

    Args:

        list_of_kwargs(list of dicts): A list of dicts where each dict
            denotes the keyword args that you would pass to the create
            method separately

    Examples:

        >>> Customer.create_all([
        ... {'name': 'Vicky', 'age': 34, 'user_id': 1},
        ... {'name': 'Ron', 'age': 40, 'user_id': 1, 'gender': 'Male'}])
    """
    try:
        return cls.add_all([cls.new(**kwargs) if kwargs is not None else None for kwargs in list_of_kwargs]) # depends on [control=['try'], data=[]]
    except:
        cls.session.rollback()
        raise # depends on [control=['except'], data=[]]
def __cleanup_module(self, event): """ Auxiliary method for L{_notify_unload_dll}. """ pid = event.get_pid() process = event.get_process() module = event.get_module() # Cleanup thread breakpoints on this module for tid in process.iter_thread_ids(): thread = process.get_thread(tid) # Running breakpoints if tid in self.__runningBP: bplist = list(self.__runningBP[tid]) for bp in bplist: bp_address = bp.get_address() if process.get_module_at_address(bp_address) == module: self.__cleanup_breakpoint(event, bp) self.__runningBP[tid].remove(bp) # Hardware breakpoints if tid in self.__hardwareBP: bplist = list(self.__hardwareBP[tid]) for bp in bplist: bp_address = bp.get_address() if process.get_module_at_address(bp_address) == module: self.__cleanup_breakpoint(event, bp) self.__hardwareBP[tid].remove(bp) # Cleanup code breakpoints on this module for (bp_pid, bp_address) in compat.keys(self.__codeBP): if bp_pid == pid: if process.get_module_at_address(bp_address) == module: bp = self.__codeBP[ (bp_pid, bp_address) ] self.__cleanup_breakpoint(event, bp) del self.__codeBP[ (bp_pid, bp_address) ] # Cleanup page breakpoints on this module for (bp_pid, bp_address) in compat.keys(self.__pageBP): if bp_pid == pid: if process.get_module_at_address(bp_address) == module: bp = self.__pageBP[ (bp_pid, bp_address) ] self.__cleanup_breakpoint(event, bp) del self.__pageBP[ (bp_pid, bp_address) ]
def function[__cleanup_module, parameter[self, event]]: constant[ Auxiliary method for L{_notify_unload_dll}. ] variable[pid] assign[=] call[name[event].get_pid, parameter[]] variable[process] assign[=] call[name[event].get_process, parameter[]] variable[module] assign[=] call[name[event].get_module, parameter[]] for taget[name[tid]] in starred[call[name[process].iter_thread_ids, parameter[]]] begin[:] variable[thread] assign[=] call[name[process].get_thread, parameter[name[tid]]] if compare[name[tid] in name[self].__runningBP] begin[:] variable[bplist] assign[=] call[name[list], parameter[call[name[self].__runningBP][name[tid]]]] for taget[name[bp]] in starred[name[bplist]] begin[:] variable[bp_address] assign[=] call[name[bp].get_address, parameter[]] if compare[call[name[process].get_module_at_address, parameter[name[bp_address]]] equal[==] name[module]] begin[:] call[name[self].__cleanup_breakpoint, parameter[name[event], name[bp]]] call[call[name[self].__runningBP][name[tid]].remove, parameter[name[bp]]] if compare[name[tid] in name[self].__hardwareBP] begin[:] variable[bplist] assign[=] call[name[list], parameter[call[name[self].__hardwareBP][name[tid]]]] for taget[name[bp]] in starred[name[bplist]] begin[:] variable[bp_address] assign[=] call[name[bp].get_address, parameter[]] if compare[call[name[process].get_module_at_address, parameter[name[bp_address]]] equal[==] name[module]] begin[:] call[name[self].__cleanup_breakpoint, parameter[name[event], name[bp]]] call[call[name[self].__hardwareBP][name[tid]].remove, parameter[name[bp]]] for taget[tuple[[<ast.Name object at 0x7da18dc99540>, <ast.Name object at 0x7da18dc9a0e0>]]] in starred[call[name[compat].keys, parameter[name[self].__codeBP]]] begin[:] if compare[name[bp_pid] equal[==] name[pid]] begin[:] if compare[call[name[process].get_module_at_address, parameter[name[bp_address]]] equal[==] name[module]] begin[:] variable[bp] assign[=] call[name[self].__codeBP][tuple[[<ast.Name object at 0x7da20c6a9d80>, <ast.Name object at 0x7da20c6a8a30>]]] call[name[self].__cleanup_breakpoint, parameter[name[event], name[bp]]] <ast.Delete object at 0x7da20c6aa950> for taget[tuple[[<ast.Name object at 0x7da20c6aba30>, <ast.Name object at 0x7da20c6a9390>]]] in starred[call[name[compat].keys, parameter[name[self].__pageBP]]] begin[:] if compare[name[bp_pid] equal[==] name[pid]] begin[:] if compare[call[name[process].get_module_at_address, parameter[name[bp_address]]] equal[==] name[module]] begin[:] variable[bp] assign[=] call[name[self].__pageBP][tuple[[<ast.Name object at 0x7da204622560>, <ast.Name object at 0x7da204620b80>]]] call[name[self].__cleanup_breakpoint, parameter[name[event], name[bp]]] <ast.Delete object at 0x7da2046235b0>
keyword[def] identifier[__cleanup_module] ( identifier[self] , identifier[event] ): literal[string] identifier[pid] = identifier[event] . identifier[get_pid] () identifier[process] = identifier[event] . identifier[get_process] () identifier[module] = identifier[event] . identifier[get_module] () keyword[for] identifier[tid] keyword[in] identifier[process] . identifier[iter_thread_ids] (): identifier[thread] = identifier[process] . identifier[get_thread] ( identifier[tid] ) keyword[if] identifier[tid] keyword[in] identifier[self] . identifier[__runningBP] : identifier[bplist] = identifier[list] ( identifier[self] . identifier[__runningBP] [ identifier[tid] ]) keyword[for] identifier[bp] keyword[in] identifier[bplist] : identifier[bp_address] = identifier[bp] . identifier[get_address] () keyword[if] identifier[process] . identifier[get_module_at_address] ( identifier[bp_address] )== identifier[module] : identifier[self] . identifier[__cleanup_breakpoint] ( identifier[event] , identifier[bp] ) identifier[self] . identifier[__runningBP] [ identifier[tid] ]. identifier[remove] ( identifier[bp] ) keyword[if] identifier[tid] keyword[in] identifier[self] . identifier[__hardwareBP] : identifier[bplist] = identifier[list] ( identifier[self] . identifier[__hardwareBP] [ identifier[tid] ]) keyword[for] identifier[bp] keyword[in] identifier[bplist] : identifier[bp_address] = identifier[bp] . identifier[get_address] () keyword[if] identifier[process] . identifier[get_module_at_address] ( identifier[bp_address] )== identifier[module] : identifier[self] . identifier[__cleanup_breakpoint] ( identifier[event] , identifier[bp] ) identifier[self] . identifier[__hardwareBP] [ identifier[tid] ]. identifier[remove] ( identifier[bp] ) keyword[for] ( identifier[bp_pid] , identifier[bp_address] ) keyword[in] identifier[compat] . identifier[keys] ( identifier[self] . identifier[__codeBP] ): keyword[if] identifier[bp_pid] == identifier[pid] : keyword[if] identifier[process] . identifier[get_module_at_address] ( identifier[bp_address] )== identifier[module] : identifier[bp] = identifier[self] . identifier[__codeBP] [( identifier[bp_pid] , identifier[bp_address] )] identifier[self] . identifier[__cleanup_breakpoint] ( identifier[event] , identifier[bp] ) keyword[del] identifier[self] . identifier[__codeBP] [( identifier[bp_pid] , identifier[bp_address] )] keyword[for] ( identifier[bp_pid] , identifier[bp_address] ) keyword[in] identifier[compat] . identifier[keys] ( identifier[self] . identifier[__pageBP] ): keyword[if] identifier[bp_pid] == identifier[pid] : keyword[if] identifier[process] . identifier[get_module_at_address] ( identifier[bp_address] )== identifier[module] : identifier[bp] = identifier[self] . identifier[__pageBP] [( identifier[bp_pid] , identifier[bp_address] )] identifier[self] . identifier[__cleanup_breakpoint] ( identifier[event] , identifier[bp] ) keyword[del] identifier[self] . identifier[__pageBP] [( identifier[bp_pid] , identifier[bp_address] )]
def __cleanup_module(self, event): """ Auxiliary method for L{_notify_unload_dll}. """ pid = event.get_pid() process = event.get_process() module = event.get_module() # Cleanup thread breakpoints on this module for tid in process.iter_thread_ids(): thread = process.get_thread(tid) # Running breakpoints if tid in self.__runningBP: bplist = list(self.__runningBP[tid]) for bp in bplist: bp_address = bp.get_address() if process.get_module_at_address(bp_address) == module: self.__cleanup_breakpoint(event, bp) self.__runningBP[tid].remove(bp) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bp']] # depends on [control=['if'], data=['tid']] # Hardware breakpoints if tid in self.__hardwareBP: bplist = list(self.__hardwareBP[tid]) for bp in bplist: bp_address = bp.get_address() if process.get_module_at_address(bp_address) == module: self.__cleanup_breakpoint(event, bp) self.__hardwareBP[tid].remove(bp) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['bp']] # depends on [control=['if'], data=['tid']] # depends on [control=['for'], data=['tid']] # Cleanup code breakpoints on this module for (bp_pid, bp_address) in compat.keys(self.__codeBP): if bp_pid == pid: if process.get_module_at_address(bp_address) == module: bp = self.__codeBP[bp_pid, bp_address] self.__cleanup_breakpoint(event, bp) del self.__codeBP[bp_pid, bp_address] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['bp_pid']] # depends on [control=['for'], data=[]] # Cleanup page breakpoints on this module for (bp_pid, bp_address) in compat.keys(self.__pageBP): if bp_pid == pid: if process.get_module_at_address(bp_address) == module: bp = self.__pageBP[bp_pid, bp_address] self.__cleanup_breakpoint(event, bp) del self.__pageBP[bp_pid, bp_address] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['bp_pid']] # depends on [control=['for'], data=[]]
def new_signal(celf, path, iface, name) : "creates a new DBUS.MESSAGE_TYPE_SIGNAL message." result = dbus.dbus_message_new_signal(path.encode(), iface.encode(), name.encode()) if result == None : raise CallFailed("dbus_message_new_signal") #end if return \ celf(result)
def function[new_signal, parameter[celf, path, iface, name]]: constant[creates a new DBUS.MESSAGE_TYPE_SIGNAL message.] variable[result] assign[=] call[name[dbus].dbus_message_new_signal, parameter[call[name[path].encode, parameter[]], call[name[iface].encode, parameter[]], call[name[name].encode, parameter[]]]] if compare[name[result] equal[==] constant[None]] begin[:] <ast.Raise object at 0x7da20c990040> return[call[name[celf], parameter[name[result]]]]
keyword[def] identifier[new_signal] ( identifier[celf] , identifier[path] , identifier[iface] , identifier[name] ): literal[string] identifier[result] = identifier[dbus] . identifier[dbus_message_new_signal] ( identifier[path] . identifier[encode] (), identifier[iface] . identifier[encode] (), identifier[name] . identifier[encode] ()) keyword[if] identifier[result] == keyword[None] : keyword[raise] identifier[CallFailed] ( literal[string] ) keyword[return] identifier[celf] ( identifier[result] )
def new_signal(celf, path, iface, name): """creates a new DBUS.MESSAGE_TYPE_SIGNAL message.""" result = dbus.dbus_message_new_signal(path.encode(), iface.encode(), name.encode()) if result == None: raise CallFailed('dbus_message_new_signal') # depends on [control=['if'], data=[]] #end if return celf(result)
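A hedged sketch, assuming DBussy-style bindings where this classmethod hangs off a Message class; the object path, interface, and signal name below are illustrative.

msg = Message.new_signal(
    path='/org/example/Object',
    iface='org.example.Interface',
    name='SomethingHappened',
)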
def write_vega(vega_data, *, title: Optional[str], write_to: str,
               write_format: str = "auto", indent: int = 2):
    """Write vega dictionary to an external file.

    Parameters
    ----------
    vega_data : Valid vega data as dictionary
    write_to: Path to write vega JSON/HTML to.
    write_format: "auto" | "json" | "html"
        Whether to create a JSON data file or a full-fledged HTML page.
    indent: Indentation of JSON
    """
    spec = json.dumps(vega_data, indent=indent)
    if write_format == "html" or (write_format == "auto" and write_to.endswith(".html")):
        output = HTML_TEMPLATE.replace("{{ title }}", title or "Histogram").replace("{{ spec }}", spec)
    elif write_format == "json" or (write_format == "auto" and write_to.endswith(".json")):
        output = spec
    else:
        raise RuntimeError("Format not understood.")
    with codecs.open(write_to, "w", encoding="utf-8") as out:
        out.write(output)
def function[write_vega, parameter[vega_data]]: constant[Write vega dictionary to an external file. Parameters ---------- vega_data : Valid vega data as dictionary write_to: Path to write vega JSON/HTML to. write_format: "auto" | "json" | "html" Whether to create a JSON data file or a full-fledged HTML page. indent: Indentation of JSON ] variable[spec] assign[=] call[name[json].dumps, parameter[name[vega_data]]] if <ast.BoolOp object at 0x7da18f723e50> begin[:] variable[output] assign[=] call[call[name[HTML_TEMPLATE].replace, parameter[constant[{{ title }}], <ast.BoolOp object at 0x7da18eb56800>]].replace, parameter[constant[{{ spec }}], name[spec]]] with call[name[codecs].open, parameter[name[write_to], constant[w]]] begin[:] call[name[out].write, parameter[name[output]]]
keyword[def] identifier[write_vega] ( identifier[vega_data] ,*, identifier[title] : identifier[Optional] [ identifier[str] ], identifier[write_to] : identifier[str] , identifier[write_format] : identifier[str] = literal[string] , identifier[indent] : identifier[int] = literal[int] ): literal[string] identifier[spec] = identifier[json] . identifier[dumps] ( identifier[vega_data] , identifier[indent] = identifier[indent] ) keyword[if] identifier[write_format] == literal[string] keyword[or] identifier[write_format] == literal[string] keyword[and] identifier[write_to] . identifier[endswith] ( literal[string] ): identifier[output] = identifier[HTML_TEMPLATE] . identifier[replace] ( literal[string] , identifier[title] keyword[or] literal[string] ). identifier[replace] ( literal[string] , identifier[spec] ) keyword[elif] identifier[write_format] == literal[string] keyword[or] identifier[write_format] == literal[string] keyword[and] identifier[write_to] . identifier[endswith] ( literal[string] ): identifier[output] = identifier[spec] keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[with] identifier[codecs] . identifier[open] ( identifier[write_to] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[out] : identifier[out] . identifier[write] ( identifier[output] )
def write_vega(vega_data, *, title: Optional[str], write_to: str, write_format: str='auto', indent: int=2):
    """Write vega dictionary to an external file.

    Parameters
    ----------
    vega_data : Valid vega data as dictionary
    write_to: Path to write vega JSON/HTML to.
    write_format: "auto" | "json" | "html"
        Whether to create a JSON data file or a full-fledged HTML page.
    indent: Indentation of JSON
    """
    spec = json.dumps(vega_data, indent=indent)
    if write_format == 'html' or (write_format == 'auto' and write_to.endswith('.html')):
        output = HTML_TEMPLATE.replace('{{ title }}', title or 'Histogram').replace('{{ spec }}', spec) # depends on [control=['if'], data=[]]
    elif write_format == 'json' or (write_format == 'auto' and write_to.endswith('.json')):
        output = spec # depends on [control=['if'], data=[]]
    else:
        raise RuntimeError('Format not understood.')
    with codecs.open(write_to, 'w', encoding='utf-8') as out:
        out.write(output) # depends on [control=['with'], data=['out']]
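A short usage sketch; it assumes write_vega and its HTML_TEMPLATE constant are importable from the surrounding module, and uses a deliberately bare spec.

spec = {"$schema": "https://vega.github.io/schema/vega/v5.json"}
write_vega(spec, title="Demo histogram", write_to="hist.html")  # full HTML page
write_vega(spec, title=None, write_to="hist.json", indent=4)    # raw JSON spec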
def flatten(iterable, check=is_iterable): """Produces a recursively flattened version of ``iterable`` ``check`` Recurses only if check(value) is true. """ for value in iterable: if check(value): for flat in flatten(value, check): yield flat else: yield value
def function[flatten, parameter[iterable, check]]: constant[Produces a recursively flattened version of ``iterable`` ``check`` Recurses only if check(value) is true. ] for taget[name[value]] in starred[name[iterable]] begin[:] if call[name[check], parameter[name[value]]] begin[:] for taget[name[flat]] in starred[call[name[flatten], parameter[name[value], name[check]]]] begin[:] <ast.Yield object at 0x7da18c4cd6f0>
keyword[def] identifier[flatten] ( identifier[iterable] , identifier[check] = identifier[is_iterable] ): literal[string] keyword[for] identifier[value] keyword[in] identifier[iterable] : keyword[if] identifier[check] ( identifier[value] ): keyword[for] identifier[flat] keyword[in] identifier[flatten] ( identifier[value] , identifier[check] ): keyword[yield] identifier[flat] keyword[else] : keyword[yield] identifier[value]
def flatten(iterable, check=is_iterable): """Produces a recursively flattened version of ``iterable`` ``check`` Recurses only if check(value) is true. """ for value in iterable: if check(value): for flat in flatten(value, check): yield flat # depends on [control=['for'], data=['flat']] # depends on [control=['if'], data=[]] else: yield value # depends on [control=['for'], data=['value']]
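A self-contained sketch, assuming flatten itself is importable; the is_iterable helper below is a stand-in for the module's own default check and deliberately treats strings as atomic so they are not exploded into characters.

def is_iterable(value):
    # Stand-in for the module's helper: iterable, but not a string.
    return hasattr(value, '__iter__') and not isinstance(value, (str, bytes))

print(list(flatten([1, [2, [3, 4]], (5,)], check=is_iterable)))  # -> [1, 2, 3, 4, 5]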
def message(self, msg='', level=1, tab=0): '''Print a message to the console''' if self.verbosity >= level: self.stdout.write('{}{}'.format(' ' * tab, msg))
def function[message, parameter[self, msg, level, tab]]: constant[Print a message to the console] if compare[name[self].verbosity greater_or_equal[>=] name[level]] begin[:] call[name[self].stdout.write, parameter[call[constant[{}{}].format, parameter[binary_operation[constant[ ] * name[tab]], name[msg]]]]]
keyword[def] identifier[message] ( identifier[self] , identifier[msg] = literal[string] , identifier[level] = literal[int] , identifier[tab] = literal[int] ): literal[string] keyword[if] identifier[self] . identifier[verbosity] >= identifier[level] : identifier[self] . identifier[stdout] . identifier[write] ( literal[string] . identifier[format] ( literal[string] * identifier[tab] , identifier[msg] ))
def message(self, msg='', level=1, tab=0): """Print a message to the console""" if self.verbosity >= level: self.stdout.write('{}{}'.format(' ' * tab, msg)) # depends on [control=['if'], data=[]]
def start(client, container, interactive=True, stdout=None, stderr=None, stdin=None, logs=None):
    """
    Present the PTY of the container inside the current process.

    This is just a wrapper for
    PseudoTerminal(client, RunOperation(client, container, ...)).start()
    """

    operation = RunOperation(client, container, interactive=interactive, stdout=stdout,
                             stderr=stderr, stdin=stdin, logs=logs)

    PseudoTerminal(client, operation).start()
def function[start, parameter[client, container, interactive, stdout, stderr, stdin, logs]]:
    constant[
    Present the PTY of the container inside the current process.

    This is just a wrapper for
    PseudoTerminal(client, RunOperation(client, container, ...)).start()
    ]
    variable[operation] assign[=] call[name[RunOperation], parameter[name[client], name[container]]]
    call[call[name[PseudoTerminal], parameter[name[client], name[operation]]].start, parameter[]]
keyword[def] identifier[start] ( identifier[client] , identifier[container] , identifier[interactive] = keyword[True] , identifier[stdout] = keyword[None] , identifier[stderr] = keyword[None] , identifier[stdin] = keyword[None] , identifier[logs] = keyword[None] ): literal[string] identifier[operation] = identifier[RunOperation] ( identifier[client] , identifier[container] , identifier[interactive] = identifier[interactive] , identifier[stdout] = identifier[stdout] , identifier[stderr] = identifier[stderr] , identifier[stdin] = identifier[stdin] , identifier[logs] = identifier[logs] ) identifier[PseudoTerminal] ( identifier[client] , identifier[operation] ). identifier[start] ()
def start(client, container, interactive=True, stdout=None, stderr=None, stdin=None, logs=None):
    """
    Present the PTY of the container inside the current process.

    This is just a wrapper for
    PseudoTerminal(client, RunOperation(client, container, ...)).start()
    """
    operation = RunOperation(client, container, interactive=interactive, stdout=stdout, stderr=stderr, stdin=stdin, logs=logs)
    PseudoTerminal(client, operation).start()
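A hedged sketch in the style of dockerpty's documented usage; docker-py's low-level APIClient and its create_container call are assumptions about the caller's environment, not part of the function above.

import docker

client = docker.APIClient()
container = client.create_container(
    image='busybox:latest', stdin_open=True, tty=True, command='/bin/sh')
client.start(container)
start(client, container)  # hands the container's PTY to this process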
def expressionLevelsGenerator(self, request): """ Returns a generator over the (expressionLevel, nextPageToken) pairs defined by the specified request. Currently only supports searching over a specified rnaQuantification """ rnaQuantificationId = request.rna_quantification_id compoundId = datamodel.RnaQuantificationCompoundId.parse( request.rna_quantification_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantSet = dataset.getRnaQuantificationSet( compoundId.rna_quantification_set_id) rnaQuant = rnaQuantSet.getRnaQuantification(rnaQuantificationId) rnaQuantificationId = rnaQuant.getLocalId() iterator = paging.ExpressionLevelsIterator( request, rnaQuant) return iterator
def function[expressionLevelsGenerator, parameter[self, request]]: constant[ Returns a generator over the (expressionLevel, nextPageToken) pairs defined by the specified request. Currently only supports searching over a specified rnaQuantification ] variable[rnaQuantificationId] assign[=] name[request].rna_quantification_id variable[compoundId] assign[=] call[name[datamodel].RnaQuantificationCompoundId.parse, parameter[name[request].rna_quantification_id]] variable[dataset] assign[=] call[call[name[self].getDataRepository, parameter[]].getDataset, parameter[name[compoundId].dataset_id]] variable[rnaQuantSet] assign[=] call[name[dataset].getRnaQuantificationSet, parameter[name[compoundId].rna_quantification_set_id]] variable[rnaQuant] assign[=] call[name[rnaQuantSet].getRnaQuantification, parameter[name[rnaQuantificationId]]] variable[rnaQuantificationId] assign[=] call[name[rnaQuant].getLocalId, parameter[]] variable[iterator] assign[=] call[name[paging].ExpressionLevelsIterator, parameter[name[request], name[rnaQuant]]] return[name[iterator]]
keyword[def] identifier[expressionLevelsGenerator] ( identifier[self] , identifier[request] ): literal[string] identifier[rnaQuantificationId] = identifier[request] . identifier[rna_quantification_id] identifier[compoundId] = identifier[datamodel] . identifier[RnaQuantificationCompoundId] . identifier[parse] ( identifier[request] . identifier[rna_quantification_id] ) identifier[dataset] = identifier[self] . identifier[getDataRepository] (). identifier[getDataset] ( identifier[compoundId] . identifier[dataset_id] ) identifier[rnaQuantSet] = identifier[dataset] . identifier[getRnaQuantificationSet] ( identifier[compoundId] . identifier[rna_quantification_set_id] ) identifier[rnaQuant] = identifier[rnaQuantSet] . identifier[getRnaQuantification] ( identifier[rnaQuantificationId] ) identifier[rnaQuantificationId] = identifier[rnaQuant] . identifier[getLocalId] () identifier[iterator] = identifier[paging] . identifier[ExpressionLevelsIterator] ( identifier[request] , identifier[rnaQuant] ) keyword[return] identifier[iterator]
def expressionLevelsGenerator(self, request): """ Returns a generator over the (expressionLevel, nextPageToken) pairs defined by the specified request. Currently only supports searching over a specified rnaQuantification """ rnaQuantificationId = request.rna_quantification_id compoundId = datamodel.RnaQuantificationCompoundId.parse(request.rna_quantification_id) dataset = self.getDataRepository().getDataset(compoundId.dataset_id) rnaQuantSet = dataset.getRnaQuantificationSet(compoundId.rna_quantification_set_id) rnaQuant = rnaQuantSet.getRnaQuantification(rnaQuantificationId) rnaQuantificationId = rnaQuant.getLocalId() iterator = paging.ExpressionLevelsIterator(request, rnaQuant) return iterator
def as_enum(enum): """ Turn a possibly string enum into an integer enum. """ if isinstance(enum, string_types): try: enum = getattr(gl, 'GL_' + enum.upper()) except AttributeError: try: enum = _internalformats['GL_' + enum.upper()] except KeyError: raise ValueError('Could not find int value for enum %r' % enum) return enum
def function[as_enum, parameter[enum]]: constant[ Turn a possibly string enum into an integer enum. ] if call[name[isinstance], parameter[name[enum], name[string_types]]] begin[:] <ast.Try object at 0x7da18dc9a7a0> return[name[enum]]
keyword[def] identifier[as_enum] ( identifier[enum] ): literal[string] keyword[if] identifier[isinstance] ( identifier[enum] , identifier[string_types] ): keyword[try] : identifier[enum] = identifier[getattr] ( identifier[gl] , literal[string] + identifier[enum] . identifier[upper] ()) keyword[except] identifier[AttributeError] : keyword[try] : identifier[enum] = identifier[_internalformats] [ literal[string] + identifier[enum] . identifier[upper] ()] keyword[except] identifier[KeyError] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[enum] ) keyword[return] identifier[enum]
def as_enum(enum): """ Turn a possibly string enum into an integer enum. """ if isinstance(enum, string_types): try: enum = getattr(gl, 'GL_' + enum.upper()) # depends on [control=['try'], data=[]] except AttributeError: try: enum = _internalformats['GL_' + enum.upper()] # depends on [control=['try'], data=[]] except KeyError: raise ValueError('Could not find int value for enum %r' % enum) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] return enum
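Illustrative calls for as_enum; the gl module and _internalformats table are assumed to come from the surrounding GL bindings.

print(as_enum('rgba'))   # looked up as gl.GL_RGBA, returned as an int
print(as_enum(3))        # non-string inputs pass through unchanged
try:
    as_enum('bogus')
except ValueError as exc:
    print(exc)           # unknown names raise ValueError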
def _transition_loop(self): """Execute all queued transitions step by step.""" while self._transitions: start = time.time() for transition in self._transitions: transition.step() if transition.finished: self._transitions.remove(transition) time_delta = time.time() - start sleep_time = max(0, self.MIN_STEP_TIME - time_delta) time.sleep(sleep_time)
def function[_transition_loop, parameter[self]]: constant[Execute all queued transitions step by step.] while name[self]._transitions begin[:] variable[start] assign[=] call[name[time].time, parameter[]] for taget[name[transition]] in starred[name[self]._transitions] begin[:] call[name[transition].step, parameter[]] if name[transition].finished begin[:] call[name[self]._transitions.remove, parameter[name[transition]]] variable[time_delta] assign[=] binary_operation[call[name[time].time, parameter[]] - name[start]] variable[sleep_time] assign[=] call[name[max], parameter[constant[0], binary_operation[name[self].MIN_STEP_TIME - name[time_delta]]]] call[name[time].sleep, parameter[name[sleep_time]]]
keyword[def] identifier[_transition_loop] ( identifier[self] ): literal[string] keyword[while] identifier[self] . identifier[_transitions] : identifier[start] = identifier[time] . identifier[time] () keyword[for] identifier[transition] keyword[in] identifier[self] . identifier[_transitions] : identifier[transition] . identifier[step] () keyword[if] identifier[transition] . identifier[finished] : identifier[self] . identifier[_transitions] . identifier[remove] ( identifier[transition] ) identifier[time_delta] = identifier[time] . identifier[time] ()- identifier[start] identifier[sleep_time] = identifier[max] ( literal[int] , identifier[self] . identifier[MIN_STEP_TIME] - identifier[time_delta] ) identifier[time] . identifier[sleep] ( identifier[sleep_time] )
def _transition_loop(self): """Execute all queued transitions step by step.""" while self._transitions: start = time.time() for transition in self._transitions: transition.step() if transition.finished: self._transitions.remove(transition) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['transition']] time_delta = time.time() - start sleep_time = max(0, self.MIN_STEP_TIME - time_delta) time.sleep(sleep_time) # depends on [control=['while'], data=[]]
def _random_weights(n_features, lam, lam_perturb, prng):
    """Generate a symmetric random matrix with zeros along the diagonal; the
    non-zero elements take the value {lam * lam_perturb, lam / lam_perturb}
    with probability 1/2.
    """
    weights = np.zeros((n_features, n_features))
    n_off_diag = int((n_features ** 2 - n_features) / 2)
    berns = prng.binomial(1, 0.5, size=n_off_diag)
    vals = np.zeros(berns.shape)
    vals[berns == 0] = 1. * lam * lam_perturb
    vals[berns == 1] = 1. * lam / lam_perturb
    weights[np.triu_indices(n_features, k=1)] = vals
    weights[weights < 0] = 0
    weights = weights + weights.T
    return weights
def function[_random_weights, parameter[n_features, lam, lam_perturb, prng]]:
    constant[Generate a symmetric random matrix with zeros along the diagonal; the
    non-zero elements take the value {lam * lam_perturb, lam / lam_perturb}
    with probability 1/2.
    ]
    variable[weights] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Name object at 0x7da1b117bf40>, <ast.Name object at 0x7da1b1178820>]]]]
    variable[n_off_diag] assign[=] call[name[int], parameter[binary_operation[binary_operation[binary_operation[name[n_features] ** constant[2]] - name[n_features]] / constant[2]]]]
    variable[berns] assign[=] call[name[prng].binomial, parameter[constant[1], constant[0.5]]]
    variable[vals] assign[=] call[name[np].zeros, parameter[name[berns].shape]]
    call[name[vals]][compare[name[berns] equal[==] constant[0]]] assign[=] binary_operation[binary_operation[constant[1.0] * name[lam]] * name[lam_perturb]]
    call[name[vals]][compare[name[berns] equal[==] constant[1]]] assign[=] binary_operation[binary_operation[constant[1.0] * name[lam]] / name[lam_perturb]]
    call[name[weights]][call[name[np].triu_indices, parameter[name[n_features]]]] assign[=] name[vals]
    call[name[weights]][compare[name[weights] less[<] constant[0]]] assign[=] constant[0]
    variable[weights] assign[=] binary_operation[name[weights] + name[weights].T]
    return[name[weights]]
keyword[def] identifier[_random_weights] ( identifier[n_features] , identifier[lam] , identifier[lam_perturb] , identifier[prng] ): literal[string] identifier[weights] = identifier[np] . identifier[zeros] (( identifier[n_features] , identifier[n_features] )) identifier[n_off_diag] = identifier[int] (( identifier[n_features] ** literal[int] - identifier[n_features] )/ literal[int] ) identifier[berns] = identifier[prng] . identifier[binomial] ( literal[int] , literal[int] , identifier[size] = identifier[n_off_diag] ) identifier[vals] = identifier[np] . identifier[zeros] ( identifier[berns] . identifier[shape] ) identifier[vals] [ identifier[berns] == literal[int] ]= literal[int] * identifier[lam] * identifier[lam_perturb] identifier[vals] [ identifier[berns] == literal[int] ]= literal[int] * identifier[lam] / identifier[lam_perturb] identifier[weights] [ identifier[np] . identifier[triu_indices] ( identifier[n_features] , identifier[k] = literal[int] )]= identifier[vals] identifier[weights] [ identifier[weights] < literal[int] ]= literal[int] identifier[weights] = identifier[weights] + identifier[weights] . identifier[T] keyword[return] identifier[weights]
def _random_weights(n_features, lam, lam_perturb, prng):
    """Generate a symmetric random matrix with zeros along the diagonal; the
    non-zero elements take the value {lam * lam_perturb, lam / lam_perturb}
    with probability 1/2.
    """
    weights = np.zeros((n_features, n_features))
    n_off_diag = int((n_features ** 2 - n_features) / 2)
    berns = prng.binomial(1, 0.5, size=n_off_diag)
    vals = np.zeros(berns.shape)
    vals[berns == 0] = 1.0 * lam * lam_perturb
    vals[berns == 1] = 1.0 * lam / lam_perturb
    weights[np.triu_indices(n_features, k=1)] = vals
    weights[weights < 0] = 0
    weights = weights + weights.T
    return weights
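A quick check of the construction above; numpy's RandomState stands in for the caller-supplied prng, and the assertions follow directly from the code.

import numpy as np

prng = np.random.RandomState(0)
W = _random_weights(n_features=4, lam=0.5, lam_perturb=2.0, prng=prng)
assert np.allclose(W, W.T)       # symmetric by construction
assert not np.diag(W).any()      # zero diagonal
print(sorted(set(W[W > 0])))     # values drawn from {lam*lam_perturb, lam/lam_perturb}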
def order_chunks(self, chunks): ''' Sort the chunk list verifying that the chunks follow the order specified in the order options. ''' cap = 1 for chunk in chunks: if 'order' in chunk: if not isinstance(chunk['order'], int): continue chunk_order = chunk['order'] if chunk_order > cap - 1 and chunk_order > 0: cap = chunk_order + 100 for chunk in chunks: if 'order' not in chunk: chunk['order'] = cap continue if not isinstance(chunk['order'], (int, float)): if chunk['order'] == 'last': chunk['order'] = cap + 1000000 elif chunk['order'] == 'first': chunk['order'] = 0 else: chunk['order'] = cap if 'name_order' in chunk: chunk['order'] = chunk['order'] + chunk.pop('name_order') / 10000.0 if chunk['order'] < 0: chunk['order'] = cap + 1000000 + chunk['order'] chunks.sort(key=lambda chunk: (chunk['order'], '{0[state]}{0[name]}{0[fun]}'.format(chunk))) return chunks
def function[order_chunks, parameter[self, chunks]]: constant[ Sort the chunk list verifying that the chunks follow the order specified in the order options. ] variable[cap] assign[=] constant[1] for taget[name[chunk]] in starred[name[chunks]] begin[:] if compare[constant[order] in name[chunk]] begin[:] if <ast.UnaryOp object at 0x7da1b215d180> begin[:] continue variable[chunk_order] assign[=] call[name[chunk]][constant[order]] if <ast.BoolOp object at 0x7da1b1c60cd0> begin[:] variable[cap] assign[=] binary_operation[name[chunk_order] + constant[100]] for taget[name[chunk]] in starred[name[chunks]] begin[:] if compare[constant[order] <ast.NotIn object at 0x7da2590d7190> name[chunk]] begin[:] call[name[chunk]][constant[order]] assign[=] name[cap] continue if <ast.UnaryOp object at 0x7da1b1c60310> begin[:] if compare[call[name[chunk]][constant[order]] equal[==] constant[last]] begin[:] call[name[chunk]][constant[order]] assign[=] binary_operation[name[cap] + constant[1000000]] if compare[constant[name_order] in name[chunk]] begin[:] call[name[chunk]][constant[order]] assign[=] binary_operation[call[name[chunk]][constant[order]] + binary_operation[call[name[chunk].pop, parameter[constant[name_order]]] / constant[10000.0]]] if compare[call[name[chunk]][constant[order]] less[<] constant[0]] begin[:] call[name[chunk]][constant[order]] assign[=] binary_operation[binary_operation[name[cap] + constant[1000000]] + call[name[chunk]][constant[order]]] call[name[chunks].sort, parameter[]] return[name[chunks]]
keyword[def] identifier[order_chunks] ( identifier[self] , identifier[chunks] ): literal[string] identifier[cap] = literal[int] keyword[for] identifier[chunk] keyword[in] identifier[chunks] : keyword[if] literal[string] keyword[in] identifier[chunk] : keyword[if] keyword[not] identifier[isinstance] ( identifier[chunk] [ literal[string] ], identifier[int] ): keyword[continue] identifier[chunk_order] = identifier[chunk] [ literal[string] ] keyword[if] identifier[chunk_order] > identifier[cap] - literal[int] keyword[and] identifier[chunk_order] > literal[int] : identifier[cap] = identifier[chunk_order] + literal[int] keyword[for] identifier[chunk] keyword[in] identifier[chunks] : keyword[if] literal[string] keyword[not] keyword[in] identifier[chunk] : identifier[chunk] [ literal[string] ]= identifier[cap] keyword[continue] keyword[if] keyword[not] identifier[isinstance] ( identifier[chunk] [ literal[string] ],( identifier[int] , identifier[float] )): keyword[if] identifier[chunk] [ literal[string] ]== literal[string] : identifier[chunk] [ literal[string] ]= identifier[cap] + literal[int] keyword[elif] identifier[chunk] [ literal[string] ]== literal[string] : identifier[chunk] [ literal[string] ]= literal[int] keyword[else] : identifier[chunk] [ literal[string] ]= identifier[cap] keyword[if] literal[string] keyword[in] identifier[chunk] : identifier[chunk] [ literal[string] ]= identifier[chunk] [ literal[string] ]+ identifier[chunk] . identifier[pop] ( literal[string] )/ literal[int] keyword[if] identifier[chunk] [ literal[string] ]< literal[int] : identifier[chunk] [ literal[string] ]= identifier[cap] + literal[int] + identifier[chunk] [ literal[string] ] identifier[chunks] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[chunk] :( identifier[chunk] [ literal[string] ], literal[string] . identifier[format] ( identifier[chunk] ))) keyword[return] identifier[chunks]
def order_chunks(self, chunks): """ Sort the chunk list verifying that the chunks follow the order specified in the order options. """ cap = 1 for chunk in chunks: if 'order' in chunk: if not isinstance(chunk['order'], int): continue # depends on [control=['if'], data=[]] chunk_order = chunk['order'] if chunk_order > cap - 1 and chunk_order > 0: cap = chunk_order + 100 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['chunk']] # depends on [control=['for'], data=['chunk']] for chunk in chunks: if 'order' not in chunk: chunk['order'] = cap continue # depends on [control=['if'], data=['chunk']] if not isinstance(chunk['order'], (int, float)): if chunk['order'] == 'last': chunk['order'] = cap + 1000000 # depends on [control=['if'], data=[]] elif chunk['order'] == 'first': chunk['order'] = 0 # depends on [control=['if'], data=[]] else: chunk['order'] = cap # depends on [control=['if'], data=[]] if 'name_order' in chunk: chunk['order'] = chunk['order'] + chunk.pop('name_order') / 10000.0 # depends on [control=['if'], data=['chunk']] if chunk['order'] < 0: chunk['order'] = cap + 1000000 + chunk['order'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['chunk']] chunks.sort(key=lambda chunk: (chunk['order'], '{0[state]}{0[name]}{0[fun]}'.format(chunk))) return chunks
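A hedged sketch of the ordering rules; the chunk dicts follow the low-state shape the sort key expects ('state', 'name', 'fun'), and `compiler` is an assumed instance of the class defining this method.

chunks = [
    {'state': 'pkg', 'name': 'vim', 'fun': 'installed', 'order': 'last'},
    {'state': 'file', 'name': '/etc/motd', 'fun': 'managed'},  # no order key
    {'state': 'user', 'name': 'deploy', 'fun': 'present', 'order': 1},
]
ordered = compiler.order_chunks(chunks)
print([c['name'] for c in ordered])  # ['deploy', '/etc/motd', 'vim']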
def remove_by_tag(self, tag): """ Remove the first encountered object with the specified tag from the world. Returns true if an object was found and removed. Returns false if no object could be removed. """ obj = self.find_obj_by_tag(tag) if obj != None: self.remove_obj(obj) return True return False
def function[remove_by_tag, parameter[self, tag]]: constant[ Remove the first encountered object with the specified tag from the world. Returns true if an object was found and removed. Returns false if no object could be removed. ] variable[obj] assign[=] call[name[self].find_obj_by_tag, parameter[name[tag]]] if compare[name[obj] not_equal[!=] constant[None]] begin[:] call[name[self].remove_obj, parameter[name[obj]]] return[constant[True]] return[constant[False]]
keyword[def] identifier[remove_by_tag] ( identifier[self] , identifier[tag] ): literal[string] identifier[obj] = identifier[self] . identifier[find_obj_by_tag] ( identifier[tag] ) keyword[if] identifier[obj] != keyword[None] : identifier[self] . identifier[remove_obj] ( identifier[obj] ) keyword[return] keyword[True] keyword[return] keyword[False]
def remove_by_tag(self, tag): """ Remove the first encountered object with the specified tag from the world. Returns true if an object was found and removed. Returns false if no object could be removed. """ obj = self.find_obj_by_tag(tag) if obj != None: self.remove_obj(obj) return True # depends on [control=['if'], data=['obj']] return False
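A hedged sketch: `world` stands in for an instance of the containing class, which also supplies find_obj_by_tag and remove_obj.

if world.remove_by_tag('enemy'):
    print('removed one tagged object')
else:
    print('no object carries that tag')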