code: string (lengths 75 to 104k)
docstring: string (lengths 1 to 46.9k)
def put_object(self, cont, obj, local_file):
    '''
    Upload a file to Swift
    '''
    try:
        with salt.utils.files.fopen(local_file, 'rb') as fp_:
            self.conn.put_object(cont, obj, fp_)
        return True
    except Exception as exc:
        log.error('There was an error::')
        if hasattr(exc, 'code') and hasattr(exc, 'msg'):
            log.error('    Code: %s: %s', exc.code, exc.msg)
        log.error('    Content: \n%s',
                  getattr(exc, 'read', lambda: six.text_type(exc))())
        return False
Upload a file to Swift
def _match_serializers_by_query_arg(self, serializers):
    """Match serializer by query arg."""
    # if the format query argument is present, match the serializer
    arg_name = current_app.config.get('REST_MIMETYPE_QUERY_ARG_NAME')
    if arg_name:
        arg_value = request.args.get(arg_name, None)
        if arg_value is None:
            return None
        # Search for the serializer matching the format
        try:
            return serializers[
                self.serializers_query_aliases[arg_value]]
        except KeyError:  # either no serializer for this format
            return None

    return None
Match serializer by query arg.
def reset_to_flows(self, force=False, _meta=None):
    """ Keeps only the absolute values.

    This removes all attributes which can not be aggregated and must be
    recalculated after the aggregation.

    Parameters
    ----------
    force: boolean, optional
        If True, reset to flows although the system can not be
        recalculated. Default: False

    _meta: MRIOMetaData, optional
        Metadata handler for logging, optional. Internal
    """
    # Development note: The attributes which should be removed are
    # defined in self.__non_agg_attributes__
    strwarn = None
    for df in self.__basic__:
        if (getattr(self, df)) is None:
            if force:
                strwarn = ("Reset system warning - Recalculation after "
                           "reset not possible "
                           "because {} missing".format(df))
                warnings.warn(strwarn, ResetWarning)
            else:
                raise ResetError("Too few tables to recalculate the "
                                 "system after reset ({} missing) "
                                 "- reset can be forced by passing "
                                 "'force=True')".format(df))

    if _meta:
        _meta._add_modify("Reset to absolute flows")
        if strwarn:
            _meta._add_modify(strwarn)

    [setattr(self, key, None) for key in self.__non_agg_attributes__]
    return self
Keeps only the absolute values. This removes all attributes which can not be aggregated and must be recalculated after the aggregation. Parameters ---------- force: boolean, optional If True, reset to flows although the system can not be recalculated. Default: False _meta: MRIOMetaData, optional Metadata handler for logging, optional. Internal
def import_all(path):
    """Import all polygons from a .poly file.

    Returns a list of the imported polygon filters
    """
    plist = []
    fid = 0
    while True:
        try:
            p = PolygonFilter(filename=path, fileid=fid)
            plist.append(p)
            fid += 1
        except IndexError:
            break
    return plist
Import all polygons from a .poly file. Returns a list of the imported polygon filters
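A minimal usage sketch for the loader above, assuming `import_all` and `PolygonFilter` are importable from the same module; the file name is a hypothetical example.

# Hypothetical usage of import_all; "filters.poly" is an assumed file path.
polygons = import_all("filters.poly")
for poly in polygons:
    print(poly)  # each entry is one PolygonFilter read from the file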
def dispatch_event(self, event):
    """
    Takes an event dict.  Logs the event if needed and cleans up the dict
    such as setting the index needed for composites.
    """
    if self.config["debug"]:
        self.py3_wrapper.log("received event {}".format(event))

    # usage variables
    event["index"] = event.get("index", "")
    instance = event.get("instance", "")
    name = event.get("name", "")

    # composites have an index which is passed to i3bar with
    # the instance.  We need to separate this out here and
    # clean up the event.  If index is an integer type then cast it as such.
    if " " in instance:
        instance, index = instance.split(" ", 1)
        try:
            index = int(index)
        except ValueError:
            pass
        event["index"] = index
        event["instance"] = instance

    if self.config["debug"]:
        self.py3_wrapper.log(
            'trying to dispatch event to module "{}"'.format(
                "{} {}".format(name, instance).strip()
            )
        )

    # guess the module config name
    module_name = "{} {}".format(name, instance).strip()

    default_event = False
    module_info = self.output_modules.get(module_name)
    module = module_info["module"]
    # execute any configured i3-msg command
    # we do not do this for containers
    # modules that have failed do not execute their config on_click
    if module.allow_config_clicks:
        button = event.get("button", 0)
        on_click = self.on_click.get(module_name, {}).get(str(button))
        if on_click:
            task = EventClickTask(module_name, event, self, on_click)
            self.py3_wrapper.timeout_queue_add(task)
        # otherwise setup default action on button 2 press
        elif button == 2:
            default_event = True

    # do the work
    task = EventTask(module_name, event, default_event, self)
    self.py3_wrapper.timeout_queue_add(task)
Takes an event dict. Logs the event if needed and cleans up the dict such as setting the index needed for composites.
def GetKernelParams(time, flux, errors, kernel='Basic', mask=[],
                    giter=3, gmaxf=200, guess=None):
    '''
    Optimizes the GP by training it on the current de-trended light curve.
    Returns the white noise amplitude, red noise amplitude, and red noise
    timescale.

    :param array_like time: The time array
    :param array_like flux: The flux array
    :param array_like errors: The flux errors array
    :param array_like mask: The indices to be masked when training the GP. \
           Default `[]`
    :param int giter: The number of iterations. Default 3
    :param int gmaxf: The maximum number of function evaluations. Default 200
    :param tuple guess: The guess to initialize the minimization with. \
           Default :py:obj:`None`
    '''
    log.info("Optimizing the GP...")

    # Save a copy of time and errors for later
    time_copy = np.array(time)
    errors_copy = np.array(errors)

    # Apply the mask
    time = np.delete(time, mask)
    flux = np.delete(flux, mask)
    errors = np.delete(errors, mask)

    # Remove 5-sigma outliers to be safe
    f = flux - savgol_filter(flux, 49, 2) + np.nanmedian(flux)
    med = np.nanmedian(f)
    MAD = 1.4826 * np.nanmedian(np.abs(f - med))
    mask = np.where((f > med + 5 * MAD) | (f < med - 5 * MAD))[0]
    time = np.delete(time, mask)
    flux = np.delete(flux, mask)
    errors = np.delete(errors, mask)

    # Initial guesses and bounds
    white = np.nanmedian([np.nanstd(c) for c in Chunks(flux, 13)])
    amp = np.nanstd(flux)
    tau = 30.0
    if kernel == 'Basic':
        if guess is None:
            guess = [white, amp, tau]
        bounds = [[0.1 * white, 10. * white],
                  [1., 10000. * amp],
                  [0.5, 100.]]
    elif kernel == 'QuasiPeriodic':
        if guess is None:
            guess = [white, amp, tau, 1., 20.]
        bounds = [[0.1 * white, 10. * white],
                  [1., 10000. * amp],
                  [1e-5, 1e2],
                  [0.02, 100.]]
    else:
        raise ValueError('Invalid value for `kernel`.')

    # Loop
    llbest = -np.inf
    xbest = np.array(guess)
    for i in range(giter):

        # Randomize an initial guess
        iguess = [np.inf for g in guess]
        for j, b in enumerate(bounds):
            tries = 0
            while (iguess[j] < b[0]) or (iguess[j] > b[1]):
                iguess[j] = (1 + 0.5 * np.random.randn()) * guess[j]
                tries += 1
                if tries > 100:
                    iguess[j] = b[0] + np.random.random() * (b[1] - b[0])
                    break

        # Optimize
        x = fmin_l_bfgs_b(NegLnLike, iguess, approx_grad=False,
                          bounds=bounds,
                          args=(time, flux, errors, kernel), maxfun=gmaxf)
        log.info('Iteration #%d/%d:' % (i + 1, giter))
        log.info('   ' + x[2]['task'].decode('utf-8'))
        log.info('   ' + 'Function calls: %d' % x[2]['funcalls'])
        log.info('   ' + 'Log-likelihood: %.3e' % -x[1])
        if kernel == 'Basic':
            log.info('   ' + 'White noise   : %.3e (%.1f x error bars)' %
                     (x[0][0], x[0][0] / np.nanmedian(errors)))
            log.info('   ' + 'Red amplitude : %.3e (%.1f x stand dev)' %
                     (x[0][1], x[0][1] / np.nanstd(flux)))
            log.info('   ' + 'Red timescale : %.2f days' % x[0][2])
        elif kernel == 'QuasiPeriodic':
            log.info('   ' + 'White noise   : %.3e (%.1f x error bars)' %
                     (x[0][0], x[0][0] / np.nanmedian(errors)))
            log.info('   ' + 'Red amplitude : %.3e (%.1f x stand dev)' %
                     (x[0][1], x[0][1] / np.nanstd(flux)))
            log.info('   ' + 'Gamma         : %.3e' % x[0][2])
            log.info('   ' + 'Period        : %.2f days' % x[0][3])
        if -x[1] > llbest:
            llbest = -x[1]
            xbest = np.array(x[0])

    return xbest
Optimizes the GP by training it on the current de-trended light curve. Returns the white noise amplitude, red noise amplitude, and red noise timescale. :param array_like time: The time array :param array_like flux: The flux array :param array_like errors: The flux errors array :param array_like mask: The indices to be masked when training the GP. \ Default `[]` :param int giter: The number of iterations. Default 3 :param int gmaxf: The maximum number of function evaluations. Default 200 :param tuple guess: The guess to initialize the minimization with. \ Default :py:obj:`None`
def _is_attribute_property(name, klass):
    """ Check if the given attribute *name* is a property
    in the given *klass*.

    It will look for `property` calls or for functions
    with the given name, decorated by `property` or `property`
    subclasses.
    Returns ``True`` if the name is a property in the given klass,
    ``False`` otherwise.
    """
    try:
        attributes = klass.getattr(name)
    except astroid.NotFoundError:
        return False
    property_name = "{}.property".format(BUILTINS)
    for attr in attributes:
        if attr is astroid.Uninferable:
            continue
        try:
            infered = next(attr.infer())
        except astroid.InferenceError:
            continue
        if isinstance(infered, astroid.FunctionDef) and decorated_with_property(
            infered
        ):
            return True
        if infered.pytype() == property_name:
            return True
    return False
Check if the given attribute *name* is a property in the given *klass*. It will look for `property` calls or for functions with the given name, decorated by `property` or `property` subclasses. Returns ``True`` if the name is a property in the given klass, ``False`` otherwise.
def make_input(self):
    """Construct and write the input file of the calculation."""
    # Set the file paths.
    all_files = {"ddkfile_" + str(n + 1): ddk
                 for n, ddk in enumerate(self.ddk_filepaths)}
    all_files.update({"wfkfile": self.wfk_filepath})
    files_nml = {"FILES": all_files}
    files = nmltostring(files_nml)

    # Get the input specified by the user
    user_file = nmltostring(self.input.as_dict())

    # Join them.
    return files + user_file
Construct and write the input file of the calculation.
def _GetNormalizedTimestamp(self):
    """Retrieves the normalized timestamp.

    Returns:
      decimal.Decimal: normalized timestamp, which contains the number of
          seconds since January 1, 1970 00:00:00 and a fraction of second
          used for increased precision, or None if the normalized timestamp
          cannot be determined.
    """
    if self._normalized_timestamp is None:
        if self._timestamp is not None:
            self._normalized_timestamp = (
                decimal.Decimal(self._timestamp) /
                definitions.NANOSECONDS_PER_SECOND)

    return self._normalized_timestamp
Retrieves the normalized timestamp. Returns: decimal.Decimal: normalized timestamp, which contains the number of seconds since January 1, 1970 00:00:00 and a fraction of second used for increased precision, or None if the normalized timestamp cannot be determined.
def trace_walker(module):
    """
    Defines a generator used to walk into modules.

    :param module: Module to walk.
    :type module: ModuleType
    :return: Class / Function / Method.
    :rtype: object or object
    """
    for name, function in inspect.getmembers(module, inspect.isfunction):
        yield None, function

    for name, cls in inspect.getmembers(module, inspect.isclass):
        yield cls, None

        for name, method in inspect.getmembers(cls, inspect.ismethod):
            yield cls, method

        for name, function in inspect.getmembers(cls, inspect.isfunction):
            yield cls, function

        for name, accessor in inspect.getmembers(cls, lambda x: type(x) is property):
            yield cls, accessor.fget
            yield cls, accessor.fset
            yield cls, accessor.fdel
Defines a generator used to walk into modules. :param module: Module to walk. :type module: ModuleType :return: Class / Function / Method. :rtype: object or object
def get_activations(self):
    """Return a list of activations."""
    res = (self.added, self.removed)
    self.added = set()
    self.removed = set()
    return res
Return a list of activations.
def groups(self):
    """Component groups

    Special property which point to a :class:`~pylls.cachet.ComponentGroups`
    instance for convenience. This instance is initialized on first call.
    """
    if not self._groups:
        self._groups = ComponentGroups(self.api_client)
    return self._groups
Component groups Special property which point to a :class:`~pylls.cachet.ComponentGroups` instance for convenience. This instance is initialized on first call.
def read_requirements(path,
                      strict_bounds,
                      conda_format=False,
                      filter_names=None):
    """
    Read a requirements.txt file, expressed as a path relative to Zipline
    root.

    Returns requirements with the pinned versions as lower bounds
    if `strict_bounds` is falsey.
    """
    real_path = join(dirname(abspath(__file__)), path)
    with open(real_path) as f:
        reqs = _filter_requirements(f.readlines(), filter_names=filter_names,
                                    filter_sys_version=not conda_format)

        if not strict_bounds:
            reqs = map(_with_bounds, reqs)

        if conda_format:
            reqs = map(_conda_format, reqs)

        return list(reqs)
Read a requirements.txt file, expressed as a path relative to Zipline root. Returns requirements with the pinned versions as lower bounds if `strict_bounds` is falsey.
def write(self, data, waitForResponse=True, timeout=5, parseError=True,
          writeTerm='\r', expectedResponseTermSeq=None):
    """ Write data to the modem.

    This method adds the ``\\r\\n`` end-of-line sequence to the data parameter, and
    writes it to the modem.

    :param data: Command/data to be written to the modem
    :type data: str
    :param waitForResponse: Whether this method should block and return the response from the modem or not
    :type waitForResponse: bool
    :param timeout: Maximum amount of time in seconds to wait for a response from the modem
    :type timeout: int
    :param parseError: If True, a CommandError is raised if the modem responds with an error (otherwise the response is returned as-is)
    :type parseError: bool
    :param writeTerm: The terminating sequence to append to the written data
    :type writeTerm: str
    :param expectedResponseTermSeq: The expected terminating sequence that marks the end of the modem's response (defaults to ``\\r\\n``)
    :type expectedResponseTermSeq: str

    :raise CommandError: if the command returns an error (only if parseError parameter is True)
    :raise TimeoutException: if no response to the command was received from the modem

    :return: A list containing the response lines from the modem, or None if waitForResponse is False
    :rtype: list
    """
    self.log.debug('write: %s', data)
    responseLines = super(GsmModem, self).write(data + writeTerm,
                                                waitForResponse=waitForResponse,
                                                timeout=timeout,
                                                expectedResponseTermSeq=expectedResponseTermSeq)
    if self._writeWait > 0:  # Sleep a bit if required (some older modems suffer under load)
        time.sleep(self._writeWait)
    if waitForResponse:
        cmdStatusLine = responseLines[-1]
        if parseError:
            if 'ERROR' in cmdStatusLine:
                cmErrorMatch = self.CM_ERROR_REGEX.match(cmdStatusLine)
                if cmErrorMatch:
                    errorType = cmErrorMatch.group(1)
                    errorCode = int(cmErrorMatch.group(2))
                    if errorCode == 515 or errorCode == 14:
                        # 515 means: "Please wait, init or command processing in progress."
                        # 14 means "SIM busy"
                        self._writeWait += 0.2  # Increase waiting period temporarily
                        # Retry the command after waiting a bit
                        self.log.debug('Device/SIM busy error detected; self._writeWait adjusted to %fs', self._writeWait)
                        time.sleep(self._writeWait)
                        result = self.write(data, waitForResponse, timeout, parseError,
                                            writeTerm, expectedResponseTermSeq)
                        self.log.debug('self_writeWait set to 0.1 because of recovering from device busy (515) error')
                        if errorCode == 515:
                            self._writeWait = 0.1  # Set this to something sane for further commands (slow modem)
                        else:
                            self._writeWait = 0  # The modem was just waiting for the SIM card
                        return result
                    if errorType == 'CME':
                        raise CmeError(data, int(errorCode))
                    else:  # CMS error
                        raise CmsError(data, int(errorCode))
                else:
                    raise CommandError(data)
            elif cmdStatusLine == 'COMMAND NOT SUPPORT':
                # Some Huawei modems respond with this for unknown commands
                raise CommandError(data + '({0})'.format(cmdStatusLine))
    return responseLines
Write data to the modem. This method adds the ``\\r\\n`` end-of-line sequence to the data parameter, and writes it to the modem. :param data: Command/data to be written to the modem :type data: str :param waitForResponse: Whether this method should block and return the response from the modem or not :type waitForResponse: bool :param timeout: Maximum amount of time in seconds to wait for a response from the modem :type timeout: int :param parseError: If True, a CommandError is raised if the modem responds with an error (otherwise the response is returned as-is) :type parseError: bool :param writeTerm: The terminating sequence to append to the written data :type writeTerm: str :param expectedResponseTermSeq: The expected terminating sequence that marks the end of the modem's response (defaults to ``\\r\\n``) :type expectedResponseTermSeq: str :raise CommandError: if the command returns an error (only if parseError parameter is True) :raise TimeoutException: if no response to the command was received from the modem :return: A list containing the response lines from the modem, or None if waitForResponse is False :rtype: list
def count(self):
    """Summary

    Returns:
        TYPE: Description
    """
    return LazyOpResult(
        grizzly_impl.count(
            self.expr,
            self.weld_type
        ),
        WeldInt(),
        0
    )
Summary Returns: TYPE: Description
def _connect_control(self, event, param, arg):
    """
    Is the actual callback function for :meth:`init_hw_connect_control_ex`.

    :param event:
        Event (:data:`CbEvent.EVENT_CONNECT`, :data:`CbEvent.EVENT_DISCONNECT` or
        :data:`CbEvent.EVENT_FATALDISCON`).
    :param param: Additional parameter depending on the event.
        - CbEvent.EVENT_CONNECT: always 0
        - CbEvent.EVENT_DISCONNECT: always 0
        - CbEvent.EVENT_FATALDISCON: USB-CAN-Handle of the disconnected module
    :param arg:
        Additional parameter defined with :meth:`init_hardware_ex` (not used in this wrapper class).
    """
    log.debug("Event: %s, Param: %s" % (event, param))

    if event == CbEvent.EVENT_FATALDISCON:
        self.fatal_disconnect_event(param)
    elif event == CbEvent.EVENT_CONNECT:
        self.connect_event()
    elif event == CbEvent.EVENT_DISCONNECT:
        self.disconnect_event()
Is the actual callback function for :meth:`init_hw_connect_control_ex`. :param event: Event (:data:`CbEvent.EVENT_CONNECT`, :data:`CbEvent.EVENT_DISCONNECT` or :data:`CbEvent.EVENT_FATALDISCON`). :param param: Additional parameter depending on the event. - CbEvent.EVENT_CONNECT: always 0 - CbEvent.EVENT_DISCONNECT: always 0 - CbEvent.EVENT_FATALDISCON: USB-CAN-Handle of the disconnected module :param arg: Additional parameter defined with :meth:`init_hardware_ex` (not used in this wrapper class).
def get_path_regex(self, path):
    """ Evaluate the registered path-alias regular expressions """
    for regex, func in self._regex_map:
        match = re.match(regex, path)
        if match:
            return func(match)

    return None, None
Evaluate the registered path-alias regular expressions
def get_previous_request(rid):
    """Return the last ceph broker request sent on a given relation

    @param rid: Relation id to query for request
    """
    request = None
    broker_req = relation_get(attribute='broker_req', rid=rid,
                              unit=local_unit())
    if broker_req:
        request_data = json.loads(broker_req)
        request = CephBrokerRq(api_version=request_data['api-version'],
                               request_id=request_data['request-id'])
        request.set_ops(request_data['ops'])

    return request
Return the last ceph broker request sent on a given relation @param rid: Relation id to query for request
def execute(self, arg_list):
    """Main function to parse and dispatch commands by given ``arg_list``

    :param arg_list: all arguments provided by the command line
    :param type: list
    """
    arg_map = self.parser.parse_args(arg_list).__dict__
    command = arg_map.pop(self._COMMAND_FLAG)
    return command(**arg_map)
Main function to parse and dispatch commands by given ``arg_list`` :param arg_list: all arguments provided by the command line :param type: list
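The dispatcher above relies on the parser storing each subcommand's handler under a command flag; that setup is not shown in this row, so the following is a hypothetical sketch of the usual argparse wiring (the flag name "command" and the greet handler are illustrative only).

# Hypothetical sketch of the argparse wiring `execute` assumes: each subparser
# registers its handler via set_defaults, so popping that key leaves only the
# handler's keyword arguments.
import argparse

def greet(name):
    print("hello, {}".format(name))

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
greet_parser = subparsers.add_parser("greet")
greet_parser.add_argument("--name", default="world")
greet_parser.set_defaults(command=greet)  # "command" stands in for _COMMAND_FLAG

args = parser.parse_args(["greet", "--name", "Ada"]).__dict__
handler = args.pop("command")
handler(**args)  # prints "hello, Ada"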
def class_config_section(cls):
    """Get the config class config section"""
    def c(s):
        """return a commented, wrapped block."""
        s = '\n\n'.join(wrap_paragraphs(s, 78))

        return '# ' + s.replace('\n', '\n# ')

    # section header
    breaker = '#' + '-' * 78
    s = "# %s configuration" % cls.__name__
    lines = [breaker, s, breaker, '']
    # get the description trait
    desc = cls.class_traits().get('description')
    if desc:
        desc = desc.default_value
    else:
        # no description trait, use __doc__
        desc = getattr(cls, '__doc__', '')
    if desc:
        lines.append(c(desc))
        lines.append('')

    parents = []
    for parent in cls.mro():
        # only include parents that are not base classes
        # and are not the class itself
        # and have some configurable traits to inherit
        if parent is not cls and issubclass(parent, Configurable) and \
                parent.class_traits(config=True):
            parents.append(parent)

    if parents:
        pstr = ', '.join([p.__name__ for p in parents])
        lines.append(c('%s will inherit config from: %s' % (cls.__name__, pstr)))
        lines.append('')

    for name, trait in cls.class_traits(config=True).iteritems():
        help = trait.get_metadata('help') or ''
        lines.append(c(help))
        lines.append('# c.%s.%s = %r' % (cls.__name__, name, trait.get_default_value()))
        lines.append('')
    return '\n'.join(lines)
Get the config class config section
def get_all_entity_type_saved_searches(self, entitytype, **kwargs):  # noqa: E501
    """Get all saved searches for a specific entity type for a user  # noqa: E501

       # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_all_entity_type_saved_searches(entitytype, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entitytype: (required)
    :param int offset:
    :param int limit:
    :return: ResponseContainerPagedSavedSearch
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.get_all_entity_type_saved_searches_with_http_info(entitytype, **kwargs)  # noqa: E501
    else:
        (data) = self.get_all_entity_type_saved_searches_with_http_info(entitytype, **kwargs)  # noqa: E501
        return data
Get all saved searches for a specific entity type for a user # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_all_entity_type_saved_searches(entitytype, async_req=True) >>> result = thread.get() :param async_req bool :param str entitytype: (required) :param int offset: :param int limit: :return: ResponseContainerPagedSavedSearch If the method is called asynchronously, returns the request thread.
def descendants(self, node):
    """ Returns a :class:`QuerySet` with all descendants for a given
    :class:`CTENode` `node`.

    :param node: the :class:`CTENode` whose descendants are required.

    :returns: A :class:`QuerySet` with all descendants of the given `node`.
    """
    # We need to read the _cte_node_* attributes, so ensure they exist.
    self._ensure_parameters()
    # This is implemented in the CTE WHERE logic, so we pass a reference to
    # the offset CTENode to the custom QuerySet, which will process it.
    # Because the compiler will include the node in question in the offset,
    # we must exclude it here.
    return CTEQuerySet(self.model, using=self._db, offset=node).exclude(pk=node.pk)
Returns a :class:`QuerySet` with all descendants for a given :class:`CTENode` `node`. :param node: the :class:`CTENode` whose descendants are required. :returns: A :class:`QuerySet` with all descendants of the given `node`.
def patch_object(obj, attr, value):
    """
    Context manager that temporarily patches an object *obj* by replacing its
    attribute *attr* with *value*. The original value is set again when the
    context is closed.
    """
    orig = getattr(obj, attr, no_value)

    try:
        setattr(obj, attr, value)
        yield obj
    finally:
        try:
            if orig is no_value:
                delattr(obj, attr)
            else:
                setattr(obj, attr, orig)
        except:
            pass
Context manager that temporarily patches an object *obj* by replacing its attribute *attr* with *value*. The original value is set again when the context is closed.
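Because the body yields, the original is presumably wrapped with `contextlib.contextmanager` and uses a module-level `no_value` sentinel, neither of which appears in this row. A self-contained sketch of the same pattern, with illustrative names:

# Self-contained sketch of the temporary attribute-patch pattern; the names
# below are illustrative and not taken from the library above.
import contextlib

no_value = object()  # sentinel meaning "attribute did not exist before"

@contextlib.contextmanager
def patch_object(obj, attr, value):
    orig = getattr(obj, attr, no_value)
    try:
        setattr(obj, attr, value)
        yield obj
    finally:
        if orig is no_value:
            delattr(obj, attr)
        else:
            setattr(obj, attr, orig)

class Config:
    retries = 3

with patch_object(Config, "retries", 10):
    print(Config.retries)  # 10 while patched
print(Config.retries)      # 3 again after the context closes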
def load_mnist_dataset(mode='supervised', one_hot=True):
    """Load the MNIST handwritten digits dataset.

    :param mode: 'supervised' or 'unsupervised' mode
    :param one_hot: whether to get one hot encoded labels

    :return: train, validation, test data:
        for (X, y) if 'supervised',
        for (X) if 'unsupervised'
    """
    mnist = input_data.read_data_sets("MNIST_data/", one_hot=one_hot)

    # Training set
    trX = mnist.train.images
    trY = mnist.train.labels

    # Validation set
    vlX = mnist.validation.images
    vlY = mnist.validation.labels

    # Test set
    teX = mnist.test.images
    teY = mnist.test.labels

    if mode == 'supervised':
        return trX, trY, vlX, vlY, teX, teY
    elif mode == 'unsupervised':
        return trX, vlX, teX
Load the MNIST handwritten digits dataset. :param mode: 'supervised' or 'unsupervised' mode :param one_hot: whether to get one hot encoded labels :return: train, validation, test data: for (X, y) if 'supervised', for (X) if 'unsupervised'
def open(self, using=None, **kwargs):
    """
    Opens the index in elasticsearch.

    Any additional keyword arguments will be passed to
    ``Elasticsearch.indices.open`` unchanged.
    """
    return self._get_connection(using).indices.open(index=self._name, **kwargs)
Opens the index in elasticsearch. Any additional keyword arguments will be passed to ``Elasticsearch.indices.open`` unchanged.
def x10_all_lights_off(self, housecode):
    """Send the X10 All Lights Off command."""
    msg = X10Send.command_msg(housecode, X10_COMMAND_ALL_LIGHTS_OFF)
    self.send_msg(msg)
    self._x10_command_to_device(housecode, X10_COMMAND_ALL_LIGHTS_OFF, msg)
Send the X10 All Lights Off command.
def seek_end(fileobj, offset):
    """Like fileobj.seek(-offset, 2), but will not try to go beyond the start

    Needed since file objects from BytesIO will not raise IOError and
    file objects from open() will raise IOError if going to a negative offset.
    To make things easier for custom implementations, instead of allowing
    both behaviors, we just don't do it.

    Args:
        fileobj (fileobj)
        offset (int): how many bytes away from the end backwards to seek to

    Raises:
        IOError
    """
    if offset < 0:
        raise ValueError

    if get_size(fileobj) < offset:
        fileobj.seek(0, 0)
    else:
        fileobj.seek(-offset, 2)
Like fileobj.seek(-offset, 2), but will not try to go beyond the start Needed since file objects from BytesIO will not raise IOError and file objects from open() will raise IOError if going to a negative offset. To make things easier for custom implementations, instead of allowing both behaviors, we just don't do it. Args: fileobj (fileobj) offset (int): how many bytes away from the end backwards to seek to Raises: IOError
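A usage sketch, assuming `seek_end` and the `get_size` helper it calls are importable from the same module:

# Usage sketch; assumes seek_end (and its get_size helper) are in scope.
import io

buf = io.BytesIO(b"0123456789")
seek_end(buf, 4)
print(buf.read())   # b"6789"
seek_end(buf, 100)  # offset larger than the file: clamps to the start
print(buf.tell())   # 0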
def set_querier_mode(self, dpid, server_port):
    """set the datapath to work as a querier. note that you can set up
    only the one querier. when you called this method several times,
    only the last one becomes effective."""
    self.dpid = dpid
    self.server_port = server_port
    if self._querier_thread:
        hub.kill(self._querier_thread)
        self._querier_thread = None
set the datapath to work as a querier. note that you can set up only the one querier. when you called this method several times, only the last one becomes effective.
def file_mtime(file_path):
    """
    Returns the file modified time. This is with regards to the last
    modification the file has had in the droopescan repo, rather than
    actual file modification time in the filesystem.

    @param file_path: file path relative to the executable.
    @return datetime.datetime object.
    """
    if not os.path.isfile(file_path):
        raise IOError('File "%s" does not exist.' % file_path)

    ut = subprocess.check_output(['git', 'log', '-1', '--format=%ct',
                                  file_path]).strip()

    return datetime.fromtimestamp(int(ut))
Returns the file modified time. This is with regards to the last modification the file has had in the droopescan repo, rather than actual file modification time in the filesystem. @param file_path: file path relative to the executable. @return datetime.datetime object.
def async_batch_annotate_files(
    self,
    requests,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """
    Run asynchronous image detection and annotation for a list of generic
    files, such as PDF files, which may contain multiple pages and multiple
    images per page. Progress and results can be retrieved through the
    ``google.longrunning.Operations`` interface. ``Operation.metadata``
    contains ``OperationMetadata`` (metadata). ``Operation.response`` contains
    ``AsyncBatchAnnotateFilesResponse`` (results).

    Example:
        >>> from google.cloud import vision_v1p4beta1
        >>>
        >>> client = vision_v1p4beta1.ImageAnnotatorClient()
        >>>
        >>> # TODO: Initialize `requests`:
        >>> requests = []
        >>>
        >>> response = client.async_batch_annotate_files(requests)
        >>>
        >>> def callback(operation_future):
        ...     # Handle result.
        ...     result = operation_future.result()
        >>>
        >>> response.add_done_callback(callback)
        >>>
        >>> # Handle metadata.
        >>> metadata = response.metadata()

    Args:
        requests (list[Union[dict, ~google.cloud.vision_v1p4beta1.types.AsyncAnnotateFileRequest]]): Individual
            async file annotation requests for this batch.

            If a dict is provided, it must be of the same form as the protobuf
            message :class:`~google.cloud.vision_v1p4beta1.types.AsyncAnnotateFileRequest`
        retry (Optional[google.api_core.retry.Retry]):  A retry object used
            to retry requests. If ``None`` is specified, requests will not
            be retried.
        timeout (Optional[float]): The amount of time, in seconds, to wait
            for the request to complete. Note that if ``retry`` is
            specified, the timeout applies to each individual attempt.
        metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
            that is provided to the method.

    Returns:
        A :class:`~google.cloud.vision_v1p4beta1.types._OperationFuture` instance.

    Raises:
        google.api_core.exceptions.GoogleAPICallError: If the request
                failed for any reason.
        google.api_core.exceptions.RetryError: If the request failed due
                to a retryable error and retry attempts failed.
        ValueError: If the parameters are invalid.
    """
    # Wrap the transport method to add retry and timeout logic.
    if "async_batch_annotate_files" not in self._inner_api_calls:
        self._inner_api_calls[
            "async_batch_annotate_files"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.async_batch_annotate_files,
            default_retry=self._method_configs["AsyncBatchAnnotateFiles"].retry,
            default_timeout=self._method_configs["AsyncBatchAnnotateFiles"].timeout,
            client_info=self._client_info,
        )

    request = image_annotator_pb2.AsyncBatchAnnotateFilesRequest(requests=requests)
    operation = self._inner_api_calls["async_batch_annotate_files"](
        request, retry=retry, timeout=timeout, metadata=metadata
    )
    return google.api_core.operation.from_gapic(
        operation,
        self.transport._operations_client,
        image_annotator_pb2.AsyncBatchAnnotateFilesResponse,
        metadata_type=image_annotator_pb2.OperationMetadata,
    )
Run asynchronous image detection and annotation for a list of generic files, such as PDF files, which may contain multiple pages and multiple images per page. Progress and results can be retrieved through the ``google.longrunning.Operations`` interface. ``Operation.metadata`` contains ``OperationMetadata`` (metadata). ``Operation.response`` contains ``AsyncBatchAnnotateFilesResponse`` (results). Example: >>> from google.cloud import vision_v1p4beta1 >>> >>> client = vision_v1p4beta1.ImageAnnotatorClient() >>> >>> # TODO: Initialize `requests`: >>> requests = [] >>> >>> response = client.async_batch_annotate_files(requests) >>> >>> def callback(operation_future): ... # Handle result. ... result = operation_future.result() >>> >>> response.add_done_callback(callback) >>> >>> # Handle metadata. >>> metadata = response.metadata() Args: requests (list[Union[dict, ~google.cloud.vision_v1p4beta1.types.AsyncAnnotateFileRequest]]): Individual async file annotation requests for this batch. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.vision_v1p4beta1.types.AsyncAnnotateFileRequest` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.vision_v1p4beta1.types._OperationFuture` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
def set_index(self, index):
    """Display the data of the given index

    :param index: the index to paint
    :type index: QtCore.QModelIndex
    :returns: None
    :rtype: None
    :raises: None
    """
    item = index.internalPointer()
    note = item.internal_data()
    self.content_lb.setText(note.content)
    self.created_dte.setDateTime(dt_to_qdatetime(note.date_created))
    self.updated_dte.setDateTime(dt_to_qdatetime(note.date_updated))
    self.username_lb.setText(note.user.username)
Display the data of the given index :param index: the index to paint :type index: QtCore.QModelIndex :returns: None :rtype: None :raises: None
def addField(self, field):
    """add a field to the legend"""
    if field.lower() in self.legend:
        raise ValueError("%s is already in the legend" % field.lower())

    self.legend[field.lower()] = len(self.legend)
    if len(self.strLegend) > 0:
        self.strLegend += self.separator + field
    else:
        self.strLegend += field
add a field to the legend
def get_log_level(args):
    # type: (typing.Dict[str, typing.Any]) -> int
    """Get the log level from the CLI arguments.

    Removes logging arguments from sys.argv.

    Args:
        args: The parsed docopt arguments to be used to determine the logging
            level.

    Returns:
        The correct log level based on the three CLI arguments given.

    Raises:
        ValueError: Raised if the given log level is not in the acceptable
            list of values.
    """
    index = -1
    log_level = None
    if '<command>' in args and args['<command>']:
        index = sys.argv.index(args['<command>'])
    if args.get('--debug'):
        log_level = 'DEBUG'
        if '--debug' in sys.argv and sys.argv.index('--debug') < index:
            sys.argv.remove('--debug')
        elif '-d' in sys.argv and sys.argv.index('-d') < index:
            sys.argv.remove('-d')
    elif args.get('--verbose'):
        log_level = 'INFO'
        if '--verbose' in sys.argv and sys.argv.index('--verbose') < index:
            sys.argv.remove('--verbose')
        elif '-v' in sys.argv and sys.argv.index('-v') < index:
            sys.argv.remove('-v')
    elif args.get('--log-level'):
        log_level = args['--log-level']
        sys.argv.remove('--log-level')
        sys.argv.remove(log_level)
    if log_level not in (None, 'DEBUG', 'INFO', 'WARN', 'ERROR'):
        raise exceptions.InvalidLogLevelError(log_level)
    return getattr(logging, log_level) if log_level else None
Get the log level from the CLI arguments. Removes logging arguments from sys.argv. Args: args: The parsed docopt arguments to be used to determine the logging level. Returns: The correct log level based on the three CLI arguments given. Raises: ValueError: Raised if the given log level is not in the acceptable list of values.
def _init_metadata(self):
    """stub"""
    TextAnswerFormRecord._init_metadata(self)
    FilesAnswerFormRecord._init_metadata(self)
    super(AnswerTextAndFilesMixin, self)._init_metadata()
stub
def register_class(self, class_type, component_scope=scope.InstancePerDependency,
                   register_as=None):
    """
    Registers the given class for creation via its constructor.

    :param class_type: The class type.
    :param component_scope: The scope of the component, defaults to instance per dependency.
    :param register_as: The types to register the class as, defaults to the given class_type.
    """
    registration = _ConstructorRegistration(class_type, component_scope())
    self._register(class_type, registration, register_as)
Registers the given class for creation via its constructor. :param class_type: The class type. :param component_scope: The scope of the component, defaults to instance per dependency. :param register_as: The types to register the class as, defaults to the given class_type.
def login(request, template_name='registration/login.html',
          redirect_field_name=REDIRECT_FIELD_NAME,
          authentication_form=AuthenticationForm,
          current_app=None, extra_context=None):
    """
    Displays the login form and handles the login action.
    """
    redirect_to = request.POST.get(redirect_field_name,
                                   request.GET.get(redirect_field_name, ''))

    if request.method == "POST":
        form = authentication_form(data=request.POST, request=request)
        if form.is_valid():
            netloc = urlparse(redirect_to)[1]

            # Use default setting if redirect_to is empty
            if not redirect_to:
                redirect_to = settings.LOGIN_REDIRECT_URL

            # Heavier security check -- don't allow redirection to a different
            # host.
            elif netloc and netloc != request.get_host():
                redirect_to = settings.LOGIN_REDIRECT_URL

            # Okay, security checks complete. Log the user in.
            auth_login(request, form.get_user())

            return redirect(redirect_to)
    else:
        form = authentication_form(request)

    current_site = get_current_site(request)

    context = {
        'form': form,
        redirect_field_name: redirect_to,
        'site': current_site,
        'site_name': current_site.name,
    }
    if extra_context is not None:
        context.update(extra_context)

    request.current_app = current_app

    return TemplateResponse(request, template_name, context)
Displays the login form and handles the login action.
def prepare_static_data(self, data):
    """
    If user defined static fields, then process them with visible value
    """
    d = data.copy()
    for f in self.get_fields():
        if f['static'] and f['name'] in d:
            d[f['name']] = make_view_field(f, None, self.types_convert_map,
                                           self.fields_convert_map,
                                           d[f['name']])['display']
    return d
If user defined static fields, then process them with visible value
def GET_namespace_info(self, path_info, namespace_id):
    """
    Look up a namespace's info
    Reply information about a namespace
    Reply 404 if the namespace doesn't exist
    Reply 502 for any error in talking to the blockstack server
    """
    if not check_namespace(namespace_id):
        return self._reply_json({'error': 'Invalid namespace'}, status_code=400)

    blockstackd_url = get_blockstackd_url()
    namespace_rec = blockstackd_client.get_namespace_record(namespace_id, hostport=blockstackd_url)
    if json_is_error(namespace_rec):
        # error
        status_code = namespace_rec.get('http_status', 502)
        return self._reply_json({'error': namespace_rec['error']}, status_code=status_code)

    self._reply_json(namespace_rec)
    return
Look up a namespace's info Reply information about a namespace Reply 404 if the namespace doesn't exist Reply 502 for any error in talking to the blockstack server
def _parse_bool(value):
    """Convert ``string`` or ``bool`` to ``bool``."""
    if isinstance(value, bool):
        return value

    elif isinstance(value, str):
        if value == 'True':
            return True
        elif value == 'False':
            return False

    raise Exception("Value %s is not boolean." % value)
Convert ``string`` or ``bool`` to ``bool``.
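A behavior sketch, assuming `_parse_bool` is in scope; note that only the exact strings 'True' and 'False' are accepted.

# Behavior sketch; assumes _parse_bool is importable from the module above.
print(_parse_bool(True))      # True
print(_parse_bool('False'))   # False
_parse_bool('yes')            # raises Exception: Value yes is not boolean.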
def remove(self):
    """Remove or delete the specified object from slick.  You specify which one you want
    by providing the id as a parameter to the parent object, using it as a function.
    Example:
        slick.projects("4fd8cd95e4b0ee7ba54b9885").remove()
    """
    url = self.getUrl()

    # hopefully when we discover what problems exist in slick to require this, we can take the loop out
    for retry in range(3):
        try:
            self.logger.debug("Making DELETE request to slick at url %s", url)
            r = requests.delete(url)
            self.logger.debug("Request returned status code %d", r.status_code)
            if r.status_code == 200:
                return None
            else:
                self.logger.debug("Body of what slick returned: %s", r.text)
        except BaseException as error:
            self.logger.warn("Received exception while connecting to slick at %s",
                             url, exc_info=sys.exc_info())
    raise SlickCommunicationError(
        "Tried 3 times to request data from slick at url %s without a successful status code.", url)
Remove or delete the specified object from slick. You specify which one you want by providing the id as a parameter to the parent object, using it as a function. Example: slick.projects("4fd8cd95e4b0ee7ba54b9885").remove()
def text_remove_empty_lines(text):
    """
    Whitespace normalization:

      - Strip empty lines
      - Strip trailing whitespace
    """
    lines = [line.rstrip() for line in text.splitlines() if line.strip()]
    return "\n".join(lines)
Whitespace normalization: - Strip empty lines - Strip trailing whitespace
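A quick example of the normalization, assuming the function above is in scope:

# Example; assumes text_remove_empty_lines is in scope.
raw = "first line   \n\n   \nsecond line\t\n"
print(text_remove_empty_lines(raw))  # "first line\nsecond line"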
def can_infect(self, event):
    """
    Whether the spreading stop can infect using this event.
    """
    if event.from_stop_I != self.stop_I:
        return False

    if not self.has_been_visited():
        return False
    else:
        time_sep = event.dep_time_ut - self.get_min_visit_time()
        # if the gap between the earliest visit_time and current time is
        # smaller than the min. transfer time, the stop can pass the spreading
        # forward
        if (time_sep >= self.min_transfer_time) or (event.trip_I == -1 and time_sep >= 0):
            return True
        else:
            for visit in self.visit_events:
                # if no transfer, please hop-on
                if (event.trip_I == visit.trip_I) and (time_sep >= 0):
                    return True
            return False
Whether the spreading stop can infect using this event.
def rank2d(X, y=None, ax=None, algorithm='pearson', features=None,
           show_feature_names=True, colormap='RdBu_r', **kwargs):
    """Displays pairwise comparisons of features with the algorithm and ranks
    them in a lower-left triangle heatmap plot.

    This helper function is a quick wrapper to utilize the Rank2D Visualizer
    (Transformer) for one-off analysis.

    Parameters
    ----------
    X : ndarray or DataFrame of shape n x m
        A matrix of n instances with m features

    y : ndarray or Series of length n
        An array or series of target or class values

    ax : matplotlib axes
        the axis to plot the figure on.

    algorithm : one of {pearson, covariance, spearman, kendalltau}
        the ranking algorithm to use, default is Pearson correlation.

    features : list
        A list of feature names to use.
        If a DataFrame is passed to fit and features is None, feature
        names are selected as the columns of the DataFrame.

    show_feature_names : boolean, default: True
        If True, the feature names are used to label the axis ticks in the
        plot.

    colormap : string or cmap
        optional string or matplotlib cmap to colorize lines
        Use either color to colorize the lines on a per class basis or
        colormap to color them on a continuous scale.

    Returns
    -------
    ax : matplotlib axes
        Returns the axes that the parallel coordinates were drawn on.
    """
    # Instantiate the visualizer
    visualizer = Rank2D(ax, algorithm, features, colormap,
                        show_feature_names, **kwargs)

    # Fit and transform the visualizer (calls draw)
    visualizer.fit(X, y, **kwargs)
    visualizer.transform(X)

    # Return the axes object on the visualizer
    return visualizer.ax
Displays pairwise comparisons of features with the algorithm and ranks them in a lower-left triangle heatmap plot. This helper function is a quick wrapper to utilize the Rank2D Visualizer (Transformer) for one-off analysis. Parameters ---------- X : ndarray or DataFrame of shape n x m A matrix of n instances with m features y : ndarray or Series of length n An array or series of target or class values ax : matplotlib axes the axis to plot the figure on. algorithm : one of {pearson, covariance, spearman, kendalltau} the ranking algorithm to use, default is Pearson correlation. features : list A list of feature names to use. If a DataFrame is passed to fit and features is None, feature names are selected as the columns of the DataFrame. show_feature_names : boolean, default: True If True, the feature names are used to label the axis ticks in the plot. colormap : string or cmap optional string or matplotlib cmap to colorize lines Use either color to colorize the lines on a per class basis or colormap to color them on a continuous scale. Returns ------- ax : matplotlib axes Returns the axes that the parallel coordinates were drawn on.
def is_ternary(self, keyword):
    """return true if the given keyword is a ternary keyword
    for this ControlLine"""

    return keyword in {
        'if': set(['else', 'elif']),
        'try': set(['except', 'finally']),
        'for': set(['else'])
    }.get(self.keyword, [])
return true if the given keyword is a ternary keyword for this ControlLine
def get_cust_cols(path):
    """
    Load custom column definitions.
    """
    required_keys = ["title", "id", "sType", "visible"]

    with open(path, 'r') as f:
        try:
            cust_cols = ast.literal_eval(f.read())
        except Exception as err:
            sys.stderr.write("Invalid custom columns file: {}\n".format(path))
            sys.stderr.write("{}\n".format(err))
            sys.exit(1)

    # Validate
    for col in cust_cols:
        for required_key in required_keys:
            if required_key not in col:
                sys.stderr.write("Missing required key '{}' in custom "
                                 "column {}\n".format(required_key, col))
                sys.exit(1)
        if "jsonxs" not in col and "tpl" not in col:
            sys.stderr.write("You need to specify 'jsonxs' or 'tpl' "
                             "for custom column {}\n".format(col))
            sys.exit(1)

    return cust_cols
Load custom column definitions.
def preprocess(self, image, image_format):
    """
    Preprocess an image.

    An API hook for image pre-processing. Calls any image format specific
    pre-processors (if defined). I.E. If `image_format` is 'JPEG', this
    method will look for a method named `preprocess_JPEG`, if found
    `image` will be passed to it.

    Arguments:
        * `image`: a PIL Image instance
        * `image_format`: str, a valid PIL format (i.e. 'JPEG' or 'GIF')

    Subclasses should return a 2-tuple:
        * [0]: A PIL Image instance.
        * [1]: A dictionary of additional keyword arguments to be used
               when the instance is saved. If no additional keyword
               arguments, return an empty dict ({}).
    """
    save_kwargs = {'format': image_format}

    # Ensuring image is properly rotated
    if hasattr(image, '_getexif'):
        exif_datadict = image._getexif()  # returns None if no EXIF data
        if exif_datadict is not None:
            exif = dict(exif_datadict.items())
            orientation = exif.get(EXIF_ORIENTATION_KEY, None)
            if orientation == 3:
                image = image.transpose(Image.ROTATE_180)
            elif orientation == 6:
                image = image.transpose(Image.ROTATE_270)
            elif orientation == 8:
                image = image.transpose(Image.ROTATE_90)

    # Ensure any embedded ICC profile is preserved
    save_kwargs['icc_profile'] = image.info.get('icc_profile')

    if hasattr(self, 'preprocess_%s' % image_format):
        image, addl_save_kwargs = getattr(
            self, 'preprocess_%s' % image_format
        )(image=image)
        save_kwargs.update(addl_save_kwargs)

    return image, save_kwargs
Preprocess an image. An API hook for image pre-processing. Calls any image format specific pre-processors (if defined). I.E. If `image_format` is 'JPEG', this method will look for a method named `preprocess_JPEG`, if found `image` will be passed to it. Arguments: * `image`: a PIL Image instance * `image_format`: str, a valid PIL format (i.e. 'JPEG' or 'GIF') Subclasses should return a 2-tuple: * [0]: A PIL Image instance. * [1]: A dictionary of additional keyword arguments to be used when the instance is saved. If no additional keyword arguments, return an empty dict ({}).
def debug_shell(user_ns, user_global_ns, traceback=None, execWrapper=None):
    """
    Spawns some interactive shell. Tries to use IPython if available.
    Falls back to :func:`pdb.post_mortem` or :func:`simple_debug_shell`.

    :param dict[str] user_ns:
    :param dict[str] user_global_ns:
    :param traceback:
    :param execWrapper:
    :return: nothing
    """
    ipshell = None
    try:
        # noinspection PyPackageRequirements
        import IPython
        have_ipython = True
    except ImportError:
        have_ipython = False
    if not ipshell and traceback and have_ipython:
        # noinspection PyBroadException
        try:
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            from IPython.core.debugger import Pdb
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            from IPython.terminal.debugger import TerminalPdb
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            from IPython.terminal.ipapp import TerminalIPythonApp
            ipapp = TerminalIPythonApp.instance()
            ipapp.interact = False  # Avoid output (banner, prints)
            ipapp.initialize(argv=[])
            def_colors = ipapp.shell.colors
            pdb_obj = TerminalPdb(def_colors)
            pdb_obj.botframe = None  # not sure. exception otherwise at quit

            def ipshell():
                """
                Run the IPython shell.
                """
                pdb_obj.interaction(None, traceback=traceback)
        except Exception:
            print("IPython Pdb exception:")
            better_exchook(*sys.exc_info(), autodebugshell=False)
    if not ipshell and have_ipython:
        # noinspection PyBroadException
        try:
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            import IPython
            # noinspection PyPackageRequirements,PyUnresolvedReferences
            import IPython.terminal.embed

            class DummyMod(object):
                """Dummy module"""
            module = DummyMod()
            module.__dict__ = user_global_ns
            module.__name__ = "_DummyMod"
            if "__name__" not in user_ns:
                user_ns = user_ns.copy()
                user_ns["__name__"] = "_DummyUserNsMod"
            ipshell = IPython.terminal.embed.InteractiveShellEmbed.instance(
                user_ns=user_ns, user_module=module)
        except Exception:
            print("IPython not available:")
            better_exchook(*sys.exc_info(), autodebugshell=False)
        else:
            if execWrapper:
                old = ipshell.run_code
                ipshell.run_code = lambda code: execWrapper(lambda: old(code))
    if ipshell:
        ipshell()
    else:
        print("Use simple debug shell:")
        if traceback:
            import pdb
            pdb.post_mortem(traceback)
        else:
            simple_debug_shell(user_global_ns, user_ns)
Spawns some interactive shell. Tries to use IPython if available. Falls back to :func:`pdb.post_mortem` or :func:`simple_debug_shell`. :param dict[str] user_ns: :param dict[str] user_global_ns: :param traceback: :param execWrapper: :return: nothing
def from_gaussian_draw(cls, pst, cov, num_reals=1, use_homegrown=True,
                       group_chunks=False, fill_fixed=True, enforce_bounds=False):
    """ instantiate a parameter ensemble from a covariance matrix

    Parameters
    ----------
    pst : pyemu.Pst
        a control file instance
    cov : (pyemu.Cov)
        covariance matrix to use for drawing
    num_reals : int
        number of realizations to generate
    use_homegrown : bool
        flag to use home-grown full cov draws...much faster
        than numpy...
    group_chunks : bool
        flag to break up draws by par groups.  Only applies
        to homegrown, full cov case.  Default is False
    fill_fixed : bool
        flag to fill in fixed parameters from the pst into the
        ensemble using the parval1 from the pst.  Default is True
    enforce_bounds : bool
        flag to enforce parameter bounds from the pst.  realized
        parameter values that violate bounds are simply changed to the
        value of the violated bound.  Default is False

    Returns
    -------
    ParameterEnsemble : ParameterEnsemble

    """
    # set up some column names
    #real_names = ["{0:d}".format(i)
    #              for i in range(num_reals)]
    real_names = np.arange(num_reals, dtype=np.int64)

    li = pst.parameter_data.partrans == "log"
    vals = pst.parameter_data.parval1.copy()
    vals[li] = vals.loc[li].apply(np.log10)

    # make sure everything is cool WRT ordering
    if list(vals.index.values) != cov.row_names:
        common_names = get_common_elements(vals.index.values,
                                           cov.row_names)
        if len(common_names) == 0:
            raise Exception("ParameterEnsemble::from_gaussian_draw() error: cov and pst share no common names")
        vals = vals.loc[common_names]
        cov = cov.get(common_names)
    else:
        common_names = cov.row_names

    li = pst.parameter_data.partrans.loc[common_names] == "log"
    if cov.isdiagonal:
        #print("making diagonal cov draws")
        #print("building mean and std dicts")
        arr = np.zeros((num_reals, len(vals)))
        stds = {pname: std for pname, std in zip(common_names, np.sqrt(cov.x.flatten()))}
        means = {pname: val for pname, val in zip(common_names, vals)}
        #print("numpy draw")
        arr = np.random.randn(num_reals, len(common_names))
        #print("post-processing")
        adj_pars = set(pst.adj_par_names)
        for i, pname in enumerate(common_names):
            if pname in adj_pars:
                #s = stds[pname]
                #v = means[pname]
                #arr[:,i] = np.random.normal(means[pname],stds[pname],
                #                            size=num_reals)
                arr[:, i] = (arr[:, i] * stds[pname]) + means[pname]
            else:
                arr[:, i] = means[pname]
        #print("build df")
        df = pd.DataFrame(data=arr, columns=common_names, index=real_names)

    else:
        if use_homegrown:
            print("making full cov draws with home-grown goodness")
            # generate standard normal vectors

            # jwhite - 18-dec-17: the cholesky version is giving significantly diff
            # results compared to eigen solve, so turning this off for now - need to
            # learn more about this...
            # use_chol = False
            # if use_chol:
            #     a = np.linalg.cholesky(cov.as_2d)
            # else:
            # decompose...
            if group_chunks:
                par_cov = pst.parameter_data.loc[cov.names, :]
                par_cov.loc[:, "idxs"] = np.arange(cov.shape[0])
                #print("algning cov")
                #cov.align(list(par_cov.parnme))
                pargps = par_cov.pargp.unique()
                #print("reserving reals matrix")
                reals = np.zeros((num_reals, cov.shape[0]))

                for ipg, pargp in enumerate(pargps):
                    pnames = list(par_cov.loc[par_cov.pargp == pargp, "parnme"])
                    idxs = par_cov.loc[par_cov.pargp == pargp, "idxs"]
                    #print("{0} of {1} drawing for par group '{2}' with {3} pars "
                    #      .format(ipg+1,len(pargps),pargp, len(idxs)))
                    s, e = idxs[0], idxs[-1]
                    #print("generating snv matrix")
                    snv = np.random.randn(num_reals, len(pnames))
                    cov_pg = cov.get(pnames)
                    if len(pnames) == 1:
                        std = np.sqrt(cov_pg.x)
                        reals[:, idxs] = vals[pnames].values[0] + (snv * std)
                    else:
                        try:
                            cov_pg.inv
                        except:
                            covname = "trouble_{0}.cov".format(pargp)
                            #print('saving toubled cov matrix to {0}'.format(covname))
                            cov_pg.to_ascii(covname)
                            #print(cov_pg.get_diagonal_vector())
                            raise Exception("error inverting cov for par group '{0}'," +
                                            "saved trouble cov to {1}".
                                            format(pargp, covname))
                        v, w = np.linalg.eigh(cov_pg.as_2d)
                        # check for near zero eig values
                        #vdiag = np.diag(v)
                        for i in range(v.shape[0]):
                            if v[i] > 1.0e-10:
                                pass
                            else:
                                print("near zero eigen value found", v[i],
                                      "at index", i, " of ", v.shape[0])
                                v[i] = 0.0
                        vsqrt = np.sqrt(v)
                        vsqrt[i:] = 0.0
                        v = np.diag(vsqrt)
                        a = np.dot(w, v)
                        pg_vals = vals[pnames]
                        for i in range(num_reals):
                            #v = snv[i,:]
                            #p = np.dot(a,v)
                            reals[i, idxs] = pg_vals + np.dot(a, snv[i, :])

            else:
                #print("generating snv matrix")
                snv = np.random.randn(num_reals, cov.shape[0])
                #print("eigen solve for full cov")
                v, w = np.linalg.eigh(cov.as_2d)
                #w, v, other = np.linalg.svd(cov.as_2d,full_matrices=True,compute_uv=True)

                # vdiag = np.diag(v)
                for i in range(v.shape[0]):
                    if v[i] > 1.0e-10:
                        pass
                    else:
                        print("near zero eigen value found", v[i],
                              "at index", i, " of ", v.shape[0])
                        v[i] = 0.0

                # form projection matrix
                #print("form projection")
                a = np.dot(w, np.sqrt(np.diag(v)))
                #print(a)

                # project...
                reals = []
                for vec in snv:
                    real = vals + np.dot(a, vec)
                    reals.append(real)

            df = pd.DataFrame(reals, columns=common_names, index=real_names)
            #vals = pe.mean_values
        else:
            #print("making full cov draws with numpy")
            df = pd.DataFrame(data=np.random.multivariate_normal(vals, cov.as_2d, num_reals),
                              columns=common_names, index=real_names)
            #print(df.shape,cov.shape)

    df.loc[:, li] = 10.0**df.loc[:, li]

    # replace the realizations for fixed parameters with the original
    # parval1 in the control file
    #print("handling fixed pars")
    #pe.pst.parameter_data.index = pe.pst.parameter_data.parnme
    if fill_fixed:
        par = pst.parameter_data
        fixed_vals = par.loc[par.partrans.apply(lambda x: x in ["fixed", "tied"]), "parval1"]
        for fname, fval in zip(fixed_vals.index, fixed_vals.values):
            #print(fname)
            df.loc[:, fname] = fval
    #print("apply tied")
    new_pe = cls.from_dataframe(pst=pst, df=df)
    if enforce_bounds:
        new_pe.enforce()
    return new_pe
instantiate a parameter ensemble from a covariance matrix Parameters ---------- pst : pyemu.Pst a control file instance cov : (pyemu.Cov) covariance matrix to use for drawing num_reals : int number of realizations to generate use_homegrown : bool flag to use home-grown full cov draws...much faster than numpy... group_chunks : bool flag to break up draws by par groups. Only applies to homegrown, full cov case. Default is False fill_fixed : bool flag to fill in fixed parameters from the pst into the ensemble using the parval1 from the pst. Default is True enforce_bounds : bool flag to enforce parameter bounds from the pst. realized parameter values that violate bounds are simply changed to the value of the violated bound. Default is False Returns ------- ParameterEnsemble : ParameterEnsemble
def limits(self, x1, x2, y1, y2):
    """Set the coordinate boundaries of plot"""
    import math
    self.x1 = x1
    self.x2 = x2
    self.y1 = y1
    self.y2 = y2
    self.xscale = (self.cx2 - self.cx1) / (self.x2 - self.x1)
    self.yscale = (self.cy2 - self.cy1) / (self.y2 - self.y1)

    ra1 = self.x1
    ra2 = self.x2
    dec1 = self.y1
    dec2 = self.y2

    (sx1, sy2) = self.p2c((ra1, dec1))
    (sx2, sy1) = self.p2c((ra2, dec2))

    self.config(scrollregion=(sx1 - self.lgutter, sy1 + self.bgutter,
                              sx2 + self.rgutter, sy2 - self.tgutter))
Set the coordinate boundaries of plot
def neighbors(self, subid, params=None):
    ''' v1/server/neighbors
    GET - account
    Determine what other subscriptions are hosted on the same
    physical host as a given subscription.

    Link: https://www.vultr.com/api/#server_neighbors
    '''
    params = update_params(params, {'SUBID': subid})
    return self.request('/v1/server/neighbors', params, 'GET')
v1/server/neighbors GET - account Determine what other subscriptions are hosted on the same physical host as a given subscription. Link: https://www.vultr.com/api/#server_neighbors
def column_lists_equal(a: List[Column], b: List[Column]) -> bool:
    """
    Are all columns in list ``a`` equal to their counterparts in list ``b``,
    as per :func:`columns_equal`?
    """
    n = len(a)
    if len(b) != n:
        return False
    for i in range(n):
        if not columns_equal(a[i], b[i]):
            log.debug("Mismatch: {!r} != {!r}", a[i], b[i])
            return False
    return True
Are all columns in list ``a`` equal to their counterparts in list ``b``, as per :func:`columns_equal`?
def parse(self, node):
    """ Return generator yielding Field objects for a given node
    """
    self._attrs = {}
    vals = []
    yielded = False
    for x in self._read_parts(node):
        if isinstance(x, Field):
            yielded = True
            x.attrs = self._attrs
            yield x
        else:
            vals.append(ustr(x).strip(' \n\t'))

    joined = ' '.join([x for x in vals if x])
    if joined:
        yielded = True
        yield Field(node, guess_type(joined), self._attrs)

    if not yielded:
        yield Field(node, "", self._attrs)
Return generator yielding Field objects for a given node
def activatewindow(self, window_name): """ Activate window. @param window_name: Window name to look for, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @return: 1 on success. @rtype: integer """ window_handle = self._get_window_handle(window_name) self._grabfocus(window_handle) return 1
Activate window. @param window_name: Window name to look for, either full name, LDTP's name convention, or a Unix glob. @type window_name: string @return: 1 on success. @rtype: integer
def _listen_inbox_messages(self): """Start listening to messages, using a separate thread.""" # Collect messages in a queue inbox_queue = Queue(maxsize=self._n_jobs * 4) threads = [] # type: List[BotQueueWorker] try: # Create n_jobs inbox threads for i in range(self._n_jobs): t = BotQueueWorker(name='InboxThread-t-{}'.format(i), jobs=inbox_queue, target=self._process_inbox_message) t.start() self._threads.append(t) # Iterate over all messages in the messages stream for message in self._reddit.inbox.stream(): # Check for stopping if self._stop: self._do_stop(inbox_queue, threads) break inbox_queue.put(message) self.log.debug('Listen inbox stopped') except Exception as e: self._do_stop(inbox_queue, threads) self.log.error('Exception while listening to inbox:') self.log.error(str(e)) self.log.error('Waiting for 10 minutes and trying again.') time.sleep(10 * 60) # Retry: self._listen_inbox_messages()
Start listening to messages, using a separate thread.
def parse(self, text, noprefix=False):
        """Parse date and time from given date string.

        :param text: Any human readable string
        :type text: str|unicode
        :param noprefix: If set to True, doesn't use prefix-based date pattern filtering settings
        :type noprefix: bool
        :return: Returns :class:`datetime <datetime.datetime>` representing parsed date if successful, else returns None
        :rtype: :class:`datetime <datetime.datetime>`."""
        res = self.match(text, noprefix)
        if res:
            r = res['values']
            p = res['pattern']
            d = {'month': 0, 'day': 0, 'year': 0}
            if 'noyear' in p and p['noyear'] == True:
                d['year'] = datetime.datetime.now().year

            for k, v in list(r.items()):
                d[k] = int(v)
            dt = datetime.datetime(**d)
            return dt
        return None
Parse date and time from given date string.

        :param text: Any human readable string
        :type text: str|unicode
        :param noprefix: If set to True, doesn't use prefix-based date pattern filtering settings
        :type noprefix: bool
        :return: Returns :class:`datetime <datetime.datetime>` representing parsed date if successful, else returns None
        :rtype: :class:`datetime <datetime.datetime>`.
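A hedged usage sketch of the parse() method above; DateParser is only a placeholder name for whatever class defines it, and the input string is illustrative:

parser = DateParser()                  # hypothetical class providing parse()
dt = parser.parse("12 may 2016")
if dt is not None:
    print(dt.year, dt.month, dt.day)   # -> 2016 5 12, if a pattern matches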
def vlan_classifier_group_groupid(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") vlan = ET.SubElement(config, "vlan", xmlns="urn:brocade.com:mgmt:brocade-vlan") classifier = ET.SubElement(vlan, "classifier") group = ET.SubElement(classifier, "group") oper_key = ET.SubElement(group, "oper") oper_key.text = kwargs.pop('oper') rule_name_key = ET.SubElement(group, "rule-name") rule_name_key.text = kwargs.pop('rule_name') ruleid_key = ET.SubElement(group, "ruleid") ruleid_key.text = kwargs.pop('ruleid') groupid = ET.SubElement(group, "groupid") groupid.text = kwargs.pop('groupid') callback = kwargs.pop('callback', self._callback) return callback(config)
Auto Generated Code
def get_message(self, id): """ Return a Message object for given id. :param id: The id of the message object to return. """ url = self._base_url + "/3/message/{0}".format(id) resp = self._send_request(url) return Message(resp, self)
Return a Message object for given id. :param id: The id of the message object to return.
def select_from_array(cls, array, identifier): """Return a region from a numpy array. :param array: :class:`numpy.ndarray` :param identifier: value representing the region to select in the array :returns: :class:`jicimagelib.region.Region` """ base_array = np.zeros(array.shape) array_coords = np.where(array == identifier) base_array[array_coords] = 1 return cls(base_array)
Return a region from a numpy array. :param array: :class:`numpy.ndarray` :param identifier: value representing the region to select in the array :returns: :class:`jicimagelib.region.Region`
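An illustrative sketch of selecting a labelled region from a small mask; the array values are made up, and the import path follows the class named in the docstring above:

import numpy as np
from jicimagelib.region import Region   # class providing select_from_array above

labels = np.array([[0, 0, 2],
                   [1, 1, 2],
                   [1, 1, 0]])
region = Region.select_from_array(labels, identifier=1)
# region wraps a binary mask that is 1 wherever labels == 1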
def online(note, github_repository, github_username): """Upload the repository to GitHub and enable testing on Travis CI.""" callbacks.git_installed() try: repo = git.Repo() except git.InvalidGitRepositoryError: LOGGER.critical( "'memote online' requires a git repository in order to follow " "the current branch's commit history.") sys.exit(1) if note == "memote-ci access": note = "{} to {}".format(note, github_repository) # Github API calls # Set up the git repository on GitHub via API v3. gh_repo_name, auth_token, repo_access_token = _setup_gh_repo( github_repository, github_username, note ) # Travis API calls # Configure Travis CI to use Github auth token then return encrypted token. secret = _setup_travis_ci(gh_repo_name, auth_token, repo_access_token) # Save the encrypted token in the travis config then commit and push LOGGER.info("Storing GitHub token in '.travis.yml'.") config = te.load_travis_configuration(".travis.yml") global_env = config.setdefault("env", {}).get("global") if global_env is None: config["env"]["global"] = global_env = {} try: global_env["secure"] = secret except TypeError: global_env.append({"secure": secret}) te.dump_travis_configuration(config, ".travis.yml") LOGGER.info("Add, commit and push changes to '.travis.yml' to GitHub.") repo.index.add([".travis.yml"]) check_call( ['git', 'commit', '-m', "chore: add encrypted GitHub access token"] ) check_call( ['git', 'push', '--set-upstream', 'origin', repo.active_branch.name] )
Upload the repository to GitHub and enable testing on Travis CI.
def until_state(self, state, timeout=None): """Return a tornado Future that will resolve when the requested state is set""" if state not in self._valid_states: raise ValueError('State must be one of {0}, not {1}' .format(self._valid_states, state)) if state != self._state: if timeout: return with_timeout(self._ioloop.time() + timeout, self._waiting_futures[state], self._ioloop) else: return self._waiting_futures[state] else: f = tornado_Future() f.set_result(True) return f
Return a tornado Future that will resolve when the requested state is set
def search(context, keywords, module, raw, kind): """Query Windows identifiers and locations. Windows database must be prepared before using this. """ logging.info(_('Entering search mode')) sense = context.obj['sense'] func = sense.query_names if module else sense.query_info none = True for keyword in keywords: output = func(keyword, raw, kind) if output: none = False print(output) else: logging.warning(_('No results: %s'), keyword) sys.exit(1 if none else 0)
Query Windows identifiers and locations. Windows database must be prepared before using this.
def run_executable(repo, args, includes): """ Run the executable and capture the input and output... """ # Get platform information mgr = plugins_get_mgr() repomgr = mgr.get(what='instrumentation', name='platform') platform_metadata = repomgr.get_metadata() print("Obtaining Commit Information") (executable, commiturl) = \ find_executable_commitpath(repo, args) # Create a local directory tmpdir = tempfile.mkdtemp() # Construct the strace command print("Running the command") strace_filename = os.path.join(tmpdir,'strace.out.txt') cmd = ["strace.py", "-f", "-o", strace_filename, "-s", "1024", "-q", "--"] + args # Run the command p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = p.communicate() # Capture the stdout/stderr stdout = os.path.join(tmpdir, 'stdout.log.txt') with open(stdout, 'w') as fd: fd.write(out.decode('utf-8')) stderr = os.path.join(tmpdir, 'stderr.log.txt') with open(stderr, 'w') as fd: fd.write(err.decode('utf-8')) # Check the strace output files = extract_files(strace_filename, includes) # Now insert the execution metadata execution_metadata = { 'likelyexecutable': executable, 'commitpath': commiturl, 'args': args, } execution_metadata.update(platform_metadata) for i in range(len(files)): files[i]['execution_metadata'] = execution_metadata return files
Run the executable and capture the input and output...
def _get_video_id(self, url=None):
        """
        Extract video id. It will try to avoid making an HTTP request if it can find the ID in the URL, but otherwise
        it will try to scrape it from the HTML document. Returns None in case it's unable to extract the ID at all.
        """
        if url:
            html_data = self.http.request("get", url).text
        else:
            html_data = self.get_urldata()
        match = re.search(r'data-video-id="([0-9]+)"', html_data)
        if match:
            return match.group(1)
        match = re.search(r'data-videoid="([0-9]+)', html_data)
        if match:
            return match.group(1)
        match = re.search(r'"mediaGuid":"([0-9]+)"', html_data)
        if match:
            return match.group(1)
        clips = False
        slug = None
        match = re.search('params":({.*}),"query', self.get_urldata())
        if match:
            jansson = json.loads(match.group(1))
            if "seasonNumberOrVideoId" in jansson:
                season = jansson["seasonNumberOrVideoId"]
                match = re.search(r"\w-(\d+)$", season)
                if match:
                    season = match.group(1)
            else:
                match = self._conentpage(self.get_urldata())
                if match:  # this only happens on the program page?
                    janson2 = json.loads(match.group(1))
                    if janson2["formatPage"]["format"]:
                        season = janson2["formatPage"]["format"]["seasonNumber"]
                        return janson2["formatPage"]["format"]["videos"][str(season)]["program"][0]["id"]
                return None
            if "videoIdOrEpisodeNumber" in jansson:
                videp = jansson["videoIdOrEpisodeNumber"]
                match = re.search(r'(\w+)-(\d+)', videp)
                if match:
                    episodenr = match.group(2)
                else:
                    episodenr = videp
                    clips = True
                match = re.search(r'(s\w+)-(\d+)', season)
                if match:
                    season = match.group(2)
            else:
                # sometimes videoIdOrEpisodeNumber does not work.. this is a workaround
                match = re.search(r'(episode|avsnitt)-(\d+)', self.url)
                if match:
                    episodenr = match.group(2)
                else:
                    episodenr = season
            if "slug" in jansson:
                slug = jansson["slug"]

            if clips:
                return episodenr
            else:
                match = self._conentpage(self.get_urldata())
                if match:
                    janson = json.loads(match.group(1))
                    for i in janson["formatPage"]["format"]["videos"].keys():
                        if "program" in janson["formatPage"]["format"]["videos"][str(i)]:
                            for n in janson["formatPage"]["format"]["videos"][i]["program"]:
                                if str(n["episodeNumber"]) and int(episodenr) == n["episodeNumber"] and int(season) == n["seasonNumber"]:
                                    if slug is None or slug == n["formatSlug"]:
                                        return n["id"]
                                elif n["id"] == episodenr:
                                    return episodenr

        parse = urlparse(self.url)
        match = re.search(r'/\w+/(\d+)', parse.path)
        if match:
            return match.group(1)

        match = re.search(r'iframe src="http://play.juicyplay.se[^\"]+id=(\d+)', html_data)
        if match:
            return match.group(1)

        match = re.search(r'<meta property="og:image" content="([\S]+)"', html_data)
        if match:
            return match.group(1).split("/")[-2]

        return None
Extract video id. It will try to avoid making an HTTP request if it can find the ID in the URL, but otherwise it will try to scrape it from the HTML document. Returns None in case it's unable to extract the ID at all.
def _apply_decorator_to_methods(cls, decorator):
        """
        This helper can apply a given decorator to all methods on the current
        Resource.

        NOTE: In contrast to ``Resource.method_decorators``, which has a
        similar use-case, this method applies decorators directly and overrides
        methods in-place, while the decorators listed in
        ``Resource.method_decorators`` are applied on every request, which is
        quite a waste of resources.
        """
        for method in cls.methods:
            method_name = method.lower()
            decorated_method_func = decorator(getattr(cls, method_name))
            setattr(cls, method_name, decorated_method_func)
This helper can apply a given decorator to all methods on the current
        Resource.

        NOTE: In contrast to ``Resource.method_decorators``, which has a
        similar use-case, this method applies decorators directly and overrides
        methods in-place, while the decorators listed in
        ``Resource.method_decorators`` are applied on every request, which is
        quite a waste of resources.
def _get_handler_set(cls, request, fail_enum, header_proto=None): """Goes through the list of ClientSortControls and returns a list of unique _ValueHandlers. Maintains order, but drops ClientSortControls that have already appeared to help prevent spamming. """ added = set() handlers = [] for controls in request.sorting: control_bytes = controls.SerializeToString() if control_bytes not in added: added.add(control_bytes) handlers.append( cls._ValueHandler(controls, fail_enum, header_proto)) return handlers
Goes through the list of ClientSortControls and returns a list of unique _ValueHandlers. Maintains order, but drops ClientSortControls that have already appeared to help prevent spamming.
def save_to_cache(dxobject): ''' :param dxobject: a dxpy object handler for an object to save to the cache :raises: :exc:`~dxpy.exceptions.DXError` if this is called with dxpy.JOB_ID not set, or if "DX_PROJECT_CACHE_ID" is not found in the environment variables Clones the given object to the project cache. Example:: @dxpy.entry_point('main') def main(*args, **kwargs): x = load_from_cache(name="Indexed genome", classname='file') if x is None: x = compute_result(*args) save_to_cache(x) ''' if dxpy.JOB_ID is None: raise DXError('Not called by a job') if 'DX_PROJECT_CACHE_ID' not in os.environ: raise DXError('Project cache ID could not be found in the environment variable DX_PROJECT_CACHE_ID') dxobject.clone(os.environ.get('DX_PROJECT_CACHE_ID'))
:param dxobject: a dxpy object handler for an object to save to the cache :raises: :exc:`~dxpy.exceptions.DXError` if this is called with dxpy.JOB_ID not set, or if "DX_PROJECT_CACHE_ID" is not found in the environment variables Clones the given object to the project cache. Example:: @dxpy.entry_point('main') def main(*args, **kwargs): x = load_from_cache(name="Indexed genome", classname='file') if x is None: x = compute_result(*args) save_to_cache(x)
def get_submission(self, submissionid, user_check=True): """ Get a submission from the database """ sub = self._database.submissions.find_one({'_id': ObjectId(submissionid)}) if user_check and not self.user_is_submission_owner(sub): return None return sub
Get a submission from the database
def before(self, callback: Union[Callable, str]) -> "Control": """Register a control method that reacts before the trigger method is called. Parameters: callback: The control method. If given as a callable, then that function will be used as the callback. If given as a string, then the control will look up a method with that name when reacting (useful when subclassing). """ if isinstance(callback, Control): callback = callback._before self._before = callback return self
Register a control method that reacts before the trigger method is called. Parameters: callback: The control method. If given as a callable, then that function will be used as the callback. If given as a string, then the control will look up a method with that name when reacting (useful when subclassing).
def print_graph(self, format=None, output=sys.stdout, depth=0, **kwargs): """ Print the graph for self's nodes. Args: format (str): output format (csv, json or text). output (file): file descriptor on which to write. depth (int): depth of the graph. """ graph = self.as_graph(depth=depth) graph.print(format=format, output=output, **kwargs)
Print the graph for self's nodes. Args: format (str): output format (csv, json or text). output (file): file descriptor on which to write. depth (int): depth of the graph.
def i2c_write(self, address, *args): """ Write data to an i2c device. :param address: i2c device address :param args: A variable number of bytes to be sent to the device """ data = [address, self.I2C_WRITE] for item in args: data.append(item & 0x7f) data.append((item >> 7) & 0x7f) self._command_handler.send_sysex(self._command_handler.I2C_REQUEST, data)
Write data to an i2c device. :param address: i2c device address :param args: A variable number of bytes to be sent to the device
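A small hedged sketch of calling the method above; ``board`` stands for an already-initialized instance of the surrounding (PyMata-style) class, and the device address and register bytes are invented for illustration:

TMP_SENSOR_ADDR = 0x48                               # hypothetical 7-bit i2c address
board.i2c_write(TMP_SENSOR_ADDR, 0x01, 0x60, 0xA0)   # send three data bytes to the device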
def list_namespaced_config_map(self, namespace, **kwargs): """ list or watch objects of kind ConfigMap This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_namespaced_config_map(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. 
When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1ConfigMapList If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.list_namespaced_config_map_with_http_info(namespace, **kwargs) else: (data) = self.list_namespaced_config_map_with_http_info(namespace, **kwargs) return data
list or watch objects of kind ConfigMap This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.list_namespaced_config_map(namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. 
When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1ConfigMapList If the method is called asynchronously, returns the request thread.
def on_epoch_end(self, epoch, **kwargs:Any)->None: "Compare the value monitored to its best and maybe reduce lr." current = self.get_monitor_value() if current is None: return if self.operator(current - self.min_delta, self.best): self.best,self.wait = current,0 else: self.wait += 1 if self.wait > self.patience: self.opt.lr *= self.factor self.wait = 0 print(f'Epoch {epoch}: reducing lr to {self.opt.lr}')
Compare the value monitored to its best and maybe reduce lr.
def has_device_info(self, key): """Return true iff cache has information about the device.""" if _debug: DeviceInfoCache._debug("has_device_info %r", key) return key in self.cache
Return true iff cache has information about the device.
def _maybe_limit_chromosomes(data): """Potentially limit chromosomes to avoid problematically named HLA contigs. HLAs have ':' characters in them which confuse downstream processing. If we have no problematic chromosomes we don't limit anything. """ std_chroms = [] prob_chroms = [] noalt_calling = "noalt_calling" in dd.get_tools_on(data) or "altcontigs" in dd.get_exclude_regions(data) for contig in ref.file_contigs(dd.get_ref_file(data)): if contig.name.find(":") > 0 or (noalt_calling and not chromhacks.is_nonalt(contig.name)): prob_chroms.append(contig.name) else: std_chroms.append(contig.name) if len(prob_chroms) > 0: return std_chroms else: return []
Potentially limit chromosomes to avoid problematically named HLA contigs. HLAs have ':' characters in them which confuse downstream processing. If we have no problematic chromosomes we don't limit anything.
def SoS_exec(script: str, _dict: dict = None, return_result: bool = True) -> None: '''Execute a statement.''' if _dict is None: _dict = env.sos_dict.dict() if not return_result: exec( compile(script, filename=stmtHash.hash(script), mode='exec'), _dict) return None try: stmts = list(ast.iter_child_nodes(ast.parse(script))) if not stmts: return if isinstance(stmts[-1], ast.Expr): # the last one is an expression and we will try to return the results # so we first execute the previous statements if len(stmts) > 1: exec( compile( ast.Module(body=stmts[:-1]), filename=stmtHash.hash(script), mode="exec"), _dict) # then we eval the last one res = eval( compile( ast.Expression(body=stmts[-1].value), filename=stmtHash.hash(script), mode="eval"), _dict) else: # otherwise we just execute the entire code exec( compile(script, filename=stmtHash.hash(script), mode='exec'), _dict) res = None except SyntaxError as e: raise SyntaxError(f"Invalid code {script}: {e}") # if check_readonly: # env.sos_dict.check_readonly_vars() return res
Execute a statement.
def extract_arguments(frame): """ Extracts the arguments from given frame. :param frame: Frame. :type frame: object :return: Arguments. :rtype: tuple """ arguments = ([], None, None) try: source = textwrap.dedent("".join(inspect.getsourcelines(frame)[0]).replace("\\\n", "")) except (IOError, TypeError) as error: return arguments try: node = ast.parse(source) except: return arguments if not node.body: return arguments node = node.body[0] if not isinstance(node, ast.FunctionDef): return arguments return [arg.id for arg in node.args.args], node.args.vararg, node.args.kwarg
Extracts the arguments from given frame. :param frame: Frame. :type frame: object :return: Arguments. :rtype: tuple
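A rough usage sketch (not from the source): pass the current frame of a function to recover its declared argument names; note the snippet above reads ``arg.id``, which follows the Python 2 AST layout.

import inspect

def example(a, b, *rest, **options):
    # Under the Python 2 AST assumed above this returns (['a', 'b'], 'rest', 'options')
    return extract_arguments(inspect.currentframe())

print(example(1, 2))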
def save_retinotopy_cache(sdir, sid, hemi, props, alignment='MSMAll', overwrite=False): ''' Saves the subject's retinotopy cache from the given properties. The first argument is the subject's directory (not the subjects' directory). ''' h = hemi[:2] htype = hemi.split('_')[1] if _auto_download_options is None \ or 'retinotopy_cache' not in _auto_download_options \ or not _auto_download_options['retinotopy_cache']: return files = {k:os.path.join(sdir, 'retinotopy', v % (h, alignment)) for (k,v) in six.iteritems(_retinotopy_cache_tr[htype])} for (p,fl) in six.iteritems(files): if p not in props or (not overwrite and os.path.exists(fl)): continue p = np.asarray(props[p]) if np.issubdtype(p.dtype, np.floating): p = np.asarray(p, np.float32) dr = os.path.split(os.path.abspath(fl))[0] if not os.path.isdir(dr): os.makedirs(os.path.abspath(dr), 0o755) nyio.save(fl, p)
Saves the subject's retinotopy cache from the given properties. The first argument is the subject's directory (not the subjects' directory).
def delete_event(self, id, **data): """ DELETE /events/:id/ Deletes an event if the delete is permitted. In order for a delete to be permitted, there must be no pending or completed orders. Returns a boolean indicating success or failure of the delete. """ return self.delete("/events/{0}/".format(id), data=data)
DELETE /events/:id/ Deletes an event if the delete is permitted. In order for a delete to be permitted, there must be no pending or completed orders. Returns a boolean indicating success or failure of the delete.
def minimum_required(version):
    """Decorator to specify the minimum SDK version required.

    Args:
      version (str): valid version string

    Returns:
      A decorator function.
    """
    def _minimum_required(func):
        """Internal decorator that wraps around the given function.

        Args:
          func (function): function being decorated

        Returns:
          The wrapper function.
        """
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            """Wrapper function to compare the DLL's SDK version.

            Args:
              self (JLink): the ``JLink`` instance
              args (list): list of arguments to pass to ``func``
              kwargs (dict): key-word arguments dict to pass to ``func``

            Returns:
              The return value of the wrapped function.

            Raises:
              JLinkException: if the DLL's version is less than ``version``.
            """
            if list(self.version) < list(version):
                raise errors.JLinkException('Version %s required.' % version)
            return func(self, *args, **kwargs)
        return wrapper
    return _minimum_required
Decorator to specify the minimum SDK version required. Args: version (str): valid version string Returns: A decorator function.
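A hedged sketch of how such a decorator is typically applied; the class body, method name, and version strings below are illustrative only:

class JLink(object):
    @property
    def version(self):
        return '6.00'                      # pretend DLL version string

    @minimum_required('6.10')
    def newer_feature(self):               # hypothetical method
        return 'ok'

# Calling JLink().newer_feature() would raise errors.JLinkException here,
# because '6.00' compares less than the required '6.10'.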
def _copy_to_configdir(items, out_dir, args): """Copy configuration files like PED inputs to working config directory. """ out = [] for item in items: ped_file = tz.get_in(["metadata", "ped"], item) if ped_file and os.path.exists(ped_file): ped_config_file = os.path.join(out_dir, "config", os.path.basename(ped_file)) if not os.path.exists(ped_config_file): shutil.copy(ped_file, ped_config_file) item["metadata"]["ped"] = ped_config_file out.append(item) if hasattr(args, "systemconfig") and args.systemconfig: shutil.copy(args.systemconfig, os.path.join(out_dir, "config", os.path.basename(args.systemconfig))) return out
Copy configuration files like PED inputs to working config directory.
def write_dict_to_yaml(dictionary, path, **kwargs): """ Writes a dictionary to a yaml file :param dictionary: the dictionary to be written :param path: the absolute path of the target yaml file :param kwargs: optional additional parameters for dumper """ with open(path, 'w') as f: yaml.dump(dictionary, f, indent=4, **kwargs)
Writes a dictionary to a yaml file :param dictionary: the dictionary to be written :param path: the absolute path of the target yaml file :param kwargs: optional additional parameters for dumper
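A minimal usage sketch; the path is illustrative, and the extra keyword is simply forwarded to yaml.dump as described above:

config = {"name": "demo", "values": [1, 2, 3]}
write_dict_to_yaml(config, "/tmp/demo_config.yaml", default_flow_style=False)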
def _restore_constructor(self, cls): """ Restore the original constructor, lose track of class. """ cls.__init__ = self._observers[cls].init del self._observers[cls]
Restore the original constructor, lose track of class.
def _compute_forearc_backarc_term(self, C, sites, dists): """ Computes the forearc/backarc scaling term given by equation (4). """ f_faba = np.zeros_like(dists.rhypo) # Term only applies to backarc sites (F_FABA = 0. for forearc) max_dist = dists.rhypo[sites.backarc] max_dist[max_dist < 85.0] = 85.0 f_faba[sites.backarc] = C['theta7'] +\ (C['theta8'] * np.log(max_dist / 40.0)) return f_faba
Computes the forearc/backarc scaling term given by equation (4).
def as_xml(self,parent): """Create vcard-tmp XML representation of the field. :Parameters: - `parent`: parent node for the element :Types: - `parent`: `libxml2.xmlNode` :return: xml node with the field data. :returntype: `libxml2.xmlNode`""" n=parent.newChild(None,"N",None) n.newTextChild(None,"FAMILY",to_utf8(self.family)) n.newTextChild(None,"GIVEN",to_utf8(self.given)) n.newTextChild(None,"MIDDLE",to_utf8(self.middle)) n.newTextChild(None,"PREFIX",to_utf8(self.prefix)) n.newTextChild(None,"SUFFIX",to_utf8(self.suffix)) return n
Create vcard-tmp XML representation of the field. :Parameters: - `parent`: parent node for the element :Types: - `parent`: `libxml2.xmlNode` :return: xml node with the field data. :returntype: `libxml2.xmlNode`
def clip_box(dataset, bounds=None, invert=True, factor=0.35): """Clips a dataset by a bounding box defined by the bounds. If no bounds are given, a corner of the dataset bounds will be removed. Parameters ---------- bounds : tuple(float) Length 6 iterable of floats: (xmin, xmax, ymin, ymax, zmin, zmax) invert : bool Flag on whether to flip/invert the clip factor : float, optional If bounds are not given this is the factor along each axis to extract the default box. """ if bounds is None: def _get_quarter(dmin, dmax): """internal helper to get a section of the given range""" return dmax - ((dmax - dmin) * factor) xmin, xmax, ymin, ymax, zmin, zmax = dataset.bounds xmin = _get_quarter(xmin, xmax) ymin = _get_quarter(ymin, ymax) zmin = _get_quarter(zmin, zmax) bounds = [xmin, xmax, ymin, ymax, zmin, zmax] if isinstance(bounds, (float, int)): bounds = [bounds, bounds, bounds] if len(bounds) == 3: xmin, xmax, ymin, ymax, zmin, zmax = dataset.bounds bounds = (xmin,xmin+bounds[0], ymin,ymin+bounds[1], zmin,zmin+bounds[2]) if not isinstance(bounds, collections.Iterable) or len(bounds) != 6: raise AssertionError('Bounds must be a length 6 iterable of floats') xmin, xmax, ymin, ymax, zmin, zmax = bounds alg = vtk.vtkBoxClipDataSet() alg.SetInputDataObject(dataset) alg.SetBoxClip(xmin, xmax, ymin, ymax, zmin, zmax) port = 0 if invert: # invert the clip if needed port = 1 alg.GenerateClippedOutputOn() alg.Update() return _get_output(alg, oport=port)
Clips a dataset by a bounding box defined by the bounds. If no bounds are given, a corner of the dataset bounds will be removed. Parameters ---------- bounds : tuple(float) Length 6 iterable of floats: (xmin, xmax, ymin, ymax, zmin, zmax) invert : bool Flag on whether to flip/invert the clip factor : float, optional If bounds are not given this is the factor along each axis to extract the default box.
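For orientation, a hedged sketch of the same operation through the pyvista API, where a helper like this is exposed as a dataset filter; the mesh and bounds are arbitrary:

import pyvista as pv

mesh = pv.Sphere()
# invert flips which side of the box is kept
clipped = mesh.clip_box(bounds=(0.0, 0.5, 0.0, 0.5, 0.0, 0.5), invert=True)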
def checkout_git_repo(git_url, target_dir=None, commit=None, retry_times=GIT_MAX_RETRIES, branch=None, depth=None): """ clone provided git repo to target_dir, optionally checkout provided commit yield the ClonedRepoData and delete the repo when finished :param git_url: str, git repo to clone :param target_dir: str, filesystem path where the repo should be cloned :param commit: str, commit to checkout, SHA-1 or ref :param retry_times: int, number of retries for git clone :param branch: str, optional branch of the commit, required if depth is provided :param depth: int, optional expected depth :return: str, int, commit ID of HEAD """ tmpdir = tempfile.mkdtemp() target_dir = target_dir or os.path.join(tmpdir, "repo") try: yield clone_git_repo(git_url, target_dir, commit, retry_times, branch, depth) finally: shutil.rmtree(tmpdir)
clone provided git repo to target_dir, optionally checkout provided commit yield the ClonedRepoData and delete the repo when finished :param git_url: str, git repo to clone :param target_dir: str, filesystem path where the repo should be cloned :param commit: str, commit to checkout, SHA-1 or ref :param retry_times: int, number of retries for git clone :param branch: str, optional branch of the commit, required if depth is provided :param depth: int, optional expected depth :return: str, int, commit ID of HEAD
def union(self, other): """ Merges this range with a given range. >>> intrange(1, 5).union(intrange(5, 10)) intrange([1,10)) >>> intrange(1, 10).union(intrange(5, 15)) intrange([1,15)) Two ranges can not be merged if the resulting range would be split in two. This happens when the two sets are neither adjacent nor overlaps. >>> intrange(1, 5).union(intrange(10, 15)) Traceback (most recent call last): File "<stdin>", line 1, in <module> ValueError: Ranges must be either adjacent or overlapping This does not modify the range in place. This is the same as the ``+`` operator for two ranges in PostgreSQL. :param other: Range to merge with. :return: A new range that is the union of this and `other`. :raises ValueError: If `other` can not be merged with this range. """ if not self.is_valid_range(other): msg = "Unsupported type to test for union '{.__class__.__name__}'" raise TypeError(msg.format(other)) # Optimize empty ranges if not self: return other elif not other: return self # Order ranges to simplify checks if self < other: a, b = self, other else: a, b = other, self if (a.upper < b.lower or a.upper == b.lower and not a.upper_inc and not b.lower_inc) and not a.adjacent(b): raise ValueError("Ranges must be either adjacent or overlapping") # a.lower is guaranteed to be the lower bound, but either a.upper or # b.upper can be the upper bound if a.upper == b.upper: upper = a.upper upper_inc = a.upper_inc or b.upper_inc elif a.upper < b.upper: upper = b.upper upper_inc = b.upper_inc else: upper = a.upper upper_inc = a.upper_inc return self.__class__(a.lower, upper, a.lower_inc, upper_inc)
Merges this range with a given range. >>> intrange(1, 5).union(intrange(5, 10)) intrange([1,10)) >>> intrange(1, 10).union(intrange(5, 15)) intrange([1,15)) Two ranges can not be merged if the resulting range would be split in two. This happens when the two sets are neither adjacent nor overlaps. >>> intrange(1, 5).union(intrange(10, 15)) Traceback (most recent call last): File "<stdin>", line 1, in <module> ValueError: Ranges must be either adjacent or overlapping This does not modify the range in place. This is the same as the ``+`` operator for two ranges in PostgreSQL. :param other: Range to merge with. :return: A new range that is the union of this and `other`. :raises ValueError: If `other` can not be merged with this range.
def notify(request): ''' This view gets a POST request from the Javascript part of the AutoreloadPanel that contains a body that looks like:: template=/full/path/to/template.html&template=/another/template.eml:123456789& media=/static/url/to/a/file:133456780&media=http://media.localhost.local/base.css It is a list of template paths and a list of URLs that are part of the static/media directories of the project. The filename might be followed by a unix-epoch timestamp of the last modified date, seperated by a colon. The view then blocks the response as long until one of the specified files has a modified-time that is newer than the specified timestamp. It will return a line seperated list of those changed files. The view might also return with an empty response and status 204 (No Content) if the source code that the development server runs was modified. This is needed to free the current thread and allow django's runserver command to reload the source code, to take those changes into account. ''' def get_resources(names, resource_class): resources = [] for name in names: timestamp = None if ':' in name: name, timestamp = name.split(':', 1) try: timestamp = float(timestamp) except (ValueError, TypeError): timestamp = None resources.append(resource_class(name, timestamp)) return resources resources = get_resources(request.REQUEST.getlist('template'), Resource) resources += get_resources(request.REQUEST.getlist('media'), MediaResource) file_watcher = FileWatcher(resources) suspender = Suspender() updates = None while not updates: time.sleep(0.5) # break the watching action and return a response to release the # running thread. This is necessary since the looped check would # prevent django from loading changed source code or quitting the # development server with CTRL-C if suspender.should_suspend(): response = HttpResponse() response.status_code = 204 return response updates = file_watcher.get_updated_files() response = HttpResponse(json.dumps([ {'src': resource.name, 'mtime': resource.mtime} for resource in updates ])) return response
This view gets a POST request from the Javascript part of the AutoreloadPanel that contains
    a body that looks like::

        template=/full/path/to/template.html&template=/another/template.eml:123456789&
        media=/static/url/to/a/file:133456780&media=http://media.localhost.local/base.css

    It is a list of template paths and a list of URLs that are part of the static/media
    directories of the project. The filename might be followed by a unix-epoch timestamp of the
    last modified date, separated by a colon.

    The view then blocks the response until one of the specified files has a modified-time that
    is newer than the specified timestamp. It will return a line-separated list of those
    changed files.

    The view might also return with an empty response and status 204 (No Content) if the source
    code that the development server runs was modified. This is needed to free the current thread
    and allow django's runserver command to reload the source code, to take those changes into
    account.
def insert(conn, qualified_name: str, column_names, records): """Insert a collection of namedtuple records.""" query = create_insert_statement(qualified_name, column_names) with conn: with conn.cursor(cursor_factory=NamedTupleCursor) as cursor: for record in records: cursor.execute(query, record)
Insert a collection of namedtuple records.
def post(self, *messages):
        """Executes an HTTP request to create messages on the queue.
        Creates the queue if it does not exist.

        Arguments:
        messages -- An array of messages to be added to the queue.
        """
        url = "queues/%s/messages" % self.name

        msgs = [{'body': msg} if isinstance(msg, basestring) else msg
                for msg in messages]
        data = json.dumps({'messages': msgs})

        result = self.client.post(url=url, body=data,
                                  headers={'Content-Type': 'application/json'})

        return result['body']
Executes an HTTP request to create messages on the queue.
        Creates the queue if it does not exist.

        Arguments:
        messages -- An array of messages to be added to the queue.
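A hedged usage sketch; ``queue`` stands for an instance of the class defining post() above (an IronMQ-style queue client), and the message bodies are illustrative:

# Plain strings are wrapped as {'body': ...}; dicts are passed through unchanged.
queue.post("hello world",
           {"body": "delayed message", "delay": 60})   # 'delay' assumed to be accepted by the service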
def withSize(cls, minimum, maximum): """Creates a subclass with value size constraint. """ class X(cls): subtypeSpec = cls.subtypeSpec + constraint.ValueSizeConstraint( minimum, maximum) X.__name__ = cls.__name__ return X
Creates a subclass with value size constraint.
def create_prj_model(self):
        """Create and return a tree model that represents a list of projects

        :returns: the created model
        :rtype: :class:`jukeboxcore.gui.treemodel.TreeModel`
        :raises: None
        """
        prjs = djadapter.projects.all()
        rootdata = treemodel.ListItemData(['Name', 'Short', 'Rootpath'])
        prjroot = treemodel.TreeItem(rootdata)
        for prj in prjs:
            prjdata = djitemdata.ProjectItemData(prj)
            treemodel.TreeItem(prjdata, prjroot)
        prjmodel = treemodel.TreeModel(prjroot)
        return prjmodel
Create and return a tree model that represents a list of projects

        :returns: the created model
        :rtype: :class:`jukeboxcore.gui.treemodel.TreeModel`
        :raises: None
def neo(graph: BELGraph, connection: str, password: str): """Upload to neo4j.""" import py2neo neo_graph = py2neo.Graph(connection, password=password) to_neo4j(graph, neo_graph)
Upload to neo4j.
def configure (command = None, condition = None, options = None): """ Configures a new resource compilation command specific to a condition, usually a toolset selection condition. The possible options are: * <rc-type>(rc|windres) - Indicates the type of options the command accepts. Even though the arguments are all optional, only when a command, condition, and at minimum the rc-type option are given will the command be configured. This is so that callers don't have to check auto-configuration values before calling this. And still get the functionality of build failures when the resource compiler can't be found. """ rc_type = feature.get_values('<rc-type>', options) if rc_type: assert(len(rc_type) == 1) rc_type = rc_type[0] if command and condition and rc_type: flags('rc.compile.resource', '.RC', condition, command) flags('rc.compile.resource', '.RC_TYPE', condition, [rc_type.lower()]) flags('rc.compile.resource', 'DEFINES', [], ['<define>']) flags('rc.compile.resource', 'INCLUDES', [], ['<include>']) if debug(): print 'notice: using rc compiler ::', condition, '::', command
Configures a new resource compilation command specific to a condition, usually a toolset selection condition. The possible options are: * <rc-type>(rc|windres) - Indicates the type of options the command accepts. Even though the arguments are all optional, only when a command, condition, and at minimum the rc-type option are given will the command be configured. This is so that callers don't have to check auto-configuration values before calling this. And still get the functionality of build failures when the resource compiler can't be found.
def COOKIES(self): """ A dict-like SimpleCookie instance. This should not be used directly. See :meth:`set_cookie`. """ depr('The COOKIES dict is deprecated. Use `set_cookie()` instead.') # 0.10 if not self._cookies: self._cookies = SimpleCookie() return self._cookies
A dict-like SimpleCookie instance. This should not be used directly. See :meth:`set_cookie`.
def process(self, candidates): """ :arg list candidates: list of Candidates :returns: score-sorted list of Candidates """ return sorted(candidates, key=attrgetter('score'), reverse=self.reverse)
:arg list candidates: list of Candidates :returns: score-sorted list of Candidates
def convert_iou(pinyin):
    """iou conversion: restore the original finals.

    When iou, uei or uen follows an initial, it is written as iu, ui or un,
    e.g. niu (牛), gui (归), lun (论).
    """
    return IU_RE.sub(lambda m: m.group(1) + IU_MAP[m.group(2)], pinyin)
iou conversion: restore the original finals.

    When iou, uei or uen follows an initial, it is written as iu, ui or un,
    e.g. niu (牛), gui (归), lun (论).
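A rough sketch of the intended behaviour, with IU_RE and IU_MAP reconstructed as assumptions rather than taken from the source:

import re

IU_MAP = {'iu': 'iou', 'ui': 'uei', 'un': 'uen'}     # assumed mapping
IU_RE = re.compile(r'^([a-z]+?)(iu|ui|un)$')         # assumed pattern: initial + contracted final

print(convert_iou('niu'))   # expected 'niou' under this mapping
print(convert_iou('gui'))   # expected 'guei'
print(convert_iou('lun'))   # expected 'luen'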
def handle_user_post_save(sender, **kwargs): # pylint: disable=unused-argument """ Handle User model changes - checks if pending enterprise customer user record exists and upgrades it to actual link. If there are pending enrollments attached to the PendingEnterpriseCustomerUser, then this signal also takes the newly-created users and enrolls them in the relevant courses. """ created = kwargs.get("created", False) user_instance = kwargs.get("instance", None) if user_instance is None: return # should never happen, but better safe than 500 error try: pending_ecu = PendingEnterpriseCustomerUser.objects.get(user_email=user_instance.email) except PendingEnterpriseCustomerUser.DoesNotExist: return # nothing to do in this case if not created: # existing user changed his email to match one of pending link records - try linking him to EC try: existing_record = EnterpriseCustomerUser.objects.get(user_id=user_instance.id) message_template = "User {user} have changed email to match pending Enterprise Customer link, " \ "but was already linked to Enterprise Customer {enterprise_customer} - " \ "deleting pending link record" logger.info(message_template.format( user=user_instance, enterprise_customer=existing_record.enterprise_customer )) pending_ecu.delete() return except EnterpriseCustomerUser.DoesNotExist: pass # everything ok - current user is not linked to other ECs enterprise_customer_user = EnterpriseCustomerUser.objects.create( enterprise_customer=pending_ecu.enterprise_customer, user_id=user_instance.id ) pending_enrollments = list(pending_ecu.pendingenrollment_set.all()) if pending_enrollments: def _complete_user_enrollment(): # pylint: disable=missing-docstring for enrollment in pending_enrollments: # EnterpriseCustomers may enroll users in courses before the users themselves # actually exist in the system; in such a case, the enrollment for each such # course is finalized when the user registers with the OpenEdX platform. enterprise_customer_user.enroll( enrollment.course_id, enrollment.course_mode, cohort=enrollment.cohort_name) track_enrollment('pending-admin-enrollment', user_instance.id, enrollment.course_id) pending_ecu.delete() transaction.on_commit(_complete_user_enrollment) else: pending_ecu.delete()
Handle User model changes - checks if pending enterprise customer user record exists and upgrades it to actual link. If there are pending enrollments attached to the PendingEnterpriseCustomerUser, then this signal also takes the newly-created users and enrolls them in the relevant courses.
def where(self, predicate): """ Returns new Enumerable where elements matching predicate are selected :param predicate: predicate as a lambda expression :return: new Enumerable object """ if predicate is None: raise NullArgumentError(u"No predicate given for where clause") return Enumerable3(filter(predicate, self))
Returns new Enumerable where elements matching predicate are selected :param predicate: predicate as a lambda expression :return: new Enumerable object
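A short hedged example; Enumerable3 is the class defining where() above (py-linq style), and to_list() is assumed to be available on it:

numbers = Enumerable3([1, 2, 3, 4, 5])
evens = numbers.where(lambda x: x % 2 == 0)
print(evens.to_list())   # [2, 4], assuming to_list() materializes the enumerable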
def pipeline( ctx, input_fn, db_save, db_delete, output_fn, rules, species, namespace_targets, version, api, config_fn, ): """BEL Pipeline - BEL Nanopubs into BEL Edges This will process BEL Nanopubs into BEL Edges by validating, orthologizing (if requested), canonicalizing, and then computing the BEL Edges based on the given rule_set. \b input_fn: If input fn has *.gz, will read as a gzip file If input fn has *.jsonl*, will parsed as a JSONLines file IF input fn has *.json*, will be parsed as a JSON file If input fn has *.yaml* or *.yml*, will be parsed as a YAML file \b output_fn: If output fn has *.gz, will written as a gzip file If output fn has *.jsonl*, will written as a JSONLines file IF output fn has *.json*, will be written as a JSON file If output fn has *.yaml* or *.yml*, will be written as a YAML file If output fn has *.jgf, will be written as JSON Graph Formatted file """ if config_fn: config = bel.db.Config.merge_config(ctx.config, override_config_fn=config_fn) else: config = ctx.config # Configuration - will return the first truthy result in list else the default option if namespace_targets: namespace_targets = json.loads(namespace_targets) if rules: rules = rules.replace(" ", "").split(",") namespace_targets = utils.first_true( [namespace_targets, config["bel"]["lang"].get("canonical")], None ) rules = utils.first_true( [rules, config["bel"]["nanopub"].get("pipeline_edge_rules", False)], False ) api = utils.first_true( [api, config["bel_api"]["servers"].get("api_url", None)], None ) version = utils.first_true( [version, config["bel"]["lang"].get("default_bel_version", None)], None ) n = bnn.Nanopub() try: json_flag, jsonl_flag, yaml_flag, jgf_flag = False, False, False, False all_bel_edges = [] fout = None if db_save or db_delete: if db_delete: arango_client = bel.db.arangodb.get_client() bel.db.arangodb.delete_database(arango_client, "edgestore") else: arango_client = bel.db.arangodb.get_client() edgestore_handle = bel.db.arangodb.get_edgestore_handle(arango_client) elif re.search("ya?ml", output_fn): yaml_flag = True elif "jsonl" in output_fn: jsonl_flag = True elif "json" in output_fn: json_flag = True elif "jgf" in output_fn: jgf_flag = True if db_save: pass elif "gz" in output_fn: fout = gzip.open(output_fn, "wt") else: fout = open(output_fn, "wt") nanopub_cnt = 0 with timy.Timer() as timer: for np in bnf.read_nanopubs(input_fn): # print('Nanopub:\n', json.dumps(np, indent=4)) nanopub_cnt += 1 if nanopub_cnt % 100 == 0: timer.track(f"{nanopub_cnt} Nanopubs processed into Edges") bel_edges = n.bel_edges( np, namespace_targets=namespace_targets, orthologize_target=species, rules=rules, ) if db_save: bel.edge.edges.load_edges_into_db(edgestore_handle, edges=bel_edges) elif jsonl_flag: fout.write("{}\n".format(json.dumps(bel_edges))) else: all_bel_edges.extend(bel_edges) if db_save: pass elif yaml_flag: fout.write("{}\n".format(yaml.dumps(all_bel_edges))) elif json_flag: fout.write("{}\n".format(json.dumps(all_bel_edges))) elif jgf_flag: bnf.edges_to_jgf(output_fn, all_bel_edges) finally: if fout: fout.close()
BEL Pipeline - BEL Nanopubs into BEL Edges

    This will process BEL Nanopubs into BEL Edges by validating, orthologizing (if requested),
    canonicalizing, and then computing the BEL Edges based on the given rule_set.

    \b
    input_fn:
    If input fn has *.gz, will be read as a gzip file
    If input fn has *.jsonl*, will be parsed as a JSONLines file
    If input fn has *.json*, will be parsed as a JSON file
    If input fn has *.yaml* or *.yml*, will be parsed as a YAML file

    \b
    output_fn:
    If output fn has *.gz, will be written as a gzip file
    If output fn has *.jsonl*, will be written as a JSONLines file
    If output fn has *.json*, will be written as a JSON file
    If output fn has *.yaml* or *.yml*, will be written as a YAML file
    If output fn has *.jgf, will be written as a JSON Graph Formatted file