Columns: "Unnamed: 0" (int64, values 0–389k), "code" (string, lengths 26–79.6k), "docstring" (string, lengths 1–46.9k)
3,600
def labels(ctx):
    config = ctx.obj['config']
    repos = config.get('repos')
    labels = config.get('labels')
    if not isinstance(repos, list):
        raise CommandError('repos must be a list')
    if not isinstance(labels, dict):
        raise CommandError('labels must be a dict')
    git = GithubApi()
    for repo in repos:
        repo = git.repo(repo)
        for label, color in labels.items():
            if repo.label(label, color):
                click.echo('Created label %s in %s' % (label, repo))
            else:
                click.echo('Updated label %s in %s' % (label, repo))
Create or update labels in GitHub.
3,601
def run_lldptool(self, args):
    full_args = ['lldptool'] + args
    try:
        return utils.execute(full_args, root_helper=self.root_helper)
    except Exception as exc:
        LOG.error("Unable to execute %(cmd)s. "
                  "Exception: %(exception)s",
                  {'cmd': full_args, 'exception': str(exc)})
Function for invoking the lldptool utility.
3,602
def cfrom(self):
    cfrom = -1
    try:
        if self.lnk.type == Lnk.CHARSPAN:
            cfrom = self.lnk.data[0]
    except AttributeError:
        pass
    return cfrom
The initial character position in the surface string. Defaults to -1 if there is no valid cfrom value.
3,603
def name_suggest(q=None, datasetKey=None, rank=None, limit=100, offset=None, **kwargs):
    url = gbif_baseurl + 'species/suggest'
    args = {'q': q, 'rank': rank, 'offset': offset, 'limit': limit}
    return gbif_GET(url, args, **kwargs)
A quick and simple autocomplete service that returns up to 20 name usages by doing prefix matching against the scientific name. Results are ordered by relevance. :param q: [str] Simple search parameter. The value for this parameter can be a simple word or a phrase. Wildcards can be added to the simple word parameters only, e.g. ``q=*puma*`` (Required) :param datasetKey: [str] Filters by the checklist dataset key (a uuid, see examples) :param rank: [str] A taxonomic rank. One of ``class``, ``cultivar``, ``cultivar_group``, ``domain``, ``family``, ``form``, ``genus``, ``informal``, ``infrageneric_name``, ``infraorder``, ``infraspecific_name``, ``infrasubspecific_name``, ``kingdom``, ``order``, ``phylum``, ``section``, ``series``, ``species``, ``strain``, ``subclass``, ``subfamily``, ``subform``, ``subgenus``, ``subkingdom``, ``suborder``, ``subphylum``, ``subsection``, ``subseries``, ``subspecies``, ``subtribe``, ``subvariety``, ``superclass``, ``superfamily``, ``superorder``, ``superphylum``, ``suprageneric_name``, ``tribe``, ``unranked``, or ``variety``. :param limit: [fixnum] Number of records to return. Maximum: ``1000``. (optional) :param offset: [fixnum] Record number to start at. (optional) :return: A dictionary References: http://www.gbif.org/developer/species#searching Usage:: from pygbif import species species.name_suggest(q='Puma concolor') x = species.name_suggest(q='Puma') species.name_suggest(q='Puma', rank="genus") species.name_suggest(q='Puma', rank="subspecies") species.name_suggest(q='Puma', rank="species") species.name_suggest(q='Puma', rank="infraspecific_name") species.name_suggest(q='Puma', limit=2)
3,604
def create_introspect_response(self, uri, http_method='POST', body=None, headers=None):
    resp_headers = {
        'Content-Type': 'application/json',
        'Cache-Control': 'no-store',
        'Pragma': 'no-cache',
    }
    request = Request(uri, http_method, body, headers)
    try:
        self.validate_introspect_request(request)
        log.debug('Token introspect valid for %r.', request)
    except OAuth2Error as e:
        log.debug('Client error during validation of %r. %r.', request, e)
        resp_headers.update(e.headers)
        return resp_headers, e.json, e.status_code
    claims = self.request_validator.introspect_token(
        request.token, request.token_type_hint, request
    )
    if claims is None:
        return resp_headers, json.dumps(dict(active=False)), 200
    if "active" in claims:
        claims.pop("active")
    return resp_headers, json.dumps(dict(active=True, **claims)), 200
Create a valid or invalid introspection response. If the authorization server is unable to determine the state of the token without additional information, it SHOULD return an introspection response indicating the token is not active, as described in Section 2.2.
3,605
def _instantiate_task(api, kwargs):
    # (the dict keys in this function were stripped from the source and are left blank)
    file_id = kwargs[]
    kwargs[] = file_id if str(file_id).strip() else None
    kwargs[] = kwargs[] or None
    kwargs[] = kwargs[]
    kwargs[] = kwargs[]
    kwargs[] = get_utcdatetime(kwargs[])
    kwargs[] = get_utcdatetime(kwargs[])
    is_transferred = (kwargs[] == 2 and kwargs[] == 1)
    if is_transferred:
        kwargs[] = api.downloads_directory.cid
    else:
        kwargs[] = None
    del kwargs[]
    del kwargs[]
    if in kwargs:
        if not kwargs[]:
            kwargs[] = None
    else:
        kwargs[] = None
    task = Task(api, **kwargs)
    if is_transferred:
        task._parent = api.downloads_directory
    return task
Create a Task object from raw kwargs
3,606
def pk_names(cls):
    if cls._cache_pk_names is None:
        cls._cache_pk_names = cls._get_primary_key_names()
    return cls._cache_pk_names
Primary key column name list.
3,607
def _wait_for_process(self, pid, name):
    try:
        logging.debug("Waiting for process %s with pid %s", name, pid)
        unused_pid, exitcode, ru_child = os.wait4(pid, 0)
        return exitcode, ru_child
    except OSError as e:
        if self.PROCESS_KILLED and e.errno == errno.EINTR:
            logging.debug("OSError %s while waiting for termination of %s (%s): %s.",
                          e.errno, name, pid, e.strerror)
            try:
                unused_pid, exitcode, ru_child = os.wait4(pid, 0)
                return exitcode, ru_child
            except OSError:
                pass
        logging.critical("OSError %s while waiting for termination of %s (%s): %s.",
                         e.errno, name, pid, e.strerror)
        return (0, None)
Wait for the given process to terminate. @return tuple of exit code and resource usage
3,608
def buy_close(id_or_ins, amount, price=None, style=None, close_today=False):
    position_effect = POSITION_EFFECT.CLOSE_TODAY if close_today else POSITION_EFFECT.CLOSE
    return order(id_or_ins, amount, SIDE.BUY, position_effect, cal_style(price, style))
Buy to close (close a short position). :param id_or_ins: the instrument(s) to order :type id_or_ins: :class:`~Instrument` object | `str` | List[:class:`~Instrument`] | List[`str`] :param int amount: number of lots to order :param float price: order price; defaults to None, which means a :class:`~MarketOrder`. This parameter mainly exists to simplify the `style` parameter. :param style: order type; a market order by default. Currently supported order types are :class:`~LimitOrder` and :class:`~MarketOrder` :type style: `OrderStyle` object :param bool close_today: whether to send a close-today order; defaults to False, sending a regular close order :return: :class:`~Order` object | list[:class:`~Order`] | None :example: .. code-block:: python # Buy to close 2 lots of the existing IF1603 short position with a market order: buy_close('IF1603', 2)
3,609
def splitter(iterable, chunksize=60):
    return (iterable[0+i:chunksize+i] for i in range(0, len(iterable), chunksize))
Split an iterable that supports indexing into chunks of 'chunksize'.
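A quick illustrative check (hypothetical values, not from the source):

    chunks = list(splitter(list(range(10)), chunksize=4))
    # -> [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9]]  (the last chunk may be shorter)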
3,610
def log_posterior_transit_plus_line(theta, params, model, t, flux, err_flux, priorbounds):
    lp = _log_prior_transit_plus_line(theta, priorbounds)
    if not np.isfinite(lp):
        return -np.inf
    else:
        return (
            lp + _log_likelihood_transit_plus_line(
                theta, params, model, t, flux, err_flux, priorbounds)
        )
Evaluate posterior probability given proposed model parameters and the observed flux timeseries.
3,611
def get_cameras_signal_strength(self):
    signal_strength = {}
    if not self.camera_properties:
        return None
    for camera in self.camera_properties:
        serialnum = camera.get()      # key elided in the source
        cam_strength = camera.get()   # key elided in the source
        signal_strength[serialnum] = cam_strength
    return signal_strength
Return a dict mapping each camera's serial number to its signal strength.
3,612
def add(self, datapoint):
    if not self.is_full:
        self.set_datapoint(self.cur_index, datapoint)
        self.cur_index += 1
Adds the datapoint to the tensor if room is available.
3,613
def _add_chrome_arguments(self, options):
    try:
        for pref, pref_value in dict(self.config.items('ChromeArguments')).items():
            pref_value = '={}'.format(pref_value) if pref_value else ''
            self.logger.debug("Added chrome argument: %s%s", pref, pref_value)
            options.add_argument('{}{}'.format(pref, self._convert_property_type(pref_value)))
    except NoSectionError:
        pass
Add Chrome arguments from properties file :param options: chrome options object
3,614
def wait_func_accept_retry_state(wait_func):
    if not six.callable(wait_func):
        return wait_func
    if func_takes_retry_state(wait_func):
        return wait_func
    if func_takes_last_result(wait_func):
        @_utils.wraps(wait_func)
        def wrapped_wait_func(retry_state):
            warn_about_non_retry_state_deprecation(
                'wait', wait_func, stacklevel=4)
            return wait_func(
                retry_state.attempt_number,
                retry_state.seconds_since_start,
                last_result=retry_state.outcome,
            )
    else:
        @_utils.wraps(wait_func)
        def wrapped_wait_func(retry_state):
            warn_about_non_retry_state_deprecation(
                'wait', wait_func, stacklevel=4)
            return wait_func(
                retry_state.attempt_number,
                retry_state.seconds_since_start,
            )
    return wrapped_wait_func
Wrap wait function to accept "retry_state" parameter.
3,615
def _get_log_level(level):
    if level is None or level == "DEBUG":
        return logging.DEBUG
    level = level.upper()
    if level == "INFO":
        return logging.INFO
    elif level == "WARNING":
        return logging.WARNING
    elif level == "CRITICAL":
        return logging.CRITICAL
    elif level == "ERROR":
        return logging.ERROR
    elif level == "FATAL":
        return logging.FATAL
    else:
        raise Exception("UnknownLogLevelException: enter a valid log level")
small static method to get logging level :param str level: string of the level e.g. "INFO" :returns logging.<LEVEL>: appropriate debug level
3,616
def fetch(args: List[str], env: Dict[str, str] = None,
          encoding: str = sys.getdefaultencoding()) -> str:
    stdout, _ = run(args, env=env, capture_stdout=True,
                    echo_stdout=False, encoding=encoding)
    log.debug(stdout)
    return stdout
Run a command and returns its stdout. Args: args: the command-line arguments env: the operating system environment to use encoding: the encoding to use for ``stdout`` Returns: the command's ``stdout`` output
3,617
def parse(s):
    parts = s.replace(',', ' ').split()
    if not parts:
        raise ValueError('empty interval string')
    pieces = []
    for part in parts:
        m = PART_MATCH(part)
        pieces.extend(m.groups() if m else [part])
    if len(pieces) == 1:
        pieces.append('s')
    if len(pieces) % 2:
        raise ValueError('malformed interval %r (%r -> %r)' % (s, parts, pieces))
    result = 0
    for number, units in zip(*[iter(pieces)] * 2):
        number = float(number)
        if number < 0:
            raise ValueError('durations may not be negative')
        result += number * _get_units(units)
    return result
Parse a string representing a time interval or duration into seconds, or raise an exception :param str s: a string representation of a time interval :raises ValueError: if ``s`` can't be interpreted as a duration
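Assuming PART_MATCH splits number/suffix pairs and _get_units maps the usual suffixes to multipliers (both helpers are defined elsewhere; this is an illustrative assumption), usage would look like:

    parse('1m 30s')   # -> 90.0
    parse('2h, 15m')  # -> 8100.0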
3,618
def _url_builder(url_root, api_key, path, params):
    params['api_key'] = api_key
    url_end = urlencode(params)
    url = "%s%s%s" % (url_root, path, url_end)
    return url
Helper function to build a parameterized URL.
3,619
def event_handler(event_name):
    def wrapper(func):
        func._event_handler = True
        func._handled_event = event_name
        return func
    return wrapper
Decorator for designating a handler for an event type. ``event_name`` must be a string representing the name of the event type. The decorated function must accept a parameter: the body of the received event, which will be a Python object that can be encoded as a JSON (dict, list, str, int, bool, float or None) :param event_name: The name of the event that will be handled. Only one handler per event name is supported by the same microservice.
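A minimal usage sketch; the event name and body field are hypothetical:

    @event_handler('user.created')
    def on_user_created(body):
        # body is the decoded JSON payload of the event
        print(body['id'])

    on_user_created._handled_event  # -> 'user.created'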
3,620
def collect(self):
    self.ignore_patterns = []  # eight ignore globs; the pattern literals were elided in the source
    page_themes = PageTheme.objects.all()
    for finder in get_finders():
        for path, storage in finder.list(self.ignore_patterns):
            for t in page_themes:
                static_path = '{}'.format(t.name.split('/')[-1])  # path format elided in the source
                if static_path in path:
                    try:
                        page_theme = PageTheme.objects.get(id=t.id)
                    except PageTheme.DoesNotExist:
                        raise Exception("Run sync_themes before this command")
                    except Exception as e:
                        self.stdout.write(
                            "Cannot load {} into database original error: {}".format(t, e))
                    skins_path = os.path.join(
                        storage.path('/'.join(path.split('/')[0:-1])))
                    for dirpath, skins, filenames in os.walk(skins_path):
                        for skin in [s for s in skins if s not in []]:
                            for skin_dirpath, skins, filenames in os.walk(os.path.join(dirpath, skin)):
                                skin, created = PageColorScheme.objects.get_or_create(
                                    theme=page_theme, label=skin, name=skin.title())
                                for f in filenames:
                                    if 'styles' in f:
                                        with codecs.open(os.path.join(skin_dirpath, f)) as style_file:
                                            skin.styles = style_file.read()
                                    elif 'variables' in f:
                                        with codecs.open(os.path.join(skin_dirpath, f)) as variables_file:
                                            skin.variables = variables_file.read()
                                skin.save()
                                self.skins_updated += 1
    self.page_themes_updated += len(page_themes)
Load and save ``PageColorScheme`` for every ``PageTheme`` .. code-block:: bash static/themes/bootswatch/united/variables.scss static/themes/bootswatch/united/styles.scss
3,621
def set_wm_wallpaper(img):
    if shutil.which("feh"):
        util.disown(["feh", "--bg-fill", img])
    elif shutil.which("nitrogen"):
        util.disown(["nitrogen", "--set-zoom-fill", img])
    elif shutil.which("bgs"):
        util.disown(["bgs", "-z", img])
    elif shutil.which("hsetroot"):
        util.disown(["hsetroot", "-fill", img])
    elif shutil.which("habak"):
        util.disown(["habak", "-mS", img])
    elif shutil.which("display"):
        util.disown(["display", "-backdrop", "-window", "root", img])
    else:
        logging.error("No wallpaper setter found.")
        return
Set the wallpaper for non desktop environments.
3,622
def sync_config_tasks(self):
    tasks_by_hash = {_hash_task(t): t for t in self.config_tasks}
    for task in self.all_tasks:
        if tasks_by_hash.get(task["hash"]):
            del tasks_by_hash[task["hash"]]
        else:
            self.collection.remove({"_id": task["_id"]})
            log.debug("Scheduler: deleted %s" % task["hash"])
    for h, task in tasks_by_hash.items():
        task["hash"] = h
        task["datelastqueued"] = datetime.datetime.fromtimestamp(0)
        if task.get("dailytime"):
            task["dailytime"] = datetime.datetime.combine(
                datetime.datetime.utcnow(), task["dailytime"])
            task["interval"] = 3600 * 24
            if datetime.datetime.utcnow().time() > task["dailytime"].time():
                task["datelastqueued"] = datetime.datetime.utcnow()
        self.collection.find_one_and_update({"hash": task["hash"]},
                                            {"$set": task},
                                            upsert=True)
        log.debug("Scheduler: added %s" % task["hash"])
Performs the first sync of a list of tasks, often defined in the config file.
3,623
def complete_io(self, iocb, msg):
    if _debug: IOQController._debug("complete_io %r %r", iocb, msg)
    if iocb is not self.active_iocb:
        raise RuntimeError("not the current iocb")
    IOController.complete_io(self, iocb, msg)
    self.active_iocb = None
    if self.wait_time:
        self.state = CTRL_WAITING
        _statelog.debug("%s %s %s" % (_strftime(), self.name, "waiting"))
        task = FunctionTask(IOQController._wait_trigger, self)
        task.install_task(delta=self.wait_time)
    else:
        self.state = CTRL_IDLE
        _statelog.debug("%s %s %s" % (_strftime(), self.name, "idle"))
        deferred(IOQController._trigger, self)
Called by a handler to return data to the client.
3,624
def set_prop(self, prop, value, ef=None):
    if ef:
        self.ef[prop] = value
    else:
        if prop == 'ensemble':
            self.ensemble = value
        elif prop == 'auc':
            self.auc = value
Set attribute values. :param prop: name of the property to set ('ensemble' or 'auc') :param value: value to assign :param ef: if truthy, store the value in ``self.ef[prop]`` instead :return: None
3,625
def load_file(file_path, credentials=None):
    if file_path.startswith('gs://'):
        return _load_file_from_gcs(file_path, credentials)
    else:
        return open(file_path, 'r')
Load a file from either local or gcs. Args: file_path: The target file path, which should have the prefix 'gs://' if to be loaded from gcs. credentials: Optional credential to be used to load the file from gcs. Returns: A python File object if loading file from local or a StringIO object if loading from gcs.
3,626
def _compute_MFP_matrix(self):
    self.MFP = np.zeros(self.Lp.shape)
    for i in range(self.Lp.shape[0]):
        for k in range(self.Lp.shape[1]):
            for j in range(self.Lp.shape[1]):
                self.MFP[i, k] += (self.Lp[i, j] - self.Lp[i, k]
                                   - self.Lp[k, j] + self.Lp[k, k]) * self.z[j]
    settings.mt(0, ...)  # log message elided in the source
    self.distances_dpt = self.MFP
See Fouss et al. (2006). This is the mean-first passage time matrix. It's not a distance. Mfp[i, k] := m(k|i) in the notation of Fouss et al. (2006). This corresponds to the standard notation for transition matrices (left index initial state, right index final state, i.e. a right-stochastic matrix, with each row summing to one).
3,627
def get_resources(cls):
    plugin = directory.get_plugin()
    controller = IPAvailabilityController(plugin)
    return [extensions.ResourceExtension(Ip_availability.get_alias(), controller)]
Returns Ext Resources.
3,628
def load(fp, **kwargs) -> BioCCollection:
    obj = json.load(fp, **kwargs)
    return parse_collection(obj)
Deserialize fp (a .read()-supporting text file or binary file containing a JSON document) to a BioCCollection object Args: fp: a file containing a JSON document **kwargs: Returns: BioCCollection: a collection
3,629
def list_present(name, acl_type, acl_names=None, perms='', recurse=False, force=False):
    if acl_names is None:
        acl_names = []
    ret = {'name': name, 'result': True, 'changes': {}, 'comment': ''}
    _octal = {'r': 4, 'w': 2, 'x': 1, '-': 0}
    _octal_perms = sum([_octal.get(i, i) for i in perms])
    if not os.path.exists(name):
        ret['comment'] = '{0} does not exist'.format(name)
        ret['result'] = False
        return ret
    __current_perms = __salt__['acl.getfacl'](name)
    if acl_type.startswith(('d:', 'default:')):
        _acl_type = ':'.join(acl_type.split(':')[1:])
        _current_perms = __current_perms[name].get('defaults', {})
        _default = True
    else:
        _acl_type = acl_type
        _current_perms = __current_perms[name]
        _default = False
    _origin_group = _current_perms.get('comment', {}).get('group', None)
    _origin_owner = _current_perms.get('comment', {}).get('owner', None)
    _current_acl_types = []
    diff_perms = False
    for key in _current_perms[acl_type]:
        for current_acl_name in key.keys():
            _current_acl_types.append(current_acl_name.encode('utf-8'))
            diff_perms = _octal_perms == key[current_acl_name]['octal']
    if acl_type == 'user':
        try:
            _current_acl_types.remove(_origin_owner)
        except ValueError:
            pass
    else:
        try:
            _current_acl_types.remove(_origin_group)
        except ValueError:
            pass
    diff_acls = set(_current_acl_types) ^ set(acl_names)
    if not diff_acls and diff_perms and not force:
        ret = {'name': name, 'result': True, 'changes': {},
               'comment': 'Permissions are in the desired state for {0}'.format(acl_type)}
        return ret
    if acl_names == '*':
        _search_names = __current_perms[name].get('comment').get(_acl_type, '')
    else:
        _search_names = acl_names
    if _current_perms.get(_acl_type, None) or _default:
        try:
            users = {}
            for i in _current_perms[_acl_type]:
                if i and next(six.iterkeys(i)) in _search_names:
                    users.update(i)
        except (AttributeError, KeyError):
            users = None
        if users:
            changes = {}
            for count, search_name in enumerate(_search_names):
                if search_name in users:
                    if users[search_name]['octal'] == sum([_octal.get(i, i) for i in perms]):
                        ret['comment'] = 'Permissions are in the desired state'
                    else:
                        changes.update({'new': {'acl_name': ', '.join(acl_names),
                                                'acl_type': acl_type,
                                                'perms': _octal_perms},
                                        'old': {'acl_name': ', '.join(acl_names),
                                                'acl_type': acl_type,
                                                'perms': six.text_type(users[search_name]['octal'])}})
            if __opts__['test']:
                ret.update({'comment': 'Updated permissions will be applied for {0}: {1} -> {2}'.format(
                                acl_names, six.text_type(users[search_name]['octal']), perms),
                            'result': None, 'changes': changes})
                return ret
            try:
                if force:
                    __salt__['acl.wipefacls'](name, recursive=recurse, raise_err=True)
                for acl_name in acl_names:
                    __salt__['acl.modfacl'](acl_type, acl_name, perms, name,
                                            recursive=recurse, raise_err=True)
                ret.update({'comment': 'Updated permissions for {0}'.format(acl_names),
                            'result': True, 'changes': changes})
            except CommandExecutionError as exc:
                ret.update({'comment': 'Error updating permissions for {0}: {1}'.format(
                                acl_names, exc.strerror),
                            'result': False})
        else:
            changes = {'new': {'acl_name': ', '.join(acl_names),
                               'acl_type': acl_type,
                               'perms': perms}}
            if __opts__['test']:
                ret.update({'comment': 'New permissions will be applied for {0}: {1}'.format(
                                acl_names, perms),
                            'result': None, 'changes': changes})
                ret['result'] = None
                return ret
            try:
                if force:
                    __salt__['acl.wipefacls'](name, recursive=recurse, raise_err=True)
                for acl_name in acl_names:
                    __salt__['acl.modfacl'](acl_type, acl_name, perms, name,
                                            recursive=recurse, raise_err=True)
                ret.update({'comment': 'Applied new permissions for {0}'.format(', '.join(acl_names)),
                            'result': True, 'changes': changes})
            except CommandExecutionError as exc:
                ret.update({'comment': 'Error applying new permissions for {0}: {1}'.format(
                                acl_names, exc.strerror),
                            'result': False})
    else:
        ret['comment'] = 'ACL Type does not exist'
        ret['result'] = False
    return ret
Ensure a Linux ACL list is present. Takes a list of ACL names and adds them to the given path. name The ACL path. acl_type The type of the ACL; can be 'user' or 'group'. acl_names The list of users or groups. perms Set the permissions, e.g. rwx. recurse Set the permissions recursively in the path. force Wipe out old permissions and ensure only the new permissions are set.
3,630
def _parse_title(dom, details):
    title = details.find("h1")
    if not title:
        title = dom.find("title")
        assert title, "Can't find <title> tag!"
        return title[0].getContent().split("|")[0].strip()
    return title[0].getContent().strip()
Parse title/name of the book. Args: dom (obj): HTMLElement containing whole HTML page. details (obj): HTMLElement containing slice of the page with details. Returns: str: Book's title. Raises: AssertionError: If title not found.
3,631
def get_dimensions_units(names):
    dimensions_uni = {}
    for name in names:
        key = get_key_from_dimensions(names[name].dimensions)
        dimensions_uni[key] = names[name]
        plain_dimensions = [{'base': name, 'power': 1}]
        key = get_key_from_dimensions(plain_dimensions)
        dimensions_uni[key] = names[name]
        if not names[name].dimensions:
            names[name].dimensions = plain_dimensions
        names[name].dimensions = [{'base': names[i['base']].name, 'power': i['power']}
                                  for i in names[name].dimensions]
    return dimensions_uni
Create dictionary of unit dimensions.
3,632
def use_comparative_activity_view(self):
    self._object_views['activity'] = COMPARATIVE
    for session in self._get_provider_sessions():
        try:
            session.use_comparative_activity_view()
        except AttributeError:
            pass
Pass through to provider ActivityLookupSession.use_comparative_activity_view
3,633
def _urljoin(base, url):
    parsed = urlparse(base)
    scheme = parsed.scheme
    return urlparse(
        urljoin(parsed._replace(scheme='http').geturl(), url)
    )._replace(scheme=scheme).geturl()
Join relative URLs to base URLs like urllib.parse.urljoin but support arbitrary URIs (esp. 'http+unix://').
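A sketch of the intended behavior, assuming the temporary scheme swap shown above:

    _urljoin('http+unix://%2Fvar%2Frun%2Fapp.sock/v1/', 'status')
    # -> 'http+unix://%2Fvar%2Frun%2Fapp.sock/v1/status'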
3,634
def VerifyMessageSignature(self, unused_response_comms, packed_message_list,
                           cipher, cipher_verified, api_version, remote_public_key):
    _ = api_version
    result = rdf_flows.GrrMessage.AuthorizationState.UNAUTHENTICATED
    if cipher_verified or cipher.VerifyCipherSignature(remote_public_key):
        stats_collector_instance.Get().IncrementCounter(
            "grr_authenticated_messages")
        result = rdf_flows.GrrMessage.AuthorizationState.AUTHENTICATED
    if packed_message_list.timestamp != self.timestamp:
        result = rdf_flows.GrrMessage.AuthorizationState.UNAUTHENTICATED
    if not cipher.cipher_metadata:
        cipher.cipher_metadata = rdf_flows.CipherMetadata(
            source=packed_message_list.source)
    return result
Verify the message list signature. This is the way the messages are verified in the client. In the client we also check that the nonce returned by the server is correct (the timestamp doubles as a nonce). If the nonce fails we deem the response unauthenticated since it might have resulted from a replay attack. Args: packed_message_list: The PackedMessageList rdfvalue from the server. cipher: The cipher belonging to the remote end. cipher_verified: If True, the cipher's signature is not verified again. api_version: The api version we should use. remote_public_key: The public key of the source. Returns: An rdf_flows.GrrMessage.AuthorizationState. Raises: DecryptionError: if the message is corrupt.
3,635
async def parse_target(self, runtime, target_str):
    pipeline_parts = target_str.split(RULE_SEPARATOR)
    module = await self.resolve_module(runtime, pipeline_parts[0], target_str)
    rules = []
    for part in pipeline_parts[1:]:
        rule = await self.resolve_rule(runtime, part)
        rules.append(rule)
    return module, tuple(rules)
A target is a pipeline of a module into zero or more rules, and each module and rule can itself be scoped with zero or more module names.
3,636
def main(argv):
    source, target, tag = argv
    if "a" in tag:
        bump = "alpha"
    elif "b" in tag:
        bump = "beta"
    else:
        bump = find_bump(target, tag)
    filename = "{}.md".format(tag)
    destination = copy(join(source, filename), target)
    build_hugo_md(destination, tag, bump)
Identify the release type and create a new target file with TOML header. Requires three arguments.
3,637
def transform_properties(properties, schema):
    new_properties = properties.copy()
    for prop_value, (prop_name, prop_type) in zip(new_properties.values(),
                                                  schema["properties"].items()):
        if prop_value is None:
            continue
        elif prop_type == "time":
            new_properties[prop_name] = parse_date(prop_value).time()
        elif prop_type == "date":
            new_properties[prop_name] = parse_date(prop_value).date()
        elif prop_type == "datetime":
            new_properties[prop_name] = parse_date(prop_value)
    return new_properties
Transform properties types according to a schema. Parameters ---------- properties : dict Properties to transform. schema : dict Fiona schema containing the types.
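A minimal sketch with hypothetical schema and values; note the function pairs values with schema entries by position, so key order must match:

    schema = {"properties": {"name": "str", "seen": "datetime"}}
    props = {"name": "A12", "seen": "2021-03-01T12:00:00"}
    transform_properties(props, schema)
    # -> {'name': 'A12', 'seen': datetime.datetime(2021, 3, 1, 12, 0)}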
3,638
def pointer_gate(num_qubits, U):
    ptr_bits = int(floor(np.log2(num_qubits)))
    data_bits = num_qubits - ptr_bits
    ptr_state = 0
    assert ptr_bits > 0
    program = pq.Program()
    program.defgate("CU", controlled(ptr_bits, U))
    for _, target_qubit, changed in gray(ptr_bits):
        if changed is None:
            for ptr_qubit in range(num_qubits - ptr_bits, num_qubits):
                program.inst(X(ptr_qubit))
                ptr_state ^= 1 << (ptr_qubit - data_bits)
        else:
            program.inst(X(data_bits + changed))
            ptr_state ^= 1 << changed
        if target_qubit < data_bits:
            control_qubits = tuple(data_bits + i for i in range(ptr_bits))
            program.inst(("CU",) + control_qubits + (target_qubit,))
        fixup(program, data_bits, ptr_bits, ptr_state)
    return program
Make a pointer gate on `num_qubits`. The one-qubit gate U will act on the qubit addressed by the pointer qubits interpreted as an unsigned binary integer. There are P = floor(lg(num_qubits)) pointer qubits, and qubits numbered N - 1 N - 2 ... N - P are those reserved to represent the pointer. The first N - P qubits are the qubits which the one-qubit gate U can act on.
3,639
def configure(self, config):
    credentials = CredentialParams.many_from_config(config)
    for credential in credentials:
        self._credentials.append(credential)
Configures component by passing configuration parameters. :param config: configuration parameters to be set.
3,640
def sample_std(self, f, *args, **kwargs):
    vals = self.sample_f(f, *args, **kwargs)
    return _np.std(vals, axis=0)
r"""Sample standard deviation of numerical method f over all samples Calls f(\*args, \*\*kwargs) on all samples and computes the standard deviation. f must return a numerical value or an ndarray. Parameters ---------- f : method reference or name (str) Model method to be evaluated for each model sample args : arguments Non-keyword arguments to be passed to the method in each call kwargs : keyword-argments Keyword arguments to be passed to the method in each call Returns ------- std : float or ndarray standard deviation or array of standard deviations
3,641
def package(self, value):
    if value is not None:
        assert type(value) is unicode, \
            "'{0}' attribute: '{1}' type is not 'unicode'!".format("package", value)
    self.__package = value
Setter for **self.__package** attribute. :param value: Attribute value. :type value: unicode
3,642
def get_bitcoind_config(config_file=None, impl=None):
    loaded = False
    bitcoind_server = None
    bitcoind_port = None
    bitcoind_user = None
    bitcoind_passwd = None
    bitcoind_timeout = None
    bitcoind_regtest = None
    bitcoind_p2p_port = None
    bitcoind_spv_path = None
    regtest = None
    if config_file is not None:
        parser = SafeConfigParser()
        parser.read(config_file)
        if parser.has_section('bitcoind'):
            if parser.has_option('bitcoind', 'server'):
                bitcoind_server = parser.get('bitcoind', 'server')
            if parser.has_option('bitcoind', 'port'):
                bitcoind_port = int(parser.get('bitcoind', 'port'))
            if parser.has_option('bitcoind', 'p2p_port'):
                bitcoind_p2p_port = int(parser.get('bitcoind', 'p2p_port'))
            if parser.has_option('bitcoind', 'user'):
                bitcoind_user = parser.get('bitcoind', 'user')
            if parser.has_option('bitcoind', 'passwd'):
                bitcoind_passwd = parser.get('bitcoind', 'passwd')
            if parser.has_option('bitcoind', 'spv_path'):
                bitcoind_spv_path = parser.get('bitcoind', 'spv_path')
            if parser.has_option('bitcoind', 'regtest'):
                regtest = parser.get('bitcoind', 'regtest')
            else:
                regtest = 'no'
            if parser.has_option('bitcoind', 'timeout'):
                bitcoind_timeout = float(parser.get('bitcoind', 'timeout'))
            if regtest.lower() in ["yes", "y", "true", "1", "on"]:
                bitcoind_regtest = True
            else:
                bitcoind_regtest = False
            loaded = True
    if not loaded:
        # the default server/user/password literals were elided in the source
        bitcoind_server = ''
        bitcoind_port = 8332
        bitcoind_user = ''
        bitcoind_passwd = ''
        bitcoind_regtest = False
        bitcoind_timeout = 300
        bitcoind_p2p_port = 8333
        bitcoind_spv_path = os.path.expanduser("~/.virtualchain-spv-headers.dat")
    default_bitcoin_opts = {
        "bitcoind_user": bitcoind_user,
        "bitcoind_passwd": bitcoind_passwd,
        "bitcoind_server": bitcoind_server,
        "bitcoind_port": bitcoind_port,
        "bitcoind_timeout": bitcoind_timeout,
        "bitcoind_regtest": bitcoind_regtest,
        "bitcoind_p2p_port": bitcoind_p2p_port,
        "bitcoind_spv_path": bitcoind_spv_path
    }
    return default_bitcoin_opts
Set bitcoind options globally. Call this before trying to talk to bitcoind.
3,643
def compute_bgband(evtpath, srcreg, bkgreg, ebins, env=None):
    import numpy as np
    import pandas as pd
    from scipy.special import erfcinv, gammaln
    if env is None:
        from . import CiaoEnvironment
        env = CiaoEnvironment()
    srcarea = get_region_area(env, evtpath, srcreg)
    bkgarea = get_region_area(env, evtpath, bkgreg)
    srccounts = [count_events(env, evtpath, '[sky=%s][energy=%d:%d]' % (srcreg, elo, ehi))
                 for elo, ehi in ebins]
    bkgcounts = [count_events(env, evtpath, '[sky=%s][energy=%d:%d]' % (bkgreg, elo, ehi))
                 for elo, ehi in ebins]
    df = pd.DataFrame({
        'elo': [t[0] for t in ebins],
        'ehi': [t[1] for t in ebins],
        'nsrc': srccounts,
        'nbkg': bkgcounts,
    })
    df['ewidth'] = np.abs(df['ehi'] - df['elo'])
    df['nbkg_scaled'] = df['nbkg'] * srcarea / bkgarea
    df['log_prob_bkg'] = (df['nsrc'] * np.log(df['nbkg_scaled'])
                          - df['nbkg_scaled'] - gammaln(df['nsrc'] + 1))
    df['src_sigma'] = np.sqrt(2) * erfcinv(np.exp(df['log_prob_bkg']))
    df['nsrc_subbed'] = df['nsrc'] - df['nbkg_scaled']
    return df
Compute background information for a source in one or more energy bands. evtpath Path to a CIAO events file srcreg String specifying the source region to consider; use 'region(path.reg)' if you have the region saved in a file. bkgreg String specifying the background region to consider; same format as srcreg ebins Iterable of 2-tuples giving low and high bounds of the energy bins to consider, measured in eV. env An optional CiaoEnvironment instance; default settings are used if unspecified. Returns a DataFrame containing at least the following columns: elo The low bound of this energy bin, in eV. ehi The high bound of this energy bin, in eV. ewidth The width of the bin in eV; simply `abs(ehi - elo)`. nsrc The number of events within the specified source region and energy range. nbkg The number of events within the specified background region and energy range. nbkg_scaled The number of background events scaled to the source area; not an integer. nsrc_subbed The estimated number of non-background events in the source region; simply `nsrc - nbkg_scaled`. log_prob_bkg The logarithm of the probability that all counts in the source region are due to background events. src_sigma The confidence of source detection in sigma inferred from log_prob_bkg. The probability of backgrounditude is computed as: b^s * exp (-b) / s! where `b` is `nbkg_scaled` and `s` is `nsrc`. The confidence of source detection is computed as: sqrt(2) * erfcinv (prob_bkg) where `erfcinv` is the inverse complementary error function.
3,644
def common_log(environ, response, response_time=None):
    logger = logging.getLogger()
    if response_time:
        formatter = ApacheFormatter(with_response_time=True)
        try:
            log_entry = formatter(response.status_code, environ,
                                  len(response.content), rt_us=response_time)
        except TypeError:
            log_entry = formatter(response.status_code, environ,
                                  len(response.content), rt_ms=response_time)
    else:
        formatter = ApacheFormatter(with_response_time=False)
        log_entry = formatter(response.status_code, environ, len(response.content))
    logger.info(log_entry)
    return log_entry
Given the WSGI environ and the response, log this event in Common Log Format.
3,645
def to_repr(self: Variable, values, brackets1d: Optional[bool] = False) \
        -> str:
    prefix = f'{self.name}('
    if isinstance(values, str):
        string = f'{self.name}({values})'
    elif self.NDIM == 0:
        string = f'{self.name}({values})'
    elif self.NDIM == 1:
        if brackets1d:
            string = objecttools.assignrepr_list(values, prefix, 72) + ')'
        else:
            string = objecttools.assignrepr_values(
                values, prefix, 72) + ')'
    else:
        string = objecttools.assignrepr_list2(values, prefix, 72) + ')'
    return '\n'.join(self.commentrepr + [string])
Return a valid string representation for the given |Variable| object. Function |to_repr| it thought for internal purposes only, more specifically for defining string representations of subclasses of class |Variable| like the following: >>> from hydpy.core.variabletools import to_repr, Variable >>> class Var(Variable): ... NDIM = 0 ... TYPE = int ... __hydpy__connect_variable2subgroup__ = None ... initinfo = 1.0, False >>> var = Var(None) >>> var.value = 2 >>> var var(2) The following examples demonstrate all covered cases. Note that option `brackets1d` allows choosing between a "vararg" and an "iterable" string representation for 1-dimensional variables (the first one being the default): >>> print(to_repr(var, 2)) var(2) >>> Var.NDIM = 1 >>> var = Var(None) >>> var.shape = 3 >>> print(to_repr(var, range(3))) var(0, 1, 2) >>> print(to_repr(var, range(3), True)) var([0, 1, 2]) >>> print(to_repr(var, range(30))) var(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29) >>> print(to_repr(var, range(30), True)) var([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29]) >>> Var.NDIM = 2 >>> var = Var(None) >>> var.shape = (2, 3) >>> print(to_repr(var, [range(3), range(3, 6)])) var([[0, 1, 2], [3, 4, 5]]) >>> print(to_repr(var, [range(30), range(30, 60)])) var([[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29], [30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59]])
3,646
def profile(self, frame, event, arg):
    if (self.events == None) or (event in self.events):
        frame_info = inspect.getframeinfo(frame)
        cp = (frame_info[0], frame_info[2], frame_info[1])
        if self.codepoint_included(cp):
            objects = muppy.get_objects()
            size = muppy.get_size(objects)
            if cp not in self.memories:
                self.memories[cp] = [0, 0, 0, 0]
                self.memories[cp][0] = 1
                self.memories[cp][1] = size
                self.memories[cp][2] = size
            else:
                self.memories[cp][0] += 1
                if self.memories[cp][1] > size:
                    self.memories[cp][1] = size
                if self.memories[cp][2] < size:
                    self.memories[cp][2] = size
Profiling method used to profile matching codepoints and events.
3,647
def expire_at(self, key, _time):
    return self._client.expireat(self.get_key(key), round(_time))
Sets the expiration time of @key to @_time @_time: absolute Unix timestamp (seconds since January 1, 1970)
3,648
def parse_timespan(timedef):
    if isinstance(timedef, int):
        return timedef
    converter_order = ('w', 'd', 'h', 'm', 's')
    converters = {'w': 604800, 'd': 86400, 'h': 3600, 'm': 60, 's': 1}
    timedef = timedef.lower()
    if timedef.isdigit():
        return int(timedef)
    elif len(timedef) == 0:
        return 0
    seconds = -1
    for spec in converter_order:
        timedef = timedef.split(spec)
        if len(timedef) == 1:
            timedef = timedef[0]
            continue
        elif len(timedef) > 2 or not timedef[0].isdigit():
            seconds = -1
            break
        adjustment = converters[spec]
        seconds = max(seconds, 0)
        seconds += (int(timedef[0]) * adjustment)
        timedef = timedef[1]
        if not len(timedef):
            break
    if seconds < 0:
        raise ValueError('invalid timespan format')
    return seconds
Convert a string timespan definition to seconds, for example converting '1m30s' to 90. If *timedef* is already an int, the value will be returned unmodified. :param timedef: The timespan definition to convert to seconds. :type timedef: int, str :return: The converted value in seconds. :rtype: int
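For example, given the converter table above:

    parse_timespan('1m30s')  # -> 90
    parse_timespan('2h')     # -> 7200
    parse_timespan(45)       # -> 45 (ints pass through unmodified)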
3,649
def get(self, entry):
    if self.apiVersion == 1:
        path = self.secretsmount + '/' + entry
    else:
        path = self.secretsmount + '/data/' + entry
    proj = yield self._http.get('/v1/{0}'.format(path))
    code = yield proj.code
    if code != 200:
        raise KeyError("The key %s does not exist in Vault provider: request"
                       " return code:%d." % (entry, code))
    json = yield proj.json()
    if self.apiVersion == 1:
        ret = json.get('data', {}).get('value')
    else:
        ret = json.get('data', {}).get('data', {}).get('value')
    return ret
Get the value for ``entry`` from the Vault secret backend.
3,650
def get_database_data(file_name=''):
    if not os.path.exists(file_name):
        raise IOError("File {} does not exist!".format(file_name))
    df = pd.read_csv(file_name, header=1)
    return df
return the energy (eV) and Sigma (barn) from the file_name Parameters: =========== file_name: string ('' by default) name of csv file Returns: ======== pandas dataframe Raises: ======= IOError if file does not exist
3,651
def addcomment(self, order_increment_id, status, comment=None, notify=False):
    if comment is None:
        comment = ""
    return bool(self.call(
        'sales_order.addComment',
        [order_increment_id, status, comment, notify]
    ))
Add comment to order or change its state :param order_increment_id: Order ID TODO: Identify possible values for status
3,652
def do_catch_fedora_errors(parser, token):
    END_TAGS = ('permission_denied', 'fedora_failed', 'end_fedora_access')
    blocks = {}
    blocks['fedora_access'] = parser.parse(END_TAGS)
    token = parser.next_token()
    while token.contents != 'end_fedora_access':
        current_block = str(token.contents)
        if current_block in blocks:
            raise template.TemplateSyntaxError(
                current_block + ' may appear only once')
        blocks[current_block] = parser.parse(END_TAGS)
        token = parser.next_token()
    return CatchFedoraErrorsNode(**blocks)
Catches fedora errors between ``{% fedora_access %}`` and ``{% end_fedora_access %}``. Template designers may specify optional ``{% permission_denied %}`` and ``{% fedora_failed %}`` sections with fallback content in case of permission or other errors while rendering the main block. Note that when Django's ``TEMPLATE_DEBUG`` setting is on, it precludes all error handling and displays the Django exception screen for all errors, including fedora errors, even if you use this template tag. Turn off ``TEMPLATE_DEBUG`` if that debug screen is getting in the way of the use of {% fedora_access %}.
3,653
def _pre_run_checks(self, stream=sys.stdout, dry_run=False):
    input_missing = self.check_input_files(return_found=False)
    if input_missing:
        if dry_run:
            stream.write("Input files are missing: %s: %i\n" %
                         (self.linkname, len(input_missing)))
        else:
            print(self.args)
            raise OSError("Input files are missing: %s" % input_missing)
    output_found, output_missing = self.check_output_files()
    if output_found and not output_missing:
        stream.write("All output files for %s already exist: %i %i %i\n" %
                     (self.linkname, len(output_found),
                      len(output_missing), len(self.files.output_files)))
        if dry_run:
            pass
        else:
            pass
    return True
Do some checks before running this link. This checks if input and output files are present. If input files are missing this will raise `OSError` if dry_run is False. If all output files are present this returns False. Parameters ----------- stream : `file` Stream that this function will print to, Must have 'write' function dry_run : bool Print command but do not run it Returns ------- status : bool True if it is ok to proceed with running the link
3,654
def generate_nucmer_commands(
    filenames,
    outdir=".",
    nucmer_exe=pyani_config.NUCMER_DEFAULT,
    filter_exe=pyani_config.FILTER_DEFAULT,
    maxmatch=False,
):
    nucmer_cmdlines, delta_filter_cmdlines = [], []
    for idx, fname1 in enumerate(filenames[:-1]):
        for fname2 in filenames[idx + 1:]:
            ncmd, dcmd = construct_nucmer_cmdline(
                fname1, fname2, outdir, nucmer_exe, filter_exe, maxmatch
            )
            nucmer_cmdlines.append(ncmd)
            delta_filter_cmdlines.append(dcmd)
    return (nucmer_cmdlines, delta_filter_cmdlines)
Return a tuple of lists of NUCmer command-lines for ANIm The first element is a list of NUCmer commands, the second a list of delta_filter_wrapper.py commands. These are ordered such that commands are paired. The NUCmer commands should be run before the delta-filter commands. - filenames - a list of paths to input FASTA files - outdir - path to output directory - nucmer_exe - location of the nucmer binary - maxmatch - Boolean flag indicating to use NUCmer's -maxmatch option Loop over all FASTA files generating NUCmer command lines for each pairwise comparison.
3,655
def os_volumes(self):
    if not self.__os_volumes:
        self.__os_volumes = OsVolumes(self.__connection)
    return self.__os_volumes
Gets the OS Volumes API client. Returns: OsVolumes:
3,656
def init(self, scope):
    schemas = self.schemas().values()
    for schema in schemas:
        scope[schema.name()] = schema.model()
Loads the models from the orb system into the inputted scope. :param scope | <dict> autoGenerate | <bool> schemas | [<orb.TableSchema>, ..] || None database | <str> || None
3,657
def main():
    parser = argparse.ArgumentParser(description='Windows loopback adapter tool')  # description elided in the source
    parser.add_argument('-a', "--add", nargs=3, action=parse_add_loopback(),
                        help="add a Windows loopback adapter")
    parser.add_argument("-r", "--remove", action="store",
                        help="remove a Windows loopback adapter")
    try:
        args = parser.parse_args()
    except argparse.ArgumentTypeError as e:
        raise SystemExit(e)
    devcon_path = shutil.which("devcon")
    if not devcon_path:
        raise SystemExit("Could not find devcon.exe")
    from win32com.shell import shell
    if not shell.IsUserAnAdmin():
        raise SystemExit("You must run this script as an administrator")
    try:
        if args.add:
            add_loopback(devcon_path, args.add[0], args.add[1], args.add[2])
        if args.remove:
            remove_loopback(devcon_path, args.remove)
    except SystemExit as e:
        print(e)
        os.system("pause")
Entry point for the Windows loopback tool.
3,658
def is_correct(self):
    state = True
    if self.command_name.startswith('_internal_host_check'):
        parameters = self.command_line.split(';')
        if len(parameters) < 2:
            self.command_name = "_internal_host_check;0;Host assumed to be UP"
            self.add_warning("[%s::%s] has no defined state nor output. Changed to %s"
                             % (self.my_type, self.command_name, self.command_name))
        elif len(parameters) < 3:
            state = 3
            try:
                state = int(parameters[1])
            except ValueError:
                self.add_warning("[%s::%s] required a non integer state: %s. Using 3."
                                 % (self.my_type, self.command_name, parameters[1]))
            if state > 4:
                self.add_warning("[%s::%s] required an impossible state: %d. Using 3."
                                 % (self.my_type, self.command_name, state))
            output = {0: "UP", 1: "DOWN", 2: "DOWN", 3: "UNKNOWN", 4: "UNREACHABLE", }[state]
            self.command_name = "_internal_host_check;Host assumed to be %s" % output
            self.add_warning("[%s::%s] has no defined output. Changed to %s"
                             % (self.my_type, self.command_name, self.command_name))
        elif len(parameters) > 3:
            self.command_name = "%s;%s;%s" % (parameters[0], parameters[1], parameters[2])
            self.add_warning("[%s::%s] has too many parameters. Changed to %s"
                             % (self.my_type, self.command_name, self.command_name))
    return super(Command, self).is_correct() and state
Check if this object configuration is correct :: * Check our own specific properties * Call our parent class is_correct checker :return: True if the configuration is correct, otherwise False :rtype: bool
3,659
def convert_loguniform_categorical(self, challenger_dict):
    converted_dict = {}
    for key, value in challenger_dict.items():
        if key in self.loguniform_key:
            converted_dict[key] = np.exp(challenger_dict[key])
        elif key in self.categorical_dict:
            idx = challenger_dict[key]
            converted_dict[key] = self.categorical_dict[key][idx]
        else:
            converted_dict[key] = value
    return converted_dict
Convert the values of type `loguniform` back to their initial range Also, we convert categorical: categorical values in search space are changed to list of numbers before, those original values will be changed back in this function Parameters ---------- challenger_dict: dict challenger dict Returns ------- dict dict which stores copy of challengers
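A minimal sketch with made-up attribute contents (the search-space setup here is illustrative, not from the source):

    tuner.loguniform_key = {'learning_rate'}
    tuner.categorical_dict = {'optimizer': ['sgd', 'adam']}
    tuner.convert_loguniform_categorical(
        {'learning_rate': np.log(0.01), 'optimizer': 1, 'batch_size': 32})
    # -> {'learning_rate': 0.01, 'optimizer': 'adam', 'batch_size': 32}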
3,660
def save_prov_to_files(self, showattributes=False):
    self.doc.add_bundle(self.bundle)
    ttl_file = os.path.join(self.export_dir, 'nidm.ttl')
    ttl_txt = self.doc.serialize(format='rdf', rdf_format='turtle')
    ttl_txt, json_context = self.use_prefixes(ttl_txt)
    for namespace in self.doc._namespaces.get_registered_namespaces():
        json_context[namespace._prefix] = namespace._uri
    for namespace in \
            list(self.doc._namespaces._default_namespaces.values()):
        json_context[namespace._prefix] = namespace._uri
    json_context["xsd"] = "http://www.w3.org/2000/01/rdf-schema#"
    ttl_txt = ttl_txt.replace(..., ...)  # replacement arguments elided in the source
    with open(ttl_file, 'w') as ttl_fid:
        ttl_fid.write(ttl_txt)
    jsonld_file = os.path.join(self.export_dir, 'nidm.json')
    jsonld_txt = self.doc.serialize(format='rdf', rdf_format='json-ld',
                                    context=json_context)
    with open(jsonld_file, 'w') as jsonld_fid:
        jsonld_fid.write(jsonld_txt)
    if not self.zipped:
        os.rename(self.export_dir, self.out_dir)
    else:
        os.chdir(self.export_dir)
        zf = zipfile.ZipFile(os.path.join("..", self.out_dir), mode='w')
        try:
            for root, dirnames, filenames in os.walk("."):
                for filename in filenames:
                    zf.write(os.path.join(filename))
        finally:
            zf.close()
        os.chdir("..")
        shutil.rmtree(os.path.join("..", self.export_dir))
Write-out provn serialisation to nidm.provn.
3,661
def sync_model(self, comment='', compact_central=False, release_borrowed=True,
               release_workset=True, save_local=False):
    self._add_entry(templates.FILE_SYNC_START)
    if compact_central:
        self._add_entry(templates.FILE_SYNC_COMPACT)
    if release_borrowed:
        self._add_entry(templates.FILE_SYNC_RELEASE_BORROWED)
    if release_workset:
        self._add_entry(templates.FILE_SYNC_RELEASE_USERWORKSETS)
    if save_local:
        self._add_entry(templates.FILE_SYNC_RELEASE_SAVELOCAL)
    self._add_entry(templates.FILE_SYNC_COMMENT_OK
                    .format(sync_comment=comment))
Append a sync model entry to the journal. This instructs Revit to sync the currently open workshared model. Args: comment (str): comment to be provided for the sync step compact_central (bool): if True compacts the central file release_borrowed (bool): if True releases the borrowed elements release_workset (bool): if True releases the borrowed worksets save_local (bool): if True saves the local file as well
3,662
def play_station(self, station):
    for song in iterate_forever(station.get_playlist):
        try:
            self.play(song)
        except StopIteration:
            self.stop()
            return
Play the station until something ends it. This function will run forever until terminated by calling end_station.
3,663
def create_device(name, role, model, manufacturer, site):
    try:
        nb_role = get_('dcim', 'device-roles', name=role)
        if not nb_role:
            return False
        nb_type = get_('dcim', 'device-types', model=model)
        if not nb_type:
            return False
        nb_site = get_('dcim', 'sites', name=site)
        if not nb_site:
            return False
        status = {'label': "Active", 'value': 1}
    except RequestError as e:
        log.error('%s, %s, %s', e.req.request.headers, e.request_body, e.error)
        return False
    payload = {'name': name,
               'display_name': name,
               'slug': slugify(name),
               'device_type': nb_type['id'],
               'device_role': nb_role['id'],
               'site': nb_site['id']}
    new_dev = _add('dcim', 'devices', payload)
    if new_dev:
        return {'dcim': {'devices': payload}}
    else:
        return False
.. versionadded:: 2019.2.0 Create a new device with a name, role, model, manufacturer and site. All these components need to be already in Netbox. name The name of the device, e.g., ``edge_router`` role String of device role, e.g., ``router`` model String of device model, e.g., ``MX480`` manufacturer String of device manufacturer, e.g., ``Juniper`` site String of device site, e.g., ``BRU`` CLI Example: .. code-block:: bash salt myminion netbox.create_device edge_router router MX480 Juniper BRU
3,664
def get_controller_state(self):
    dpos = self.control[:3] * 0.005
    roll, pitch, yaw = self.control[3:] * 0.005
    self.grasp = self.control_gripper
    drot1 = rotation_matrix(angle=-pitch, direction=[1., 0, 0], point=None)[:3, :3]
    drot2 = rotation_matrix(angle=roll, direction=[0, 1., 0], point=None)[:3, :3]
    drot3 = rotation_matrix(angle=yaw, direction=[0, 0, 1.], point=None)[:3, :3]
    self.rotation = self.rotation.dot(drot1.dot(drot2.dot(drot3)))
    return dict(
        dpos=dpos,
        rotation=self.rotation,
        grasp=self.grasp,
        reset=self._reset_state
    )
Returns the current state of the 3d mouse, a dictionary of pos, orn, grasp, and reset.
3,665
def hideEvent(self, event):
    super(CallTipWidget, self).hideEvent(event)
    self._text_edit.cursorPositionChanged.disconnect(
        self._cursor_position_changed)
    self._text_edit.removeEventFilter(self)
Reimplemented to disconnect signal handlers and event filter.
3,666
def _init_params(self, inputs, overwrite=False):
    inputs = [x if isinstance(x, DataDesc) else DataDesc(*x) for x in inputs]
    input_shapes = {item.name: item.shape for item in inputs}
    arg_shapes, _, aux_shapes = self.symbol.infer_shape(**input_shapes)
    assert arg_shapes is not None
    input_dtypes = {item.name: item.dtype for item in inputs}
    arg_dtypes, _, aux_dtypes = self.symbol.infer_type(**input_dtypes)
    assert arg_dtypes is not None
    arg_names = self.symbol.list_arguments()
    input_names = input_shapes.keys()
    param_names = [key for key in arg_names if key not in input_names]
    aux_names = self.symbol.list_auxiliary_states()
    param_name_attrs = [x for x in zip(arg_names, arg_shapes, arg_dtypes)
                        if x[0] in param_names]
    arg_params = {k: nd.zeros(shape=s, dtype=t) for k, s, t in param_name_attrs}
    aux_name_attrs = [x for x in zip(aux_names, aux_shapes, aux_dtypes)
                      if x[0] in aux_names]
    aux_params = {k: nd.zeros(shape=s, dtype=t) for k, s, t in aux_name_attrs}
    for k, v in arg_params.items():
        if self.arg_params and k in self.arg_params and (not overwrite):
            arg_params[k][:] = self.arg_params[k][:]
        else:
            self.initializer(k, v)
    for k, v in aux_params.items():
        if self.aux_params and k in self.aux_params and (not overwrite):
            aux_params[k][:] = self.aux_params[k][:]
        else:
            self.initializer(k, v)
    self.arg_params = arg_params
    self.aux_params = aux_params
    return (arg_names, list(param_names), aux_names)
Initialize weight parameters and auxiliary states.
3,667
def _forward_mode(self, *args):
    X: np.ndarray
    dX: np.ndarray
    X, dX = self.f._forward_mode(*args)
    p: float = self.p
    val = X ** p
    diff = p * X ** (p-1) * dX
    return (val, diff)
Forward-mode differentiation for a constant power (power rule).
3,668
def filter(self, filters):
    new_elements = [
        e for e in self.elements
        if all(function(e) for function in filters)]
    return Pileup(self.locus, new_elements)
Apply filters to the pileup elements, and return a new Pileup with the filtered elements removed. Parameters ---------- filters : list of PileupElement -> bool callables A PileupUp element is retained if all filters return True when called on it.
3,669
def load(self, model_file, save_dir, verbose=True):
    if not os.path.exists(save_dir):
        self.logger.error("Loading failed... Directory does not exist.")
    try:
        checkpoint = torch.load(f"{save_dir}/{model_file}")
    except BaseException:
        self.logger.error(
            f"Loading failed... Cannot load model from {save_dir}/{model_file}"
        )
    self.load_state_dict(checkpoint["model"])
    self.settings = checkpoint["config"]
    self.cardinality = checkpoint["cardinality"]
    self.name = checkpoint["name"]
    if verbose:
        self.logger.info(
            f"[{self.name}] Model loaded as {model_file} in {save_dir}"
        )
Load model from file and rebuild the model. :param model_file: Saved model file name. :type model_file: str :param save_dir: Saved model directory. :type save_dir: str :param verbose: Print log or not :type verbose: bool
3,670
def safe_call(self, kwargs, args=None):
    try:
        # the call to the underlying function was lost in the source;
        # this scaffold is reconstructed from the docstring below, and the
        # attribute name _func is hypothetical
        return self._func(**kwargs)
    except Exception as exc:
        if args is not None and getattr(args, 'debug', False):
            raise
        return str(exc)
Call the underlying function safely, given a set of keyword arguments. If successful, the function return value (likely None) will be returned. If the underlying function raises an exception, the return value will be the exception message, unless an argparse Namespace object defining a 'debug' attribute of True is provided; in this case, the exception will be re-raised. :param kwargs: A dictionary of keyword arguments to pass to the underlying function. :param args: If provided, this should be a Namespace object with a 'debug' attribute set to a boolean value. :returns: The function return value, or the string value of the exception raised by the function.
3,671
def _get_gecos(name):
    gecos_field = pwd.getpwnam(name).pw_gecos.split(',', 3)
    if not gecos_field:
        return {}
    else:
        while len(gecos_field) < 4:
            gecos_field.append('')
        return {'fullname': six.text_type(gecos_field[0]),
                'roomnumber': six.text_type(gecos_field[1]),
                'workphone': six.text_type(gecos_field[2]),
                'homephone': six.text_type(gecos_field[3])}
Retrieve GECOS field info and return it in dictionary form
3,672
def _merge_dicts(first, second):
    new = deepcopy(first)
    for k, v in second.items():
        if isinstance(v, dict) and v:
            ret = _merge_dicts(new.get(k, dict()), v)
            new[k] = ret
        else:
            new[k] = second[k]
    return new
Merge the 'second' multiple-dictionary into the 'first' one.
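For example:

    a = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
    b = {'db': {'port': 5433}, 'debug': True}
    _merge_dicts(a, b)
    # -> {'db': {'host': 'localhost', 'port': 5433}, 'debug': True}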
3,673
def random(cls, engine_or_session, limit=5):
    ses, auto_close = ensure_session(engine_or_session)
    result = ses.query(cls).order_by(func.random()).limit(limit).all()
    if auto_close:
        ses.close()
    return result
Return random ORM instance. :type engine_or_session: Union[Engine, Session] :type limit: int :rtype: List[ExtendedBase]
3,674
def swd_read8(self, offset):
    value = self._dll.JLINK_SWD_GetU8(offset)
    return ctypes.c_uint8(value).value
Gets a unit of ``8`` bits from the input buffer. Args: self (JLink): the ``JLink`` instance offset (int): the offset (in bits) from which to start reading Returns: The integer read from the input buffer.
3,675
def scatter_plot(self, ax, topic_dims, t=None, ms_limits=True, **kwargs_plot):
    plot_specs = {'marker': 'o', 'linestyle': ''}  # default style literals elided in the source
    plot_specs.update(kwargs_plot)
    data = self.data_t(topic_dims, t)
    ax.plot(*(data.T), **plot_specs)
    if ms_limits:
        ax.axis(self.axes_limits(topic_dims))
2D or 3D scatter plot. :param axes ax: matplotlib axes (use Axes3D if 3D data) :param tuple topic_dims: list of (topic, dims) tuples, where topic is a string and dims is a list of dimensions to be plotted for that topic. :param int t: time indexes to be plotted :param dict kwargs_plot: argument to be passed to matplotlib's plot function, e.g. the style of the plotted points 'or' :param bool ms_limits: if set to True, automatically set axes boundaries to the sensorimotor boundaries (default: True)
3,676
def humanize_time(secs):
    if secs is None:
        return
    if secs < 1:
        return "{:.2f}ms".format(secs*1000)
    elif secs < 10:
        return "{:.2f}s".format(secs)
    else:
        mins, secs = divmod(secs, 60)
        hours, mins = divmod(mins, 60)
        return '{:02d}:{:02d}:{:02d}'.format(int(hours), int(mins), int(secs))
Convert seconds into hh:mm:ss format.
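Assuming the hh:mm:ss format string filled in above:

    humanize_time(0.005)  # -> '5.00ms'
    humanize_time(3725)   # -> '01:02:05'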
3,677
def send_message(self, message):
    with self._instance_lock:
        if message is None:
            Global.LOGGER.error(f"cant deliver anonymous messages with body {message.body}")
            return
        if message.receiver is None:
            Global.LOGGER.error(
                f"cant deliver message with no body from {message.sender}")
            return
        sender = "*" + message.sender + "*"
        self.socket.send_multipart(
            [bytes(sender, 'utf-8'), pickle.dumps(message)])
        if Global.CONFIG_MANAGER.tracing_mode:
            Global.LOGGER.debug("dispatched : "
                                + message.sender + "-"
                                + message.message + "-"
                                + message.receiver)
        self.dispatched = self.dispatched + 1
Dispatch a message using 0mq
3,678
def get_names_owned_by_address(address, proxy=None, hostport=None):
    assert proxy or hostport, 'Need either proxy handle or hostport string'
    if proxy is None:
        proxy = connect_hostport(hostport)
    owned_schema = {
        'type': 'object',
        'properties': {
            'names': {
                'type': 'array',
                'items': {
                    'type': 'string',
                    'uniqueItems': True
                },
            },
        },
        'required': ['names'],
    }
    schema = json_response_schema(owned_schema)
    resp = {}
    try:
        resp = proxy.get_names_owned_by_address(address)
        resp = json_validate(schema, resp)
        if json_is_error(resp):
            return resp
        for n in resp['names']:
            assert is_name_valid(str(n)), ('Invalid name {}'.format(str(n)))
    except ValidationError as ve:
        if BLOCKSTACK_DEBUG:
            log.exception(ve)
        resp = {'error': 'Server response did not match expected schema', 'http_status': 502}
        return resp
    except AssertionError as e:
        if BLOCKSTACK_DEBUG:
            log.exception(e)
        resp = {'error': 'Server returned an invalid name'}
        return resp
    except socket.timeout:
        log.error("Connection timed out")
        resp = {'error': 'Connection to remote host timed out', 'http_status': 503}
        return resp
    except socket.error as se:
        log.error("Connection error {}".format(se.errno))
        resp = {'error': 'Connection to remote host failed', 'http_status': 502}
        return resp
    except Exception as ee:
        if BLOCKSTACK_DEBUG:
            log.exception(ee)
        log.error("Caught exception while connecting to Blockstack node: {}".format(ee))
        resp = {'error': 'Failed to contact Blockstack node', 'http_status': 500}
        return resp
    return resp['names']
Get the names owned by an address. Returns the list of names on success Returns {'error': ...} on error
3,679
def get_eval_func(obj, feature, slice=np.s_[...]):
    if feature is not None:
        if not isinstance(feature, InducingFeature) or not isinstance(obj, kernels.Kernel):
            raise TypeError("If `feature` is supplied, `obj` must be a kernel.")
        return lambda x: tf.transpose(Kuf(feature, obj, x))[slice]
    elif isinstance(obj, mean_functions.MeanFunction):
        return lambda x: obj(x)[slice]
    elif isinstance(obj, kernels.Kernel):
        return lambda x: obj.Kdiag(x)
    else:
        raise NotImplementedError()
Return the function of interest (kernel or mean) for the expectation depending on the type of :obj: and whether any features are given
3,680
def frame_received_cb(self, frame):
    PYVLXLOG.debug("REC: %s", frame)
    for frame_received_cb in self.frame_received_cbs:
        self.loop.create_task(frame_received_cb(frame))
Received message.
3,681
def pop(self):
    stack = getattr(self._local, "stack", None)
    if stack is None:
        return None
    elif len(stack) == 1:
        release_local(self._local)
        return stack[-1]
    else:
        return stack.pop()
Removes the topmost item from the stack, will return the old value or `None` if the stack was already empty.
3,682
def from_ymd_to_excel(year, month, day):
    if not is_valid_ymd(year, month, day):
        raise ValueError("Invalid date {0}.{1}.{2}".format(year, month, day))
    days = _cum_month_days[month - 1] + day
    days += 1 if (is_leap_year(year) and month > 2) else 0
    years_distance = year - 1900
    days += years_distance * 365 + \
        (years_distance + 3) // 4 - (years_distance + 99) // 100 + \
        (years_distance + 299) // 400
    days += 1 if (year, month, day) > (1900, 2, 28) else 0
    return days
converts date as `(year, month, day)` tuple into Microsoft Excel representation style :param tuple(int, int, int): int tuple `year, month, day` :return int:
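Two checks against known Excel serials (Excel treats 1900 as a leap year, hence the +1 for dates after 1900-02-28):

    from_ymd_to_excel(1900, 3, 1)   # -> 61
    from_ymd_to_excel(2020, 1, 1)   # -> 43831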
3,683
def warn(message, category=None, stacklevel=1, emitstacklevel=1):
    if isinstance(message, Warning):
        category = message.__class__
    if category is None:
        category = UserWarning
    if not (isinstance(category, type) and issubclass(category, Warning)):
        raise TypeError("category must be a Warning subclass, "
                        "not '{}'".format(type(category).__name__))
    try:
        frame = _get_stack_frame(stacklevel)
    except ValueError:
        globals = sys.__dict__
        lineno = 1
    else:
        globals = frame.f_globals
        lineno = frame.f_lineno
    try:
        eframe = _get_stack_frame(emitstacklevel)
    except ValueError:
        eglobals = sys.__dict__
    else:
        eglobals = eframe.f_globals
    if '__name__' in eglobals:
        emodule = eglobals['__name__']
    else:
        emodule = "<string>"
    if '__name__' in globals:
        module = globals['__name__']
    else:
        module = "<string>"
    filename = globals.get('__file__')
    if filename:
        fnl = filename.lower()
        if fnl.endswith(".pyc"):
            filename = filename[:-1]
    else:
        if module == "__main__":
            try:
                filename = sys.argv[0]
            except AttributeError:
                # embedded interpreters don't have sys.argv
                filename = None
        if not filename:
            filename = module
    registry = globals.setdefault("__warningregistry__", {})
    warn_explicit(message, category, filename, lineno, module, registry,
                  globals, emit_module=emodule)
Issue a warning, or maybe ignore it or raise an exception. Duplicate of the standard library `warn` function except it takes the following extra argument: `emitstacklevel`: defaults to 1; the number of stack frames to consider when matching the module that emits this warning.
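A hedged usage sketch: a hypothetical deprecation helper that points the warning at its caller (`stacklevel=2`) and also attributes the emission to the caller's module (`emitstacklevel=2`) instead of the module the helper lives in:

def deprecated_alias(old_name, new_name):
    # stacklevel=2: report the location of the code that called this helper.
    # emitstacklevel=2: count the caller's module as the emitter for filtering,
    # skipping this helper's own frame.
    warn("{} is deprecated, use {} instead".format(old_name, new_name),
         DeprecationWarning, stacklevel=2, emitstacklevel=2)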
3,684
def interpret(self, values={}, functions={}):
    return self.expr.evaluate(Environment(values, functions))
Like `substitute`, but forces the interpreter (rather than the compiled version) to be used. The interpreter includes exception-handling code for missing variables and buggy template functions but is much slower.
3,685
def _group_paths_without_options(cls, line_parse_result):
    active_pathspecs = set()
    for group in line_parse_result:
        active_pathspecs.add(group['pathspec'])

        has_options = (
            'norm_options' in group or
            'plot_options' in group or
            'style_options' in group
        )
        if has_options:
            yield active_pathspecs, group
            active_pathspecs = set()

    if active_pathspecs:
        yield active_pathspecs, {}
Given a parsed options specification as a list of groups, combine groups without options with the first subsequent group which has options. A line of the form 'A B C [opts] D E [opts_2]' results in [({A, B, C}, [opts]), ({D, E}, [opts_2])]
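A self-contained sketch of the same grouping logic over a mocked parse result; plain dicts stand in for the parsed groups, and the key names (`pathspec`, `plot_options`, `style_options`) are assumptions:

def group_paths_without_options(line_parse_result):
    active_pathspecs = set()
    for group in line_parse_result:
        active_pathspecs.add(group['pathspec'])
        has_options = ('norm_options' in group or
                       'plot_options' in group or
                       'style_options' in group)
        if has_options:
            yield active_pathspecs, group
            active_pathspecs = set()
    if active_pathspecs:
        yield active_pathspecs, {}

parsed = [{'pathspec': 'A'}, {'pathspec': 'B'},
          {'pathspec': 'C', 'plot_options': {'width': 300}},
          {'pathspec': 'D'},
          {'pathspec': 'E', 'style_options': {'color': 'red'}}]
print(list(group_paths_without_options(parsed)))
# schematically: [({'A', 'B', 'C'}, <group with opts>), ({'D', 'E'}, <group with opts_2>)]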
3,686
def SetTimeZone(self, time_zone):
    try:
        self._time_zone = pytz.timezone(time_zone)
    except (AttributeError, pytz.UnknownTimeZoneError):
        raise ValueError('Unsupported timezone: {0:s}'.format(time_zone))
Sets the time zone. Args: time_zone (str): time zone. Raises: ValueError: if the timezone is not supported.
3,687
def follow(name: str) -> snug.Query[bool]:
    # GitHub REST endpoint for following a user
    request = snug.PUT(f'https://api.github.com/user/following/{name}')
    response = yield request
    return response.status_code == 204
follow another user
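A hedged usage sketch, assuming snug's top-level `execute` entry point and its basic-auth tuple form (both from snug's documented quickstart; the credentials are placeholders):

import snug

query = follow('octocat')
ok = snug.execute(query, auth=('my-username', 'my-token'))
print('followed!' if ok else 'follow failed')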
3,688
def Parse(self, statentry, file_object, knowledge_base):
    _ = knowledge_base
    kwargs = {}
    try:
        kwargs["aff4path"] = file_object.urn
    except AttributeError:
        pass

    direct_copy_items = [
        "Label", "Disabled", "UserName", "GroupName", "Program",
        "StandardInPath", "StandardOutPath", "StandardErrorPath",
        "LimitLoadToSessionType", "EnableGlobbing", "EnableTransactions",
        "OnDemand", "RunAtLoad", "RootDirectory", "WorkingDirectory", "Umask",
        "TimeOut", "ExitTimeOut", "ThrottleInterval", "InitGroups",
        "StartOnMount", "StartInterval", "Debug", "WaitForDebugger", "Nice",
        "ProcessType", "AbandonProcessGroup", "LowPriorityIO", "LaunchOnlyOnce"
    ]

    string_array_items = [
        "LimitLoadToHosts", "LimitLoadFromHosts", "LimitLoadToSessionType",
        "ProgramArguments", "WatchPaths", "QueueDirectories"
    ]

    flag_only_items = ["SoftResourceLimits", "HardResourceLimits", "Sockets"]

    plist = {}
    try:
        plist = biplist.readPlist(file_object)
    except (biplist.InvalidPlistException, ValueError, IOError) as e:
        plist["Label"] = "Could not parse plist: %s" % e

    for key in direct_copy_items:
        kwargs[key] = plist.get(key)

    for key in string_array_items:
        elements = plist.get(key)
        if isinstance(elements, string_types):
            kwargs[key] = [elements]
        else:
            kwargs[key] = elements

    for key in flag_only_items:
        if plist.get(key):
            kwargs[key] = True

    if plist.get("inetdCompatability") is not None:
        kwargs["inetdCompatabilityWait"] = plist.get("inetdCompatability").get(
            "Wait")

    keepalive = plist.get("KeepAlive")
    if isinstance(keepalive, bool) or keepalive is None:
        kwargs["KeepAlive"] = keepalive
    else:
        keepalivedict = {}
        keepalivedict["SuccessfulExit"] = keepalive.get("SuccessfulExit")
        keepalivedict["NetworkState"] = keepalive.get("NetworkState")

        pathstates = keepalive.get("PathState")
        if pathstates is not None:
            keepalivedict["PathState"] = []
            for pathstate in pathstates:
                keepalivedict["PathState"].append(
                    rdf_plist.PlistBoolDictEntry(
                        name=pathstate, value=pathstates[pathstate]))

        otherjobs = keepalive.get("OtherJobEnabled")
        if otherjobs is not None:
            keepalivedict["OtherJobEnabled"] = []
            for otherjob in otherjobs:
                keepalivedict["OtherJobEnabled"].append(
                    rdf_plist.PlistBoolDictEntry(
                        name=otherjob, value=otherjobs[otherjob]))
        kwargs["KeepAliveDict"] = rdf_plist.LaunchdKeepAlive(**keepalivedict)

    envvars = plist.get("EnvironmentVariables")
    if envvars is not None:
        kwargs["EnvironmentVariables"] = []
        for envvar in envvars:
            kwargs["EnvironmentVariables"].append(
                rdf_plist.PlistStringDictEntry(name=envvar, value=envvars[envvar]))

    startcalendarinterval = plist.get("StartCalendarInterval")
    if startcalendarinterval is not None:
        if isinstance(startcalendarinterval, dict):
            kwargs["StartCalendarInterval"] = [
                rdf_plist.LaunchdStartCalendarIntervalEntry(
                    Minute=startcalendarinterval.get("Minute"),
                    Hour=startcalendarinterval.get("Hour"),
                    Day=startcalendarinterval.get("Day"),
                    Weekday=startcalendarinterval.get("Weekday"),
                    Month=startcalendarinterval.get("Month"))
            ]
        else:
            kwargs["StartCalendarInterval"] = []
            for entry in startcalendarinterval:
                kwargs["StartCalendarInterval"].append(
                    rdf_plist.LaunchdStartCalendarIntervalEntry(
                        Minute=entry.get("Minute"),
                        Hour=entry.get("Hour"),
                        Day=entry.get("Day"),
                        Weekday=entry.get("Weekday"),
                        Month=entry.get("Month")))

    yield rdf_plist.LaunchdPlist(**kwargs)
Parse the Plist file.
3,689
def volume(self):
    volume = abs(self.primitive.polygon.area * self.primitive.height)
    return volume
The volume of the primitive extrusion. Calculated from polygon and height to avoid mesh creation. Returns ---------- volume: float, volume of 3D extrusion
3,690
def printer(self, message, color_level='info'):
    # The 'colorized' flag name and the 'info' default are assumptions
    # reconstructed from context; the original literals were stripped.
    if self.job_args.get('colorized'):
        print(cloud_utils.return_colorized(msg=message, color=color_level))
    else:
        print(message)
Print messages and log them. :param message: item to print to screen
3,691
def add_substitution(self, substitution):
    if substitution.personalization:
        try:
            personalization = \
                self._personalizations[substitution.personalization]
            has_internal_personalization = True
        except IndexError:
            personalization = Personalization()
            has_internal_personalization = False
        personalization.add_substitution(substitution)

        if not has_internal_personalization:
            self.add_personalization(
                personalization, index=substitution.personalization)
    else:
        if isinstance(substitution, list):
            for s in substitution:
                for p in self.personalizations:
                    p.add_substitution(s)
        else:
            for p in self.personalizations:
                p.add_substitution(substitution)
Add a substitution to the email :param substitution: a Substitution, or list of Substitutions, to add :type substitution: Substitution
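A hedged usage sketch with the sendgrid-python v3 mail helpers; the `Substitution` constructor signature and the `personalization` attribute assignment below are assumptions from context, not confirmed API:

from sendgrid.helpers.mail import Mail, Substitution

mail = Mail()
# Applied to every personalization already on the message:
mail.add_substitution(Substitution('-name-', 'Alice'))

# Or target a specific personalization by index (assumed attribute):
sub = Substitution('-city-', 'Paris')
sub.personalization = 0
mail.add_substitution(sub)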
3,692
def _GetDatabaseConfig(self):
    goodlogging.Log.Seperator()
    goodlogging.Log.Info("CLEAR", "Getting configuration variables...")
    goodlogging.Log.IncreaseIndent()

    # Source directory
    if self._sourceDir is None:
        # Config field names and descriptions below are assumptions;
        # the original string literals were stripped.
        self._sourceDir = self._GetConfigValue('SourceDir', 'source directory')

    # TV directory
    if self._inPlaceRename is False and self._tvDir is None:
        self._tvDir = self._GetConfigValue('TVDir', 'tv directory')

    # Archive directory
    self._archiveDir = self._GetConfigValue('ArchiveDir', 'archive directory', isDir=False)

    # Supported formats and ignored directories
    self._supportedFormatsList = self._GetSupportedFormats()
    self._ignoredDirsList = self._GetIgnoredDirs()

    goodlogging.Log.NewLine()
    goodlogging.Log.Info("CLEAR", "Configuration is:")
    goodlogging.Log.IncreaseIndent()
    goodlogging.Log.Info("CLEAR", "Source directory = {0}".format(self._sourceDir))
    goodlogging.Log.Info("CLEAR", "TV directory = {0}".format(self._tvDir))
    goodlogging.Log.Info("CLEAR", "Supported formats = {0}".format(self._supportedFormatsList))
    goodlogging.Log.Info("CLEAR", "Ignored directory list = {0}".format(self._ignoredDirsList))
    goodlogging.Log.ResetIndent()
Get all configuration from database. This includes values from the Config table as well as populating lists for supported formats and ignored directories from their respective database tables.
3,693
def _green_worker(self):
    while not self.quit.is_set():
        try:
            task = self.green_queue.get(timeout=1)
            timestamp, missile, marker = task
            planned_time = self.start_time + (timestamp / 1000.0)
            delay = planned_time - time.time()
            if delay > 0:
                time.sleep(delay)
            try:
                with self.instance_counter.get_lock():
                    self.instance_counter.value += 1
                self.gun.shoot(missile, marker)
            finally:
                with self.instance_counter.get_lock():
                    self.instance_counter.value -= 1
                self._free_threads_count += 1
        except (KeyboardInterrupt, SystemExit):
            break
        except Empty:
            continue
        except Full:
            # message reconstructed; the original literal was garbled
            logger.warning("Couldn't put to queue because it's full")
        except Exception:
            logger.exception("Bfg shoot exception")
A worker that does actual jobs
3,694
def display_molecule(mol, autozoom=True):
    s = System([mol])
    # pass the caller's flag through instead of hard-coding autozoom=True
    display_system(s, autozoom=autozoom)
Display a `~chemlab.core.Molecule` instance in the viewer. This function wraps the molecule in a system before displaying it.
3,695
def parseGTF(inGTF):
    # collect all attribute keys present in the GTF
    desc = attributesGTF(inGTF)
    ref = inGTF.copy()
    ref.reset_index(inplace=True, drop=True)
    # 'attribute' (the standard 9th GTF column name) is assumed here
    df = ref.drop(['attribute'], axis=1).copy()
    for d in desc:
        field = retrieve_GTF_field(d, ref)
        df = pd.concat([df, field], axis=1)
    return df
Reads and extracts all attributes in the attributes section of a GTF and constructs a new dataframe with one column per attribute instead of the attributes column :param inGTF: GTF dataframe to be parsed :returns: a dataframe of the original input GTF with attributes parsed.
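A hedged usage sketch: reading a standard 9-column GTF with pandas and expanding its attributes (the file path is a placeholder, and the column names are the conventional GTF fields assumed by the function above):

import pandas as pd

gtf = pd.read_csv('annotation.gtf', sep='\t', comment='#', header=None,
                  names=['seqname', 'source', 'feature', 'start', 'end',
                         'score', 'strand', 'frame', 'attribute'])
parsed = parseGTF(gtf)
print(parsed.columns.tolist())   # original fields plus one column per attribute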
3,696
def sql_fingerprint(query, hide_columns=True):
    parsed_query = parse(query)[0]
    sql_recursively_simplify(parsed_query, hide_columns=hide_columns)
    return str(parsed_query)
Simplify a query, taking away exact values and fields selected. Imperfect but better than super explicit, value-dependent queries.
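A hedged usage sketch: the exact placeholder text in the output depends on `sql_recursively_simplify`, so the example only checks that two queries differing in a literal fingerprint identically:

q1 = "SELECT id, name FROM users WHERE email = 'a@example.com'"
q2 = "SELECT id, name FROM users WHERE email = 'b@example.com'"
# Both literals are masked away, so the fingerprints compare equal:
assert sql_fingerprint(q1) == sql_fingerprint(q2)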
3,697
def run(self, args):
    email = args.email
    username = args.username
    force_send = args.resend
    auth_role = args.auth_role
    msg_file = args.msg_file
    message = read_argument_file_contents(msg_file)
    print("Sharing project.")
    to_user = self.remote_store.lookup_or_register_user_by_email_or_username(email, username)
    try:
        project = self.fetch_project(args, must_exist=True, include_children=False)
        dest_email = self.service.share(project, to_user, force_send, auth_role, message)
        print("Share email message sent to " + dest_email)
    except D4S2Error as ex:
        if ex.warning:
            print(ex.message)
        else:
            raise
Gives user permission based on auth_role arg and sends email to that user. :param args: Namespace arguments parsed from the command line
3,698
def from_string(contents):
    lines = [l.strip() for l in contents.split("\n")]

    link0_patt = re.compile(r"^(%.+)\s*=\s*(.+)")
    link0_dict = {}
    for i, l in enumerate(lines):
        if link0_patt.match(l):
            m = link0_patt.match(l)
            link0_dict[m.group(1).strip("=")] = m.group(2)

    route_patt = re.compile(r"^#[sSpPnN]*.*")
    route = ""
    route_index = None
    for i, l in enumerate(lines):
        if route_patt.match(l):
            route += " " + l
            route_index = i
        # route cards may span multiple lines; stop at the first blank line
        elif (l == "" or l.isspace()) and route_index:
            break
    functional, basis_set, route_paras, dieze_tag = read_route_line(route)
    ind = 2
    title = []
    while lines[route_index + ind].strip():
        title.append(lines[route_index + ind].strip())
        ind += 1
    title = ' '.join(title)
    ind += 1
    toks = re.split(r"[,\s]+", lines[route_index + ind])
    charge = int(toks[0])
    spin_mult = int(toks[1])
    coord_lines = []
    spaces = 0
    input_paras = {}
    ind += 1
    for i in range(route_index + ind, len(lines)):
        if lines[i].strip() == "":
            spaces += 1
        if spaces >= 2:  # input parameters follow the second blank line
            d = lines[i].split("=")
            if len(d) == 2:
                input_paras[d[0]] = d[1]
        else:
            coord_lines.append(lines[i].strip())
    mol = GaussianInput._parse_coords(coord_lines)
    mol.set_charge_and_spin(charge, spin_mult)

    return GaussianInput(mol, charge=charge, spin_multiplicity=spin_mult,
                         title=title, functional=functional,
                         basis_set=basis_set,
                         route_parameters=route_paras,
                         input_parameters=input_paras,
                         link0_parameters=link0_dict,
                         dieze_tag=dieze_tag)
Creates GaussianInput from a string. Args: contents: String representing a Gaussian input file. Returns: GaussianInput object
3,699
def access_token(self):
    if self.cache_token:
        return self.access_token_ or \
            self._resolve_credential()
    return self.access_token_
Get access_token.