Dataset columns: Unnamed: 0 — int64 (0 to 389k); code — string (lengths 26 to 79.6k); docstring — string (lengths 1 to 46.9k)
5,900
def add_final_state(self, f):
    if f not in self.Q:
        LOG.error("The specified value is invalid, f must be a member of Q")
        raise InputError("The specified value is invalid, f must be a member of Q")
    self.F.add(f)
:param f: int , the state qi to be added to F, epsilon is conventionally defined as the last node (q_|S|)
5,901
def zipper(root_dir="", name="", path_name_ext=""):
    logger_zips.info("re_zip: name: {}, dir_tmp: {}".format(path_name_ext, root_dir))
    # format restored: the os.rename below expects make_archive to produce "<path>.zip"
    shutil.make_archive(path_name_ext, format="zip", root_dir=root_dir, base_dir=name)
    os.rename("{}.zip".format(path_name_ext), path_name_ext)
    return
Zips up directory back to the original location :param str root_dir: Root directory of the archive :param str name: <datasetname>.lpd :param str path_name_ext: /path/to/filename.lpd
5,902
def _get_dependencies_of(name, location=None):
    if not location:
        detailed_dap_list = get_installed_daps_detailed()
        if name not in detailed_dap_list:
            return _get_api_dependencies_of(name)
        location = detailed_dap_list[name][0]['location']  # key name restored by inference
    # the meta-file path layout is an assumption; the literal was lost in extraction
    meta = '{d}/meta/{dap}.yaml'.format(d=location, dap=name)
    try:
        data = yaml.load(open(meta), Loader=Loader)
    except IOError:
        return []
    return data.get('dependencies', [])
Returns the list of first-level dependencies of the given installed dap, or of the dap from Dapi if it is not installed. If a location is specified, this only checks for the dap installed in that path and returns [] if the dap is not located there.
5,903
def _to_dict(self):
    _dict = {}
    # dict key literals restored from the attribute names they mirror
    if hasattr(self, 'element_pair') and self.element_pair is not None:
        _dict['element_pair'] = [x._to_dict() for x in self.element_pair]
    if hasattr(self, 'identical_text') and self.identical_text is not None:
        _dict['identical_text'] = self.identical_text
    if hasattr(self, 'provenance_ids') and self.provenance_ids is not None:
        _dict['provenance_ids'] = self.provenance_ids
    if hasattr(self, 'significant_elements') and self.significant_elements is not None:
        _dict['significant_elements'] = self.significant_elements
    return _dict
Return a json dictionary representing this model.
5,904
def table_schema_call(self, target, cls):
    index_defs = []
    for name in cls.index_names() or []:
        index_defs.append(GlobalIncludeIndex(
            gsi_name(name), parts=[HashKey(name)], includes=[]
        ))
    return target(
        cls.get_table_name(),
        connection=get_conn(),
        # the hash-key name literal was lost in extraction; 'id' is a placeholder
        schema=[HashKey('id')],
        global_indexes=index_defs or None
    )
Perform a table schema call. We call the callable target with the args and keywords needed for the table defined by cls. This is how we centralize the Table.create and Table ctor calls.
5,905
def find_state_op_colocation_error(graph, reported_tags=None):
    state_op_types = list_registered_stateful_ops_without_inputs()
    state_op_map = {op.name: op for op in graph.get_operations()
                    if op.type in state_op_types}
    for op in state_op_map.values():
        for colocation_group in op.colocation_groups():
            if not (colocation_group.startswith(tf.compat.as_bytes("loc:@")) and
                    tf.compat.as_str_any(colocation_group[5:]) in state_op_map):
                tags_prefix = ("" if reported_tags is None else
                               "in the graph for tags %s, " % reported_tags)
                return (
                    # message text partially lost in extraction; reconstructed
                    # around the surviving fragments and format arguments
                    "A state-holding node of a module must only be colocated "
                    "with other state-holding nodes. %sNode '%s' has op '%s', "
                    "which counts as state-holding, but "
                    "Operation.colocation_groups() == %s. "
                    % (tags_prefix, op.name, op.type, op.colocation_groups()))
    return None
Returns error message for colocation of state ops, or None if ok.
5,906
def extract_geometry(dataset):
    alg = vtk.vtkGeometryFilter()
    alg.SetInputDataObject(dataset)
    alg.Update()
    return _get_output(alg)
Extract the outer surface of a volume or structured grid dataset as PolyData. This will extract all 0D, 1D, and 2D cells producing the boundary faces of the dataset.
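A minimal usage sketch; vtkRTAnalyticSource is a stand-in for a real volume dataset and is not part of this row:
import vtk
source = vtk.vtkRTAnalyticSource()  # synthetic image-data volume
source.Update()
surface = extract_geometry(source.GetOutput())  # PolyData boundary surface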
5,907
def put(
        self, id, name, description, private,
        runs_executable_tasks, runs_docker_container_tasks,
        runs_singularity_container_tasks, active, whitelists):
    request_url = self._client.base_api_url + self.detail_url.format(id=id)
    data_to_put = {
        "name": name,
        "description": description,
        "private": private,
        "runs_executable_tasks": runs_executable_tasks,
        "runs_docker_container_tasks": runs_docker_container_tasks,
        "runs_singularity_container_tasks": runs_singularity_container_tasks,
        "active": active,
        "whitelists": whitelists,
    }
    response = self._client.session.put(request_url, data=data_to_put)
    self.validate_request_success(
        response_text=response.text,
        request_url=request_url,
        status_code=response.status_code,
        expected_status_code=HTTP_200_OK,
    )
    return self.response_data_to_model_instance(response.json())
Updates a task queue on the saltant server. Args: id (int): The ID of the task queue. name (str): The name of the task queue. description (str): The description of the task queue. private (bool): A Boolean signalling whether the queue can only be used by its associated user. runs_executable_tasks (bool): A Boolean specifying whether the queue runs executable tasks. runs_docker_container_tasks (bool): A Boolean specifying whether the queue runs container tasks that run in Docker containers. runs_singularity_container_tasks (bool): A Boolean specifying whether the queue runs container tasks that run in Singularity containers. active (bool): A Boolean signalling whether the queue is active. whitelists (list): A list of task whitelist IDs. Returns: :class:`saltant.models.task_queue.TaskQueue`: A task queue model instance representing the task queue just updated.
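A hedged usage sketch; the `client` handle and its `task_queues` attribute are assumptions about the surrounding saltant client, not shown in this row:
queue = client.task_queues.put(
    id=42, name="gpu-queue", description="GPU jobs", private=False,
    runs_executable_tasks=True, runs_docker_container_tasks=True,
    runs_singularity_container_tasks=False, active=True, whitelists=[])
print(queue)  # -> a TaskQueue model instance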
5,908
def registered(self, driver, executorInfo, frameworkInfo, agentInfo):
    self.id = executorInfo.executor_id.get('value', None)  # key name restored by inference
    log.debug("Registered executor %s with framework", self.id)
    self.address = socket.gethostbyname(agentInfo.hostname)
    nodeInfoThread = threading.Thread(target=self._sendFrameworkMessage, args=[driver])
    nodeInfoThread.daemon = True
    nodeInfoThread.start()
Invoked once the executor driver has been able to successfully connect with Mesos.
5,909
def argument(self, argument_dest, arg_type=None, **kwargs):
    self._check_stale()
    if not self._applicable():
        return
    deprecate_action = self._handle_deprecations(argument_dest, **kwargs)
    if deprecate_action:
        kwargs['action'] = deprecate_action  # key name restored by inference
    self.command_loader.argument_registry.register_cli_argument(
        self.command_scope, argument_dest, arg_type, **kwargs)
Register an argument for the given command scope using a knack.arguments.CLIArgumentType :param argument_dest: The destination argument to add this argument type to :type argument_dest: str :param arg_type: Predefined CLIArgumentType definition to register, as modified by any provided kwargs. :type arg_type: knack.arguments.CLIArgumentType :param kwargs: Possible values: `options_list`, `validator`, `completer`, `nargs`, `action`, `const`, `default`, `type`, `choices`, `required`, `help`, `metavar`. See /docs/arguments.md.
5,910
def compute_metrics_cv(self, X, y, **kwargs):
    results = self.cv_score_mean(X, y)
    return results
Compute cross-validated metrics. Trains this model on data X with labels y. Returns a list of dict with keys name, scoring_name, value. Args: X (Union[np.array, pd.DataFrame]): data y (Union[np.array, pd.DataFrame, pd.Series]): labels
5,911
def update(self, infos):
    for info in infos:
        if isinstance(info, LearningGene):
            self.replicate(info)
Process received infos.
5,912
def make_mecard(name, reading=None, email=None, phone=None, videophone=None,
                memo=None, nickname=None, birthday=None, url=None, pobox=None,
                roomno=None, houseno=None, city=None, prefecture=None,
                zipcode=None, country=None):
    return segno.make_qr(make_mecard_data(
        name=name, reading=reading, email=email, phone=phone,
        videophone=videophone, memo=memo, nickname=nickname,
        birthday=birthday, url=url, pobox=pobox, roomno=roomno,
        houseno=houseno, city=city, prefecture=prefecture,
        zipcode=zipcode, country=country))
\ Returns a QR Code which encodes a `MeCard <https://en.wikipedia.org/wiki/MeCard>`_ :param str name: Name. If it contains a comma, the first part is treated as lastname and the second part is treated as forename. :param str|None reading: Designates a text string to be set as the kana name in the phonebook :param str|iterable email: E-mail address. Multiple values are allowed. :param str|iterable phone: Phone number. Multiple values are allowed. :param str|iterable videophone: Phone number for video calls. Multiple values are allowed. :param str memo: A notice for the contact. :param str nickname: Nickname. :param str|int|date birthday: Birthday. If a string is provided, it should encode the date as YYYYMMDD value. :param str|iterable url: Homepage. Multiple values are allowed. :param str|None pobox: P.O. box (address information). :param str|None roomno: Room number (address information). :param str|None houseno: House number (address information). :param str|None city: City (address information). :param str|None prefecture: Prefecture (address information). :param str|None zipcode: Zip code (address information). :param str|None country: Country (address information). :rtype: segno.QRCode
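A short usage sketch; the save() call relies on the segno.QRCode return type that the :rtype: above names:
qr = make_mecard(name='Doe,John', email='jdoe@example.com', phone='+1-234-567')
qr.save('contact.png')  # QRCode.save is part of segno's public API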
5,913
def setResult(self, value):
    self.setResultCaptureDate(DateTime())
    val = str("" if not value and value != 0 else value).strip()
    if val and val[0] in [LDL, UDL]:
        oper = val[0]
        val = val.replace(oper, "", 1)
        try:
            val = float(val)
        except (ValueError, TypeError):
            val = value
        allow_manual = self.getAllowManualDetectionLimit()
        selector = self.getDetectionLimitSelector()
        if allow_manual or selector:
            self.setDetectionLimitSelector(True)
            self.setDetectionLimitOperand(oper)
            if not allow_manual:
                if oper == LDL:
                    val = self.getLowerDetectionLimit()
                else:
                    val = self.getUpperDetectionLimit()
    self.getField("Result").set(self, val)
Validate and set a value into the Result field, taking into account the Detection Limits. :param value: is expected to be a string.
5,914
def _build_payload(data):
    for k, v in iteritems(data):
        data[k] = _transform(v, key=(k,))
    payload = {
        # key names restored by inference from the pyrollbar payload format
        'access_token': SETTINGS['access_token'],
        'data': data
    }
    return payload
Returns the full payload as a string.
5,915
def asizeof(self, *objs, **opts):
    if opts:
        self.set(**opts)
    s, _ = self._sizes(objs, None)
    return s
Return the combined size of the given objects (with modified options, see method **set**).
5,916
def main():
    authenticator = prawcore.TrustedAuthenticator(
        prawcore.Requestor("prawcore_script_auth_example"),
        os.environ["PRAWCORE_CLIENT_ID"],
        os.environ["PRAWCORE_CLIENT_SECRET"],
    )
    authorizer = prawcore.ScriptAuthorizer(
        authenticator,
        os.environ["PRAWCORE_USERNAME"],
        os.environ["PRAWCORE_PASSWORD"],
    )
    authorizer.refresh()
    with prawcore.session(authorizer) as session:
        data = session.request("GET", "/api/v1/me/friends")
    for friend in data["data"]["children"]:
        print(friend["name"])
    return 0
Provide the program's entry point when directly executed.
5,917
def Pgas(rho, T, mu):
    R = old_div(boltzmann_constant, atomic_mass_unit)
    return (old_div(R, mu)) * rho * T
P = R/mu * rho * T Parameters ---------- mu : float Mean molecular weight rho : float Density [cgs] T : float Temperature [K]
5,918
def periodic_send(self, content, interval, title=''):
    # the endpoint path and response keys were lost in extraction;
    # the literals below are plausible placeholders
    url = '{}/periodic_message'.format(self.remote)
    if isinstance(interval, datetime.timedelta):
        interval = int(interval.total_seconds())
    if not isinstance(interval, int):
        raise ValueError
    data = self._wrap_post_data(title=title, content=content, interval=interval)
    res = requests.post(url, data, timeout=self.timeout)
    if res.status_code == requests.codes.ok:
        res_data = json.loads(self._convert_bytes(res.content))
        if res_data.get('status') == STATUS_SUCCESS:
            return True, res_data.get('message')
        return False, res_data.get('message')
    res.raise_for_status()
    return False, ''
Send a periodic message. :param content: (required|str) - the message content to send :param interval: (required|int|datetime.timedelta) - the interval between sends, as a datetime.timedelta or an integer number of seconds :param title: (optional|str) - the message title :return: * status: send status, True on success, False on failure * message: failure details
5,919
def add(self, pat, fun):
    self._pat = None
    self._pats.append(pat)
    self._funs.append(fun)
r"""Add a pattern and replacement. The pattern must not contain capturing groups. The replacement might be either a string template in which \& will be replaced with the match, or a function that will get the matching text as argument. It does not get match object, because capturing is forbidden anyway.
5,920
def publish(dataset_uri):
    try:
        dataset = dtoolcore.DataSet.from_uri(dataset_uri)
    except dtoolcore.DtoolCoreTypeError:
        print("Not a dataset: {}".format(dataset_uri))
        sys.exit(1)
    try:
        access_uri = dataset._storage_broker.http_enable()
    except AttributeError:
        print(
            "Datasets of type '{}' cannot be published using HTTP".format(
                dataset._storage_broker.key)
        )
        sys.exit(2)
    return access_uri
Return access URL to HTTP enabled (published) dataset. Exits with error code 1 if the dataset_uri is not a dataset. Exits with error code 2 if the dataset cannot be HTTP enabled.
5,921
def loggable(obj):
    if isinstance(obj, logging.Logger):
        return True
    else:
        # the three method-name literals were lost in extraction;
        # 'debug'/'warning'/'error' are assumed as the minimal Logger API
        return (inspect.isclass(obj) and
                inspect.ismethod(getattr(obj, 'debug', None)) and
                inspect.ismethod(getattr(obj, 'warning', None)) and
                inspect.ismethod(getattr(obj, 'error', None)))
Return "True" if the obj implements the minimum Logger API required by the 'trace' decorator.
5,922
def set_resolved_name(self, ref: dict, type_name2solve: TypeName, type_name_ref: TypeName):
    if self.resolution[type_name2solve.value] is None:
        self.resolution[type_name2solve.value] = ref[type_name_ref.value]
Warning!!! Need to rethink it when global poly type
5,923
def nl_nlmsg_flags2str(flags, buf, _=None):
    del buf[:]
    # flag-name literals restored from the NLM_F_* constants they accompany
    all_flags = (
        ('REQUEST', libnl.linux_private.netlink.NLM_F_REQUEST),
        ('MULTI', libnl.linux_private.netlink.NLM_F_MULTI),
        ('ACK', libnl.linux_private.netlink.NLM_F_ACK),
        ('ECHO', libnl.linux_private.netlink.NLM_F_ECHO),
        ('ROOT', libnl.linux_private.netlink.NLM_F_ROOT),
        ('MATCH', libnl.linux_private.netlink.NLM_F_MATCH),
        ('ATOMIC', libnl.linux_private.netlink.NLM_F_ATOMIC),
        ('REPLACE', libnl.linux_private.netlink.NLM_F_REPLACE),
        ('EXCL', libnl.linux_private.netlink.NLM_F_EXCL),
        ('CREATE', libnl.linux_private.netlink.NLM_F_CREATE),
        ('APPEND', libnl.linux_private.netlink.NLM_F_APPEND),
    )
    print_flags = []
    for k, v in all_flags:
        if not flags & v:
            continue
        flags &= ~v
        print_flags.append(k)
    if flags:
        print_flags.append('0x{0:x}'.format(flags))  # format string is an assumption
    buf.extend(','.join(print_flags).encode())
    return buf
Netlink Message Flags Translations. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/msg.c#L664 Positional arguments: flags -- integer. buf -- bytearray(). Keyword arguments: _ -- unused. Returns: Reference to `buf`.
5,924
def parse(self, *args, **kwargs):
    cmd = args[0]
    resp = args[1]
    if cmd in self.parsers:
        try:
            return self.parsers[cmd](resp)
        except Exception as err:
            print(err)
    return {}
Parse response. :param args: List. 2 first items used as parser name and response to parse :param kwargs: dict, not used :return: dictionary or return value of called callable from parser.
5,925
def initializeSessionAsAlice(sessionState, sessionVersion, parameters):
    sessionState.setSessionVersion(sessionVersion)
    sessionState.setRemoteIdentityKey(parameters.getTheirIdentityKey())
    sessionState.setLocalIdentityKey(parameters.getOurIdentityKey().getPublicKey())
    sendingRatchetKey = Curve.generateKeyPair()
    secrets = bytearray()
    if sessionVersion >= 3:
        secrets.extend(RatchetingSession.getDiscontinuityBytes())
    secrets.extend(Curve.calculateAgreement(
        parameters.getTheirSignedPreKey(),
        parameters.getOurIdentityKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(
        parameters.getTheirIdentityKey().getPublicKey(),
        parameters.getOurBaseKey().getPrivateKey()))
    secrets.extend(Curve.calculateAgreement(
        parameters.getTheirSignedPreKey(),
        parameters.getOurBaseKey().getPrivateKey()))
    if sessionVersion >= 3 and parameters.getTheirOneTimePreKey() is not None:
        secrets.extend(Curve.calculateAgreement(
            parameters.getTheirOneTimePreKey(),
            parameters.getOurBaseKey().getPrivateKey()))
    derivedKeys = RatchetingSession.calculateDerivedKeys(sessionVersion, secrets)
    sendingChain = derivedKeys.getRootKey().createChain(
        parameters.getTheirRatchetKey(), sendingRatchetKey)
    sessionState.addReceiverChain(parameters.getTheirRatchetKey(), derivedKeys.getChainKey())
    sessionState.setSenderChain(sendingRatchetKey, sendingChain[1])
    sessionState.setRootKey(sendingChain[0])
:type sessionState: SessionState :type sessionVersion: int :type parameters: AliceAxolotlParameters
5,926
def _rem_id_from_keys(self, pk, conn=None):
    if conn is None:
        conn = self._get_connection()
    conn.srem(self._get_ids_key(), pk)
_rem_id_from_keys - Remove a primary key from the model's internal set of primary keys.
5,927
def overlay_gateway_site_bfd_enable(self, **kwargs):
    config = ET.Element("config")
    overlay_gateway = ET.SubElement(config, "overlay-gateway",
                                    xmlns="urn:brocade.com:mgmt:brocade-tunnels")
    name_key = ET.SubElement(overlay_gateway, "name")
    name_key.text = kwargs.pop('name')  # kwarg names restored by inference from element names
    site = ET.SubElement(overlay_gateway, "site")
    name_key = ET.SubElement(site, "name")
    name_key.text = kwargs.pop('site')
    bfd_enable = ET.SubElement(site, "bfd-enable")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
5,928
def update(self, widget, widget_tree): self.listeners_list = [] self.build_widget_list_from_tree(widget_tree) self.label.set_text( + widget.attributes[]) self.container = gui.VBox(width=, height=) self.container.style[] = self.container.style[] = self.append(self.container, ) for (setOnEventListenerFuncname,setOnEventListenerFunc) in inspect.getmembers(widget): if hasattr(setOnEventListenerFunc, ): self.container.append( SignalConnection(widget, self.listeners_list, setOnEventListenerFuncname, setOnEventListenerFunc, width=) )
For the selected widget, the related signals are listed. For each signal there is a dropdown containing all the widgets; the user selects the widget that has to listen for the specific event.
5,929
def delete_char(self, e):
    self.l_buffer.delete_char(self.argument_reset)
    self.finalize()
u"""Delete the character at point. If point is at the beginning of the line, there are no characters in the line, and the last character typed was not bound to delete-char, then return EOF.
5,930
def _get_cpu_info_from_sysctl():
    try:
        if not DataSource.has_sysctl():
            return {}
        returncode, output = DataSource.sysctl_machdep_cpu_hw_cpufrequency()
        if output == None or returncode != 0:
            return {}
        # sysctl field names and output dict keys were lost in extraction;
        # they are restored from the py-cpuinfo sources this row appears to come from
        vendor_id = _get_field(False, output, None, None, 'machdep.cpu.vendor')
        processor_brand = _get_field(True, output, None, None, 'machdep.cpu.brand_string')
        cache_size = _get_field(False, output, None, None, 'machdep.cpu.cache.size')
        stepping = _get_field(False, output, int, 0, 'machdep.cpu.stepping')
        model = _get_field(False, output, int, 0, 'machdep.cpu.model')
        family = _get_field(False, output, int, 0, 'machdep.cpu.family')
        flags = _get_field(False, output, None, '', 'machdep.cpu.features').lower().split()
        flags.extend(_get_field(False, output, None, '', 'machdep.cpu.leaf7_features').lower().split())
        flags.extend(_get_field(False, output, None, '', 'machdep.cpu.extfeatures').lower().split())
        flags.sort()
        hz_advertised, scale = _parse_cpu_brand_string(processor_brand)
        hz_actual = _get_field(False, output, None, None, 'hw.cpufrequency')
        hz_actual = _to_decimal_string(hz_actual)
        info = {
            'vendor_id_raw': vendor_id,
            'brand_raw': processor_brand,
            'hz_advertised_friendly': _hz_short_to_friendly(hz_advertised, scale),
            'hz_actual_friendly': _hz_short_to_friendly(hz_actual, 0),
            'hz_advertised': _hz_short_to_full(hz_advertised, scale),
            'hz_actual': _hz_short_to_full(hz_actual, 0),
            'l2_cache_size': _to_friendly_bytes(cache_size),
            'stepping': stepping,
            'model': model,
            'family': family,
            'flags': flags
        }
        info = {k: v for k, v in info.items() if v}
        return info
    except:
        return {}
Returns the CPU info gathered from sysctl. Returns {} if sysctl is not found.
5,931
def createFileLink(self, resourceno): data = {: , : resourceno, : self.user_id, : self.useridx, } s, metadata = self.GET(, data) if s: print "URL: %s" % (metadata[]) return metadata[] else: print "Error createFileLink: %s" % (metadata) return False
Make a link of file If you don't know ``resourceno``, you'd better use ``getFileLink``. :param resourceno: Resource number of a file to create link :return: ``Shared url`` or ``False`` when failed to share a file
5,932
def generate(size, output, schema):
    pii_data = randomnames.NameList(size)
    if schema is not None:
        raise NotImplementedError
    randomnames.save_csv(
        pii_data.names,
        [f.identifier for f in pii_data.SCHEMA.fields],
        output)
Generate fake PII data for testing
5,933
def _build_metrics(func_name, namespace):
    # metric key names were lost in extraction; restored from the Airflow
    # CLI action-logging code this row appears to come from
    metrics = {'sub_command': func_name,
               'start_datetime': datetime.utcnow(),
               'full_command': '{}'.format(list(sys.argv)),
               'user': getpass.getuser()}
    assert isinstance(namespace, Namespace)
    tmp_dic = vars(namespace)
    metrics['dag_id'] = tmp_dic.get('dag_id')
    metrics['task_id'] = tmp_dic.get('task_id')
    metrics['execution_date'] = tmp_dic.get('execution_date')
    metrics['host_name'] = socket.gethostname()
    extra = json.dumps(dict((k, metrics[k]) for k in ('host_name', 'full_command')))
    log = Log(
        event='cli_{}'.format(func_name),
        task_instance=None,
        owner=metrics['user'],
        extra=extra,
        task_id=metrics.get('task_id'),
        dag_id=metrics.get('dag_id'),
        execution_date=metrics.get('execution_date'))
    metrics['log'] = log
    return metrics
Builds metrics dict from function args It assumes that function arguments is from airflow.bin.cli module's function and has Namespace instance where it optionally contains "dag_id", "task_id", and "execution_date". :param func_name: name of function :param namespace: Namespace instance from argparse :return: dict with metrics
5,934
def forcemerge(self, index=None, params=None):
    return self.transport.perform_request(
        "POST", _make_path(index, "_forcemerge"), params=params
    )
The force merge API allows to force merging of one or more indices through an API. The merge relates to the number of segments a Lucene index holds within each shard. The force merge operation allows to reduce the number of segments by merging them. This call will block until the merge is complete. If the http connection is lost, the request will continue in the background, and any new requests will block until the previous force merge is complete. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-forcemerge.html>`_ :arg index: A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'open', valid choices are: 'open', 'closed', 'none', 'all' :arg flush: Specify whether the index should be flushed after performing the operation (default: true) :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg max_num_segments: The number of segments the index should be merged into (default: dynamic) :arg only_expunge_deletes: Specify whether the operation should only expunge deleted documents :arg operation_threading: TODO: ?
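A hedged call sketch; `es` is an assumed elasticsearch-py client instance, and the options travel in the `params` dict because this wrapper only forwards `index` and `params`:
merged = es.indices.forcemerge(
    index='logs-2019',
    params={'max_num_segments': 1, 'only_expunge_deletes': 'false'})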
5,935
def start(self):
    timeout = self.timeout()
    if timeout is not None and timeout > 0:
        self.__loop.add_timeout(timedelta(0, timeout), self.stop)
    self.handler().setup_handler(self.loop())
    self.loop().start()
    self.handler().loop_stopped()
Set up handler and start loop :return: None
5,936
def actually_flatten(iterable):
    remainder = iter(iterable)
    while True:
        first = next(remainder)
        is_iter = isinstance(first, collections.Iterable)
        try:
            basestring
        except NameError:
            basestring = str
        if is_py3() and is_iter and not_a_string(first):
            remainder = IT.chain(first, remainder)
        elif (not is_py3()) and is_iter and not isinstance(first, basestring):
            remainder = IT.chain(first, remainder)
        else:
            yield polite_string(first)
Flatten iterables This is super ugly. There must be a cleaner py2/3 way of handling this.
5,937
def get(self, pk, **kwargs):
    item = self.datamodel.get(pk, self._base_filters)
    if not item:
        return self.response_404()
    _response = dict()
    _args = kwargs.get("rison", {})
    select_cols = _args.get(API_SELECT_COLUMNS_RIS_KEY, [])
    _pruned_select_cols = [col for col in select_cols if col in self.show_columns]
    self.set_response_key_mappings(
        _response, self.get, _args,
        **{API_SELECT_COLUMNS_RIS_KEY: _pruned_select_cols}
    )
    if _pruned_select_cols:
        _show_model_schema = self.model2schemaconverter.convert(_pruned_select_cols)
    else:
        _show_model_schema = self.show_model_schema
    _response["id"] = pk
    _response[API_RESULT_RES_KEY] = _show_model_schema.dump(item, many=False).data
    self.pre_get(_response)
    return self.response(200, **_response)
Get item from Model --- get: parameters: - in: path schema: type: integer name: pk - $ref: '#/components/parameters/get_item_schema' responses: 200: description: Item from Model content: application/json: schema: type: object properties: label_columns: type: object show_columns: type: array items: type: string description_columns: type: object show_title: type: string id: type: string result: $ref: '#/components/schemas/{{self.__class__.__name__}}.get' 400: $ref: '#/components/responses/400' 401: $ref: '#/components/responses/401' 404: $ref: '#/components/responses/404' 422: $ref: '#/components/responses/422' 500: $ref: '#/components/responses/500'
5,938
def ansi_split(text, _re=re.compile(u"(\x1b\\[(\\d*;?)*\\S)")):
    for part in _re.split(text):
        if part:
            yield (bool(_re.match(part)), part)
Yields (is_ansi, text)
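A small illustration of the generator (shape of the output only; the exact chunking depends on the regex groups):
for is_ansi, chunk in ansi_split(u"\x1b[31mred\x1b[0m"):
    print(is_ansi, repr(chunk))  # escape sequences flagged True, plain text False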
5,939
def _encode(self):
    data = ByteBuffer()
    if not hasattr(self, '__fields__'):  # attribute name restored by inference from the loop below
        return data.tostring()
    for field in self.__fields__:
        field.encode(self, data)
    return data.tostring()
Encode the message and return a bytestring.
5,940
def configure_settings():
    if not settings.configured:
        # literal values were lost in extraction; a standard in-memory sqlite
        # nose test configuration is assumed, and the app list is a placeholder
        db_config = {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': ':memory:',
        }
        settings.configure(
            TEST_RUNNER='django_nose.NoseTestSuiteRunner',
            NOSE_ARGS=['--nocapture', '--nologcapture'],
            DATABASES={
                'default': db_config,
            },
            INSTALLED_APPS=(
                'django.contrib.admin',
                'django.contrib.auth',
                'django.contrib.contenttypes',
                'django.contrib.sessions',
                'django.contrib.messages',
                'django.contrib.staticfiles',
            ),
            ROOT_URLCONF='urls',
            DEBUG=True,
            MIDDLEWARE_CLASSES=(),
        )
Configures settings for manage.py and for run_tests.py.
5,941
def addFull(self, vehID, routeID, typeID="DEFAULT_VEHTYPE", depart=None,
            departLane="first", departPos="base", departSpeed="0",
            arrivalLane="current", arrivalPos="max", arrivalSpeed="current",
            fromTaz="", toTaz="", line="", personCapacity=0, personNumber=0):
    messageString = struct.pack("!Bi", tc.TYPE_COMPOUND, 14)
    if depart is None:
        depart = str(self._connection.simulation.getCurrentTime() / 1000.)
    for val in (routeID, typeID, depart, departLane, departPos, departSpeed,
                arrivalLane, arrivalPos, arrivalSpeed, fromTaz, toTaz, line):
        messageString += struct.pack("!Bi", tc.TYPE_STRING, len(val)) + str(val).encode("latin1")
    messageString += struct.pack("!Bi", tc.TYPE_INTEGER, personCapacity)
    messageString += struct.pack("!Bi", tc.TYPE_INTEGER, personNumber)
    self._connection._beginMessage(
        tc.CMD_SET_VEHICLE_VARIABLE, tc.ADD_FULL, vehID, len(messageString))
    self._connection._string += messageString
    self._connection._sendExact()
Add a new vehicle (new style with all possible parameters)
5,942
def b58encode_int(i, default_one=True):
    if not i and default_one:
        return alphabet[0]
    string = ""
    while i:
        i, idx = divmod(i, 58)
        string = alphabet[idx] + string
    return string
Encode an integer using Base58
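Assuming the module-level `alphabet` is the usual Bitcoin Base58 alphabet (not shown in this row), a quick check:
alphabet = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
b58encode_int(3471844090)  # -> '6Ho7Hs'
b58encode_int(0)           # -> '1' (default_one=True maps zero to the first symbol)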
5,943
def column_mask(self):
    margin = compress_pruned(
        self._slice.margin(
            axis=0,
            weighted=False,
            include_transforms_for_dims=self._hs_dims,
            prune=self._prune,
        )
    )
    mask = margin < self._size
    if margin.shape == self._shape:
        return np.logical_or(np.zeros(self._shape, dtype=bool), mask)
ndarray, True where column margin <= min_base_size, same shape as slice.
5,944
def weld_str_replace(array, pat, rep):
    obj_id, weld_obj = create_weld_object(array)
    pat_id = get_weld_obj_id(weld_obj, pat)
    rep_id = get_weld_obj_id(weld_obj, rep)
    # the Weld IR template literal was lost in extraction; only the
    # formatting call below survives
    weld_template = ''
    weld_obj.weld_code = weld_template.format(array=obj_id, pat=pat_id, rep=rep_id)
    return weld_obj
Replace first occurrence of pat with rep. Parameters ---------- array : numpy.ndarray or WeldObject Input data. pat : str To find. rep : str To replace with. Returns ------- WeldObject Representation of this computation.
5,945
def webui_schematics_assets_asset_asset_type_image_base_64_image(self, **kwargs):
    config = ET.Element("config")
    webui = ET.SubElement(config, "webui", xmlns="http://tail-f.com/ns/webui")
    schematics = ET.SubElement(webui, "schematics")
    assets = ET.SubElement(schematics, "assets")
    asset = ET.SubElement(assets, "asset")
    name_key = ET.SubElement(asset, "name")
    name_key.text = kwargs.pop('name')  # kwarg names restored by inference from element names
    asset_type = ET.SubElement(asset, "asset-type")
    image = ET.SubElement(asset_type, "image")
    base_64_image = ET.SubElement(image, "base-64-image")
    base_64_image.text = kwargs.pop('base_64_image')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
5,946
def classe(self, name):
    for klass in self.classes():
        if klass.node.name == name:
            return klass
    raise KeyError(name)
return a class by its name, raise KeyError if not found
5,947
def _encode_binary(message, on=1, off=0):
    l = _encode_morse(message)
    s = ' '.join(l)
    l = list(s)
    # literals recovered to match the doctest in the docstring below
    bin_conv = {'.': [on], '-': [on] * 3, ' ': [off]}
    l = map(lambda symb: [off] + bin_conv[symb], l)
    lst = [item for sublist in l for item in sublist]
    return lst[1:]
>>> message = "SOS" >>> _encode_binary(message) [1, 0, 1, 0, 1, 0, 0, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 1, 0, 1, 0, 1] >>> _encode_binary(message, on='1', off='0') ['1', '0', '1', '0', '1', '0', '0', '0', '1', '1', '1', '0', '1', '1', '1', '0', '1', '1', '1', '0', '0', '0', '1', '0', '1', '0', '1']
5,948
def position_input(obj, visible=False):
    if not obj.generic_position.all():
        ObjectPosition.objects.create(content_object=obj)
    # context key names were lost in extraction; plausible names assumed
    return {'object': obj, 'visible': visible,
            'position': obj.generic_position.all()[0]}
Template tag to return an input field for the position of the object.
5,949
def render_html(html_str):
    import utool as ut
    from os.path import abspath
    import webbrowser
    try:
        html_str = html_str.decode()
    except Exception:
        pass
    # directory and file-name literals were lost in extraction; placeholders assumed
    html_dpath = ut.ensure_app_resource_dir('utool', 'temp_html')
    fpath = abspath(ut.unixjoin(html_dpath, 'temp.html'))
    url = 'file://' + fpath
    ut.writeto(fpath, html_str)
    webbrowser.open(url)
makes a temporary html rendering
5,950
def asdict(self, rawkey=False):
    if rawkey:
        return dict(self.items())
    else:
        return {str(k): v for k, v in self.items()}
r"""Convert Result to dict. Parameters: rawkey(bool): * True: dict key is Descriptor instance * False: dict key is str Returns: dict
5,951
def _get_interfaces(self):
    ios_cfg = self._get_running_config()
    parse = HTParser(ios_cfg)
    itfcs_raw = parse.find_lines("^interface GigabitEthernet")
    itfcs = [raw_if.strip().split()[1] for raw_if in itfcs_raw]
    LOG.debug("Interfaces on hosting device: %s", itfcs)
    return itfcs
Get a list of interfaces on this hosting device. :return: List of the interfaces
5,952
def _resolveAddress(address):
    # separator recovered to match the doctests in the docstring below
    address = address.split(':')
    assert len(address) in (1, 2)
    address[0] = socket.gethostbyname(address[0])
    return ':'.join(address)
Resolves the host in the given string. The input is of the form host[:port]. This method is idempotent, i.e. the host may already be a dotted IP address. >>> # noinspection PyProtectedMember >>> f=MesosBatchSystem._resolveAddress >>> f('localhost') '127.0.0.1' >>> f('127.0.0.1') '127.0.0.1' >>> f('localhost:123') '127.0.0.1:123' >>> f('127.0.0.1:123') '127.0.0.1:123'
5,953
def UpdateSNMPObjsAsync():
    if threading.active_count() == 1:
        LogMsg("Creating thread for UpdateSNMPObjs().")
        t = threading.Thread(target=UpdateSNMPObjs, name="UpdateSNMPObjsThread")
        t.daemon = True
        t.start()
    else:
        LogMsg("Data update still active, data update interval too low?")
Starts UpdateSNMPObjs() in a separate thread.
5,954
def excepthook(type, value, tb):
    if (issubclass(type, Error) or issubclass(type, lib50.Error)) and str(value):
        for line in str(value).split("\n"):
            cprint(str(line), "yellow")
    else:
        cprint(_("Sorry, something's wrong! Let [email protected] know!"), "yellow")
    if excepthook.verbose:
        traceback.print_exception(type, value, tb)
    cprint(_("Submission cancelled."), "red")
Report an exception.
5,955
def get_binary_path(executable, logging_level='INFO'):
    # several literals below ('start', '.exe', '.venv', the log level and the
    # f-string text) were lost in extraction and are restored as assumptions
    if sys.platform == 'win32':
        if executable == 'start':
            return executable
        executable = executable + '.exe'
        if executable in os.listdir():
            binary = os.path.join(os.getcwd(), executable)
        else:
            binary = next((os.path.join(path, executable)
                           for path in os.environ['PATH'].split(os.pathsep)
                           if os.path.isfile(os.path.join(path, executable))), None)
    else:
        venv_parent = get_venv_parent_path()
        venv_bin_path = os.path.join(venv_parent, '.venv', 'bin')
        if not venv_bin_path in os.environ.get('PATH'):
            if logging_level == 'DEBUG':
                print(f'Adding {venv_bin_path} to the PATH')
            os.environ['PATH'] = os.pathsep.join([os.environ['PATH'], venv_bin_path])
        binary = shutil.which(executable)
    return binary if binary else None
Gets the software name and returns the path of the binary.
5,956
def transform_vector_coorb_to_inertial(vec_coorb, orbPhase, quat_copr):
    vec_copr = rotate_in_plane(vec_coorb, -orbPhase)
    vec = transformTimeDependentVector(np.array([quat_copr]).T,
                                       np.array([vec_copr]).T).T[0]
    return np.array(vec)
Given a vector (of size 3) in coorbital frame, orbital phase in coprecessing frame and a minimal rotation frame quat, transforms the vector from the coorbital to the inertial frame.
5,957
def __kullback_leibler(h1, h2):
    result = h1.astype(scipy.float_)
    mask = h1 != 0
    result[mask] = scipy.multiply(h1[mask], scipy.log(h1[mask] / h2[mask]))
    return scipy.sum(result)
The actual KL implementation. @see kullback_leibler() for details. Expects the histograms to be of type scipy.ndarray.
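A worked example of the masking behaviour, using numpy arrays (which the scipy aliases above accept):
import numpy as np
h1 = np.array([0.5, 0.5, 0.0])
h2 = np.array([0.25, 0.25, 0.5])
# the zero bin of h1 is masked out, so the result is 0.5*ln(2) + 0.5*ln(2) ~= 0.6931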
5,958
def post_event(event, channel=None, username=None, api_url=None, hook=None):
    if not api_url:
        api_url = _get_api_url()
    if not hook:
        hook = _get_hook()
    if not username:
        username = _get_username()
    if not channel:
        channel = _get_channel()
    if not event:
        log.error('message is required')  # literal lost in extraction; assumed
    log.debug('Event: %s', event)          # format strings assumed
    log.debug('Event data: %s', event['data'])
    message = 'tag: {0}\r\n'.format(event['tag'])
    for key, value in six.iteritems(event['data']):
        message += '{0}: {1}\r\n'.format(key, value)
    result = post_message(channel, username, message, api_url, hook)
    return bool(result)
Send an event to a Mattermost channel. :param channel: The channel name, either will work. :param username: The username of the poster. :param event: The event to send to the Mattermost channel. :param api_url: The Mattermost api url, if not specified in the configuration. :param hook: The Mattermost hook, if not specified in the configuration. :return: Boolean if message was sent successfully.
5,959
def backprop(self, input_data, df_output, cache=None):
    # NOTE: the lines computing df_W, df_b and df_input did not survive
    # extraction; only the penalty terms below remain
    if self.l1_penalty_weight:
        df_W += self.l1_penalty_weight * sign(self.W)
    if self.l2_penalty_weight:
        df_W += self.l2_penalty_weight * self.W
    return (df_W, df_b), df_input
Backpropagate through the hidden layer **Parameters:** input_data : ``GPUArray`` Input data to compute activations for. df_output : ``GPUArray`` Gradients with respect to the activations of this layer (received from the layer above). cache : list of ``GPUArray`` Cache obtained from forward pass. If the cache is provided, then the activations are not recalculated. **Returns:** gradients : tuple of ``GPUArray`` Gradients with respect to the weights and biases in the form ``(df_weights, df_biases)``. df_input : ``GPUArray`` Gradients with respect to the input.
5,960
def has_symbol(self, symbol, as_of=None):
    try:
        self._read_metadata(symbol, as_of=as_of, read_preference=ReadPreference.PRIMARY)
        return True
    except NoDataFoundException:
        return False
Return True if the 'symbol' exists in this library AND the symbol isn't deleted in the specified as_of. It's possible for a deleted symbol to exist in older snapshots. Parameters ---------- symbol : `str` symbol name for the item as_of : `str` or int or `datetime.datetime` Return the data as it was as_of the point in time. `int` : specific version number `str` : snapshot name which contains the version `datetime.datetime` : the version of the data that existed as_of the requested point in time
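A hedged usage sketch; `library` is an assumed Arctic library handle, not shown in this row:
library.has_symbol('EURUSD')                     # latest state
library.has_symbol('EURUSD', as_of=3)            # specific version number
library.has_symbol('EURUSD', as_of='snap_2019')  # snapshot name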
5,961
def _create_object(self, data, request):
    # 'POST' restored: the comparison guards the post_factory branch
    if request.method.upper() == 'POST' and self.post_factory:
        fac_func = self.post_factory.create
    else:
        fac_func = self.factory.create
    if isinstance(data, (list, tuple)):
        return map(fac_func, data)
    else:
        return fac_func(data)
Create a python object from the given data. This will use ``self.factory`` object's ``create()`` function to create the data. If no factory is defined, this will simply return the same data that was given.
5,962
def mark_seen(self):
    data = self.get_selected_item()
    # key names ('is_new', 'object') restored by inference from the
    # mark_as_read/mark_as_unread calls
    if data['is_new']:
        with self.term.loader():
            data['object'].mark_as_read()
        if not self.term.loader.exception:
            data['is_new'] = False
    else:
        with self.term.loader():
            data['object'].mark_as_unread()
        if not self.term.loader.exception:
            data['is_new'] = True
Mark the selected message or comment as seen.
5,963
def getSlaveStatus(self): info_dict = {} if self.checkVersion(): cols = [, , , , , , , , , , , , ,] cur = self._conn.cursor() cur.execute( % .join(cols)) rows = cur.fetchall() for row in rows: info_dict[row[0]] = dict(zip(cols[1:], row[1:])) else: return None return info_dict
Returns status of replication slaves. @return: Dictionary of status items.
5,964
def addition_circuit(
        addend0: Qubits,
        addend1: Qubits,
        carry: Qubits) -> Circuit:
    if len(addend0) != len(addend1):
        raise ValueError('Number of addend qubits must be equal')  # message text assumed
    if len(carry) != 2:
        raise ValueError('Expected 2 carry qubits')                # message text assumed

    def _maj(qubits: Qubits) -> Circuit:
        q0, q1, q2 = qubits
        circ = Circuit()
        circ += CNOT(q2, q1)
        circ += CNOT(q2, q0)
        circ += CCNOT(q0, q1, q2)
        return circ

    def _uma(qubits: Qubits) -> Circuit:
        q0, q1, q2 = qubits
        circ = Circuit()
        circ += CCNOT(q0, q1, q2)
        circ += CNOT(q2, q0)
        circ += CNOT(q0, q1)
        return circ

    qubits = ([carry[0]]
              + list(chain.from_iterable(zip(reversed(addend1), reversed(addend0))))
              + [carry[1]])
    circ = Circuit()
    for n in range(0, len(qubits) - 3, 2):
        circ += _maj(qubits[n:n + 3])
    circ += CNOT(qubits[-2], qubits[-1])
    for n in reversed(range(0, len(qubits) - 3, 2)):
        circ += _uma(qubits[n:n + 3])
    return circ
Returns a quantum circuit for ripple-carry addition. [Cuccaro2004]_ Requires two carry qubit (input and output). The result is returned in addend1. .. [Cuccaro2004] A new quantum ripple-carry addition circuit, Steven A. Cuccaro, Thomas G. Draper, Samuel A. Kutin, David Petrie Moulton arXiv:quant-ph/0410184 (2004)
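A sketch of building the circuit; the qubit labels are arbitrary, and only addition_circuit itself comes from this row:
addend0 = [0, 1]  # two-qubit register a
addend1 = [2, 3]  # two-qubit register b; receives a + b
carry = [4, 5]    # input and output carry
circ = addition_circuit(addend0, addend1, carry)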
5,965
def get_text_field_mask(text_field_tensors: Dict[str, torch.Tensor],
                        num_wrapping_dims: int = 0) -> torch.LongTensor:
    if "mask" in text_field_tensors:
        return text_field_tensors["mask"]
    tensor_dims = [(tensor.dim(), tensor) for tensor in text_field_tensors.values()]
    tensor_dims.sort(key=lambda x: x[0])
    smallest_dim = tensor_dims[0][0] - num_wrapping_dims
    if smallest_dim == 2:
        token_tensor = tensor_dims[0][1]
        return (token_tensor != 0).long()
    elif smallest_dim == 3:
        character_tensor = tensor_dims[0][1]
        return ((character_tensor > 0).long().sum(dim=-1) > 0).long()
    else:
        raise ValueError("Expected a tensor with dimension 2 or 3, found {}".format(smallest_dim))
Takes the dictionary of tensors produced by a ``TextField`` and returns a mask with 0 where the tokens are padding, and 1 otherwise. We also handle ``TextFields`` wrapped by an arbitrary number of ``ListFields``, where the number of wrapping ``ListFields`` is given by ``num_wrapping_dims``. If ``num_wrapping_dims == 0``, the returned mask has shape ``(batch_size, num_tokens)``. If ``num_wrapping_dims > 0`` then the returned mask has ``num_wrapping_dims`` extra dimensions, so the shape will be ``(batch_size, ..., num_tokens)``. There could be several entries in the tensor dictionary with different shapes (e.g., one for word ids, one for character ids). In order to get a token mask, we use the tensor in the dictionary with the lowest number of dimensions. After subtracting ``num_wrapping_dims``, if this tensor has two dimensions we assume it has shape ``(batch_size, ..., num_tokens)``, and use it for the mask. If instead it has three dimensions, we assume it has shape ``(batch_size, ..., num_tokens, num_features)``, and sum over the last dimension to produce the mask. Most frequently this will be a character id tensor, but it could also be a featurized representation of each token, etc. If the input ``text_field_tensors`` contains the "mask" key, this is returned instead of inferring the mask. TODO(joelgrus): can we change this? NOTE: Our functions for generating masks create torch.LongTensors, because using torch.ByteTensors makes it easy to run into overflow errors when doing mask manipulation, such as summing to get the lengths of sequences - see below. >>> mask = torch.ones([260]).byte() >>> mask.sum() # equals 260. >>> var_mask = torch.autograd.V(mask) >>> var_mask.sum() # equals 4, due to 8 bit precision - the sum overflows.
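A tiny concrete case of the two-dimensional branch (word ids only, 0 = padding):
import torch
tensors = {'tokens': torch.tensor([[3, 5, 0], [2, 0, 0]])}
get_text_field_mask(tensors)  # tensor([[1, 1, 0], [1, 0, 0]])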
5,966
def _setup_watch(self, alias, path, flags):
    assert alias not in self.descriptors, "Registering alias %s twice!" % alias
    wd = LibC.inotify_add_watch(self._fd, path, flags)
    if wd < 0:
        raise IOError("Error setting up watch on %s with flags %s: wd=%s" % (
            path, flags, wd))
    self.descriptors[alias] = wd
    self.aliases[wd] = alias
Actual rule setup.
5,967
def _search(self, searchfilter, attrs, basedn):
    if attrs == NO_ATTR:
        attrlist = []
    elif attrs == DISPLAYED_ATTRS:
        attrlist = self.attrlist
    elif attrs == LISTED_ATTRS:
        attrlist = self.attrlist
    elif attrs == ALL_ATTRS:
        attrlist = None
    else:
        attrlist = None
    self._logger(
        severity=logging.DEBUG,
        # placeholder names restored by inference from the message text
        msg="%(backend)s: executing search with filter '%(filter)s' in DN '%(dn)s'" % {
            'backend': self.backend_name,
            'dn': basedn,
            'filter': self._uni(searchfilter)
        }
    )
    ldap_client = self._bind()
    try:
        r = ldap_client.search_s(
            basedn,
            ldap.SCOPE_SUBTREE,
            searchfilter,
            attrlist=attrlist
        )
    except Exception as e:
        ldap_client.unbind_s()
        self._exception_handler(e)
    ldap_client.unbind_s()
    ret = []
    for entry in r:
        uni_dn = self._uni(entry[0])
        uni_attrs = {}
        for attr in entry[1]:
            if type(entry[1][attr]) is list:
                tmp = []
                for value in entry[1][attr]:
                    tmp.append(self._uni(value))
            else:
                tmp = self._uni(entry[1][attr])
            uni_attrs[self._uni(attr)] = tmp
        ret.append((uni_dn, uni_attrs))
    return ret
Generic search
5,968
def dumplist(args):
    from .query import Database
    db = Database()
    r = db.objects(
        protocol=args.protocol,
        purposes=args.purpose,
        model_ids=(args.client,),
        groups=args.group,
        classes=args.sclass
    )
    output = sys.stdout
    if args.selftest:
        from bob.db.utils import null
        output = null()
    for f in r:
        output.write('%s\n' % (f.make_path(args.directory, args.extension),))  # format string restored by inference
    return 0
Dumps lists of files based on your criteria
5,969
def add_property(self, c_property_tuple, sync=True): LOGGER.debug("Container.add_property") if c_property_tuple[1] is None: LOGGER.debug("Property " + c_property_tuple[0] + " has None value. Ignore.") return if not sync or self.id is None: self.properties_2_add.append(c_property_tuple) else: property_param = DriverTools.property_params(c_property_tuple[0], c_property_tuple[1]) params = SessionService.complete_transactional_req({: self.id}) if MappingService.driver_type != DriverFactory.DRIVER_REST: params[] = params[] = json.dumps(property_param) args = {: params} else: params[] = property_param[] params[] = property_param[] if in property_param: params[] = property_param[] args = {: , : , : params} response = ContainerService.requester.call(args) if MappingService.driver_type != DriverFactory.DRIVER_REST: response = response.get() if response.rc != 0: LOGGER.warning( + self.name + + str(response.response_content) + + str(response.error_message) + " (" + str(response.rc) + ")" ) if response.rc == 500 and ArianeMappingOverloadError.ERROR_MSG in response.error_message: raise ArianeMappingOverloadError("Container.add_property", ArianeMappingOverloadError.ERROR_MSG) else: self.sync()
Add a property to this container. If this container has no id, behaves as if sync=False. :param c_property_tuple: property tuple defined like this: => property name = c_property_tuple[0] => property value = c_property_tuple[1] :param sync: if sync=True (default), synchronize with the Ariane server; if sync=False, add the property tuple to the list to be added on the next save(). :return:
5,970
def serialize(self, value):
    if isinstance(value, list):
        return self.list_sep.join(
            _helpers.str_or_unicode(x.name) for x in value)
    else:
        return _helpers.str_or_unicode(value.name)
See base class.
5,971
def appendBitPadding(str, blocksize=AES_blocksize):
    pad_len = paddingLength(len(str), blocksize) - 1
    padding = chr(0x80) + chr(0x00) * pad_len  # 0x80 then zero bytes, per the docstring
    return str + padding
Bit padding a.k.a. One and Zeroes Padding. A single set ('1') bit is added to the message and then as many reset ('0') bits as required (possibly none) are added. Input: (str) str - String to be padded (int) blocksize - block size of the algorithm Return: Padded string according to the ANSI X.923 standard. Used when padding bit strings. 0x80 in binary is 10000000 0x00 in binary is 00000000 Defined in ANSI X.923 (based on NIST Special Publication 800-38A) and ISO/IEC 9797-1 as Padding Method 2. Used in hash functions MD5 and SHA, described in RFC 1321 step 3.1.
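A worked example, assuming paddingLength(6, 8) returns 2 (pad to the next block boundary):
padded = appendBitPadding('YELLOW', blocksize=8)
# 'YELLOW' + '\x80' + '\x00' -> 8 bytes, i.e. len(padded) % 8 == 0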
5,972
def filter_factory(global_conf, **local_conf):
    conf = global_conf.copy()
    conf.update(local_conf)

    def blacklist(app):
        return BlacklistFilter(app, conf)
    return blacklist
Returns a WSGI filter app for use with paste.deploy.
5,973
def end_prov_graph(self):
    endTime = Literal(datetime.now())
    self.prov_g.add((self.entity_d, self.prov.generatedAtTime, endTime))
    self.prov_g.add((self.activity, self.prov.endedAtTime, endTime))
Finalize prov recording with end time
5,974
def subspace_detector_plot(detector, stachans, size, **kwargs): import matplotlib.pyplot as plt if stachans == and not detector.multiplex: stachans = detector.stachans elif detector.multiplex: stachans = [(, )] if np.isinf(detector.dimension): msg = .join([, ]) warnings.warn(msg) nrows = detector.v[0].shape[1] else: nrows = detector.dimension fig, axes = plt.subplots(nrows=nrows, ncols=len(stachans), sharex=True, sharey=True, figsize=size) x = np.arange(len(detector.u[0]), dtype=np.float32) if detector.multiplex: x /= len(detector.stachans) * detector.sampling_rate else: x /= detector.sampling_rate for column, stachan in enumerate(stachans): channel = detector.u[column] for row, vector in enumerate(channel.T[0:nrows]): if len(stachans) == 1: if nrows == 1: axis = axes else: axis = axes[row] else: axis = axes[row, column] if row == 0: axis.set_title(.join(stachan)) axis.plot(x, vector, , linewidth=1.1) if column == 0: axis.set_ylabel( % (row + 1), rotation=0) if row == nrows - 1: axis.set_xlabel() axis.set_yticks([]) plt.subplots_adjust(hspace=0.05) plt.subplots_adjust(wspace=0.05) fig = _finalise_figure(fig=fig, **kwargs) return fig
Plotting for the subspace detector class. Plot the output basis vectors for the detector at the given dimension. Corresponds to the first n horizontal vectors of the V matrix. :type detector: :class:`eqcorrscan.core.subspace.Detector` :type stachans: list :param stachans: list of tuples of station, channel pairs to plot. :type stachans: list :param stachans: List of tuples of (station, channel) to use. Can set\ to 'all' to use all the station-channel pairs available. If \ detector is multiplexed, will just plot that. :type size: tuple :param size: Figure size. :returns: Figure :rtype: matplotlib.pyplot.Figure .. rubric:: Example >>> from eqcorrscan.core import subspace >>> import os >>> detector = subspace.Detector() >>> detector.read(os.path.join( ... os.path.abspath(os.path.dirname(__file__)), ... '..', 'tests', 'test_data', 'subspace', ... 'stat_test_detector.h5')) Detector: Tester >>> subspace_detector_plot(detector=detector, stachans='all', size=(10, 7), ... show=True) # doctest: +SKIP .. plot:: from eqcorrscan.core import subspace from eqcorrscan.utils.plotting import subspace_detector_plot import os print('running subspace plot') detector = subspace.Detector() detector.read(os.path.join('..', '..', '..', 'tests', 'test_data', 'subspace', 'stat_test_detector.h5')) subspace_detector_plot(detector=detector, stachans='all', size=(10, 7), show=True)
5,975
def screenshot(self, png_filename=None, format='raw'):
    # endpoint path and default format restored by inference from the docstring
    value = self.http.get('/screenshot').value
    raw_value = base64.b64decode(value)
    png_header = b"\x89PNG\r\n\x1a\n"
    if not raw_value.startswith(png_header) and png_filename:
        raise WDAError(-1, "screenshot png format error")
    if png_filename:
        with open(png_filename, 'wb') as f:
            f.write(raw_value)
    if format == 'raw':
        return raw_value
    elif format == 'pillow':
        from PIL import Image
        buff = io.BytesIO(raw_value)
        return Image.open(buff)
    else:
        raise ValueError("unknown format")
Screenshot with PNG format Args: png_filename(string): optional, save file name format(string): return format, pillow or raw(default) Returns: raw data or PIL.Image Raises: WDAError
5,976
def getNextSample(self, V):
    randPos = random.randint(0, len(V) - 2)
    W = copy.deepcopy(V)
    d = V[randPos]
    c = V[randPos + 1]
    W[randPos] = c
    W[randPos + 1] = d
    prMW = 1
    prMV = 1
    prob = min(1.0, (prMW / prMV) * pow(self.phi, self.wmg[d][c])) / 2
    if random.random() <= prob:
        V = W
    return V
Generate the next sample by randomly flipping two adjacent candidates. :ivar list<int> V: Contains integer representations of each candidate in order of their ranking in a vote, from first to last. This is the current sample.
5,977
def control_group(action, action_space, control_group_act, control_group_id):
    del action_space
    select = action.action_ui.control_group
    select.action = control_group_act
    select.control_group_index = control_group_id
Act on a control group, selecting, setting, etc.
5,978
async def set_power(
        self, power_type: str,
        power_parameters: typing.Mapping[str, typing.Any] = {}):
    data = await self._handler.update(
        system_id=self.system_id, power_type=power_type,
        power_parameters=power_parameters)
    self.power_type = data['power_type']  # key name restored by inference
Set the power type and power parameters for this node.
5,979
def get_selected_files(self, pipeline=, forfile=None, quiet=0, allowedfileformats=): file_dict = dict(self.bids_tags) if allowedfileformats == : allowedfileformats = [, ] if forfile: if isinstance(forfile, str): forfile = get_bids_tag(forfile, ) for n in forfile.keys(): file_dict[n] = [forfile[n]] non_entries = [] for n in file_dict: if not file_dict[n]: non_entries.append(n) for n in non_entries: file_dict.pop(n) file_components = [] for k in [, , , ]: if k in file_dict: file_components.append([k + + t for t in file_dict[k]]) file_list = list(itertools.product(*file_components)) if pipeline == : mdir = self.BIDS_dir + + self.pipeline elif pipeline == and self.confound_pipeline: mdir = self.BIDS_dir + + self.confound_pipeline elif pipeline == : mdir = self.BIDS_dir + + self.pipeline elif pipeline == : mdir = self.BIDS_dir + + teneto.__version__ else: raise ValueError() found_files = [] for f in file_list: wdir = str(mdir) sub = [t for t in f if t.startswith()] ses = [t for t in f if t.startswith()] wdir += + sub[0] + if ses: wdir += + ses[0] + wdir += if pipeline == : wdir += + self.pipeline_subdir + fileending = [self.bids_suffix + f for f in allowedfileformats] elif pipeline == : wdir += fileending = [ + f for f in allowedfileformats] elif pipeline == : fileending = [ + f for f in allowedfileformats] if os.path.exists(wdir): found = [] for ff in os.listdir(wdir): ftags = get_bids_tag(ff, ) t = [t for t in ftags if t in file_dict and ftags[t] in file_dict[t]] if len(t) == len(file_dict): found.append(ff) found = [f for f in found for e in fileending if f.endswith(e)] if pipeline == : found = [i for i in found if in i] else: found = [i for i in found if not in i] found = list( map(str.__add__, [re.sub(, , wdir)]*len(found), found)) found = [i for i in found if not any( [bf in i for bf in self.bad_files])] if found: found_files += found if quiet == -1: print(wdir) found_files = list(set(found_files)) if quiet == 0: print(found_files) return found_files
Parameters ---------- pipeline : string can be 'pipeline' (main analysis pipeline, self in tnet.set_pipeline) or 'confound' (where confound files are, set in tnet.set_confound_pipeline()), 'functionalconnectivity' quiet: int If 1, prints results. If 0, no results printed. forfile : str or dict A filename or dictionary of file tags. If this is set, only files that match that subject accepted_fileformat : list list of file formats that are acceptable. Default list is: ['.tsv', '.nii.gz'] Returns ------- found_files : list The files which are currently selected with the currently set pipeline, pipeline_subdir, space, parcellation, tasks, runs, subjects etc. These are the files that will generally be used if calling a make_ function.
5,980
def get_curent_module_classes(module):
    classes = []
    for name, obj in inspect.getmembers(module):
        if inspect.isclass(obj):
            classes.append(obj)
    return classes
Get all classes of the given module. :param module: the module :return: list of classes
5,981
def has_image(self, name: str) -> bool:
    path = "docker/images/{}".format(name)
    r = self.__api.head(path)
    if r.status_code == 204:
        return True
    elif r.status_code == 404:
        return False
    self.__api.handle_erroneous_response(r)
Determines whether the server has a Docker image with a given name.
5,982
def effectiv_num_data_points(self, kwargs_lens, kwargs_source, kwargs_lens_light, kwargs_ps):
    num_linear = 0
    if self._image_likelihood is True:
        num_linear = self.image_likelihood.num_param_linear(
            kwargs_lens, kwargs_source, kwargs_lens_light, kwargs_ps)
    num_param, _ = self.param.num_param()
    return self.num_data - num_param - num_linear
Returns the effective number of data points considered in the chi^2 estimation, used to compute the reduced chi^2 value.
5,983
def get(self, request, template_id, view_type):
    template = get_object_or_404(EnrollmentNotificationEmailTemplate, pk=template_id)
    if view_type not in self.view_type_contexts:
        return HttpResponse(status=404)
    base_context = self.view_type_contexts[view_type].copy()
    base_context.update({'user_name': self.get_user_name(request)})  # key name assumed
    return HttpResponse(template.render_html_template(base_context),
                        content_type='text/html')
Render the given template with the stock data.
5,984
def ngrams(path, elem, ignore_hash=True):
    grams = GramGenerator(path, elem, ignore_hash=ignore_hash)
    return FeatureSet({k: Feature(f) for k, f in grams})
Yields N-grams from a JSTOR DfR dataset. Parameters ---------- path : string Path to unzipped JSTOR DfR folder containing N-grams. elem : string Name of subdirectory containing N-grams. (e.g. 'bigrams'). ignore_hash : bool If True, will exclude all N-grams that contain the hash '#' character. Returns ------- ngrams : :class:`.FeatureSet`
5,985
def _validate_cidr(self, rule):
    try:
        # the rule key was lost in extraction; 'app' is restored by inference
        # from the Foremast sources this row appears to come from
        network = ipaddress.IPv4Network(rule['app'])
    except (ipaddress.NetmaskValueError, ValueError) as error:
        raise SpinnakerSecurityGroupCreationFailed(error)
    self.log.debug('Valid CIDR: %s', network.exploded)
    return True
Validate the cidr block in a rule. Returns: True: Upon successful completion. Raises: SpinnakerSecurityGroupCreationFailed: CIDR definition is invalid or the network range is too wide.
5,986
def _write_ctrl_meas(self):
    self._write_register_byte(_BME280_REGISTER_CTRL_HUM, self.overscan_humidity)
    self._write_register_byte(_BME280_REGISTER_CTRL_MEAS, self._ctrl_meas)
Write the values to the ctrl_meas and ctrl_hum registers in the device. ctrl_meas sets the pressure and temperature data acquisition options; ctrl_hum sets the humidity oversampling and must be written to first.
5,987
def __initialize(self, sample):
    self.__processed = [False] * len(sample)
    self.__optics_objects = [optics_descriptor(i) for i in range(len(sample))]
    self.__ordered_database = []
    self.__clusters = None
    self.__noise = None
! @brief Initializes internal states and resets clustering results in line with input sample.
5,988
def emit(self, record):
    try:
        if self.check_base_filename(record):
            self.build_base_filename()
        FileHandler.emit(self, record)
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        self.handleError(record)
Emit a record. Always check time
5,989
def _isbn_cleanse(isbn, checksum=True):
    # error-message literals were lost in extraction; the wordings below are
    # approximations restored from the pyisbn sources this row appears to come from
    if not isinstance(isbn, string_types):
        raise TypeError('ISBN must be a string, received %r' % isbn)
    if PY2 and isinstance(isbn, str):
        isbn = unicode(isbn)
        uni_input = False
    else:
        uni_input = True
    for dash in DASHES:
        isbn = isbn.replace(dash, unicode(''))
    if checksum:
        if not isbn[:-1].isdigit():
            raise IsbnError('non-digit parts')
        if len(isbn) == 9:
            isbn = '0' + isbn
        if len(isbn) == 10:
            if not (isbn[-1].isdigit() or isbn[-1] in 'Xx'):
                raise IsbnError('non-digit or X checksum')
        elif len(isbn) == 13:
            if not isbn[-1].isdigit():
                raise IsbnError('non-digit checksum')
            if not isbn.startswith(('978', '979')):
                raise IsbnError('invalid Bookland region')
        else:
            raise IsbnError('ISBN must be either 10 or 13 characters long')
    else:
        if len(isbn) == 8:
            isbn = '0' + isbn
        elif len(isbn) == 12 and not isbn[:3].startswith(('978', '979')):
            raise IsbnError('invalid Bookland region')
        if not isbn.isdigit():
            raise IsbnError('non-digit parts')
        if not len(isbn) in (9, 12):
            raise IsbnError('ISBN must be either 9 or 12 characters long without checksum')
    if PY2 and not uni_input:
        return str(isbn)
    else:
        return isbn
Check ISBN is a string, and passes basic sanity checks. Args: isbn (str): SBN, ISBN-10 or ISBN-13 checksum (bool): ``True`` if ``isbn`` includes checksum character Returns: ``str``: ISBN with hyphenation removed, including when called with a SBN Raises: TypeError: ``isbn`` is not a ``str`` type IsbnError: Incorrect length for ``isbn`` IsbnError: Incorrect SBN or ISBN formatting
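Two quick examples of the normalisation (hyphen stripping and SBN zero-padding):
_isbn_cleanse('978-3-16-148410-0')         # -> '9783161484100'
_isbn_cleanse('12345678', checksum=False)  # -> '012345678' (SBN gets a leading zero)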
5,990
def get_graph_by_most_recent(self, name: str) -> Optional[BELGraph]:
    network = self.get_most_recent_network_by_name(name)
    if network is None:
        return
    return network.as_bel()
Get the most recently created network with the given name as a :class:`pybel.BELGraph`.
5,991
def arccalibration(wv_master, xpos_arc, naxis1_arc, crpix1,
                   wv_ini_search, wv_end_search, wvmin_useful, wvmax_useful,
                   error_xpos_arc, times_sigma_r, frac_triplets_for_sum,
                   times_sigma_theil_sen, poly_degree_wfit,
                   times_sigma_polfilt, times_sigma_cook,
                   times_sigma_inclusion, geometry=None, debugplot=0):
    ntriplets_master, ratios_master_sorted, triplets_master_sorted_list = \
        gen_triplets_master(wv_master=wv_master, geometry=geometry,
                            debugplot=debugplot)
    list_of_wvfeatures = arccalibration_direct(
        wv_master=wv_master,
        ntriplets_master=ntriplets_master,
        ratios_master_sorted=ratios_master_sorted,
        triplets_master_sorted_list=triplets_master_sorted_list,
        xpos_arc=xpos_arc,
        naxis1_arc=naxis1_arc,
        crpix1=crpix1,
        wv_ini_search=wv_ini_search,
        wv_end_search=wv_end_search,
        wvmin_useful=wvmin_useful,
        wvmax_useful=wvmax_useful,
        error_xpos_arc=error_xpos_arc,
        times_sigma_r=times_sigma_r,
        frac_triplets_for_sum=frac_triplets_for_sum,
        times_sigma_theil_sen=times_sigma_theil_sen,
        poly_degree_wfit=poly_degree_wfit,
        times_sigma_polfilt=times_sigma_polfilt,
        times_sigma_cook=times_sigma_cook,
        times_sigma_inclusion=times_sigma_inclusion,
        geometry=geometry,
        debugplot=debugplot)
    return list_of_wvfeatures
Performs arc line identification for arc calibration. This function is a wrapper around two functions, responsible respectively for computing all the relevant information concerning the triplets generated from the master table, and for the actual identification procedure of the arc lines. Separating those computations into two different functions avoids repeated calls to the first function when calibrating several arcs with the same master table.

Parameters
----------
wv_master : 1d numpy array, float
    Array with wavelengths corresponding to the master table (Angstroms).
xpos_arc : 1d numpy array, float
    Location of arc lines (pixels).
naxis1_arc : int
    NAXIS1 for arc spectrum.
crpix1 : float
    CRPIX1 value to be employed in the wavelength calibration.
wv_ini_search : float
    Minimum expected wavelength in spectrum.
wv_end_search : float
    Maximum expected wavelength in spectrum.
wvmin_useful : float
    If not None, this value is used to clip detected lines below it.
wvmax_useful : float
    If not None, this value is used to clip detected lines above it.
error_xpos_arc : float
    Error in arc line position (pixels).
times_sigma_r : float
    Times sigma to search for valid line position ratios.
frac_triplets_for_sum : float
    Fraction of distances to different triplets to sum when computing the cost function.
times_sigma_theil_sen : float
    Number of times the (robust) standard deviation around the linear fit (using the Theil-Sen method) to reject points.
poly_degree_wfit : int
    Degree for polynomial fit to wavelength calibration.
times_sigma_polfilt : float
    Number of times the (robust) standard deviation around the polynomial fit to reject points.
times_sigma_cook : float
    Number of times the standard deviation of Cook's distances to detect outliers. If zero, this method of outlier detection is ignored.
times_sigma_inclusion : float
    Number of times the (robust) standard deviation around the polynomial fit to include a new line in the set of identified lines.
geometry : tuple (4 integers) or None
    x, y, dx, dy values employed to set the window geometry.
debugplot : int
    Determines whether intermediate computations and/or plots are displayed. The valid codes are defined in numina.array.display.pause_debugplot.

Returns
-------
list_of_wvfeatures : list (of WavecalFeature instances)
    A list of size equal to the number of identified lines, whose elements are instances of the class WavecalFeature, containing all the relevant information concerning the line identification.
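A hedged call sketch with typical values for the tuning parameters; the master wavelengths and line positions below are invented, and the numeric settings mirror the parameter descriptions above rather than any documented preset:

import numpy as np

wv_master = np.array([3650.2, 4046.6, 4358.3, 5460.7, 5769.6])  # Angstroms
xpos_arc = np.array([120.3, 410.8, 640.1, 1450.9, 1678.4])      # pixels

features = arccalibration(
    wv_master=wv_master, xpos_arc=xpos_arc, naxis1_arc=2048, crpix1=1.0,
    wv_ini_search=3500.0, wv_end_search=6000.0,
    wvmin_useful=None, wvmax_useful=None,
    error_xpos_arc=0.3, times_sigma_r=3.0, frac_triplets_for_sum=0.5,
    times_sigma_theil_sen=10.0, poly_degree_wfit=3,
    times_sigma_polfilt=10.0, times_sigma_cook=10.0,
    times_sigma_inclusion=10.0)
print(len(features), 'lines identified')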
5,992
def _invertMapping(mapping): invertedMapping = ddict(set) for key, values in viewitems(mapping): for value in values: invertedMapping[value].add(key) return invertedMapping
Inverts a protein-to-peptide or peptide-to-protein mapping. :param mapping: dict in which each key maps to a set of entries :returns: an inverted mapping in which each entry of the original value sets points to the set of initial keys.
5,993
def _doc(from_func): def decorator(to_func): to_func.__doc__ = from_func.__doc__ return to_func return decorator
Copy the docstring from one function to another. Use as a decorator, e.g.::

    @_doc(file.tell)
    def tell(..):
        ...
5,994
def configure(): completer = Completer() readline.set_completer_delims('\t') readline.parse_and_bind('tab: complete') readline.set_completer(completer.path_completer) home = os.path.expanduser('~') if os.path.isfile(os.path.join(home, '.transfer', 'config.yaml')): with open(os.path.join(home, '.transfer', 'config.yaml'), 'r') as fp: config = yaml.load(fp.read()) else: config = [] project_name = input('Name your project: ') existing_project = None for project in config: if project_name == project['name']: existing_project = project_name if existing_project is not None: print(colored('Project ' + project_name + ' already exists', 'red')) overwrite = str_input('Would you like to overwrite it? (yes or no) ', ['yes', 'no']) if overwrite == 'no': return else: config = [project for project in config if project_name != project['name']] image_path = os.path.expanduser(input('Select parent directory of your images: ')) path_unset = True while path_unset: project_path = os.path.expanduser(input('Select destination for your project: ')) if (project_path.find(image_path) == 0): print('Project destination must not be inside the image directory!') else: path_unset = False print('Select model architecture:') print('[0] resnet50') print('[1] xception') print('[2] inception_v3') architecture = int_input('choice', 0, 2, show_range = False) if architecture == 0: arch = 'resnet50' img_dim = 224 conv_dim = 7 final_cutoff = 80 elif architecture == 1: arch = 'xception' img_dim = 299 conv_dim = 10 final_cutoff = 80 else: arch = 'inception_v3' img_dim = 299 conv_dim = 8 final_cutoff = 80 api_port = int_input('port for the local prediction API', 1024, 49151) kfold = int_input('number of cross-validation folds', 3, 10) kfold_every = bool_input('Fit a model for every fold? (if no, a single fold is fit)') print('Note: plotting a confusion matrix requires a display.') plot_cm = bool_input('Plot a confusion matrix after training?') batch_size = int_input('batch size', 1, 64) learning_rate = float_input('learning rate', 0, 1) learning_rate_decay = float_input('learning rate decay', 0, 1) cycle = int_input('number of epochs per learning-rate cycle', 1, 10) num_rounds = int_input('number of training rounds', 1, 100) print('Select image resolution:') print('[0] low (' + str(img_dim) + ' px)') print('[1] mid (' + str(img_dim * 2) + ' px)') print('[2] high (' + str(img_dim * 4) + ' px)') img_resolution_index = int_input('choice', 0, 2, show_range = False) if img_resolution_index == 0: img_size = 1 elif img_resolution_index == 1: img_size = 2 else: img_size = 4 use_augmentation = str_input('Would you like to add image augmentation? (yes or no) ', ['yes', 'no']) if use_augmentation == 'yes': augmentations = select_augmentations() else: augmentations = None project = {'name': project_name, 'img_path': image_path, 'path': project_path, 'plot': plot_cm, 'api_port': api_port, 'kfold': kfold, 'kfold_every': kfold_every, 'cycle': cycle, 'seed': np.random.randint(9999), 'batch_size': batch_size, 'learning_rate': learning_rate, 'learning_rate_decay': learning_rate_decay, 'final_cutoff': final_cutoff, 'rounds': num_rounds, 'img_size': img_size, 'augmentations': augmentations, 'architecture': arch, 'img_dim': img_dim, 'conv_dim': conv_dim, 'is_split': False, 'is_array': False, 'is_augmented': False, 'is_pre_model': False, 'is_final': False, 'model_round': 0, 'server_weights': None, 'last_weights': None, 'best_weights': None} config.append(project) store_config(config) print('') print(colored('Project ' + project_name + ' saved!', 'cyan')) print('') print('To run the full pipeline, enter:') print('') print(colored('    transfer --run --project ' + project_name, 'green')) print('or') print(colored('    transfer -r -p ' + project_name, 'green'))
Configure the transfer environment and store the resulting project configuration.
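A small sketch of reading back what configure() stores. The config location (~/.transfer/config.yaml) and the key names are assumptions reconstructed from the code above, not documented API:

import os
import yaml

config_path = os.path.join(os.path.expanduser('~'), '.transfer', 'config.yaml')
with open(config_path) as fp:
    config = yaml.safe_load(fp.read())
for project in config:
    # 'name', 'architecture', 'img_dim' are assumed key names
    print(project['name'], project['architecture'], project['img_dim'])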
5,995
def create_payment_transaction(cls, payment_transaction, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async'): return cls._create_payment_transaction_with_http_info(payment_transaction, **kwargs) else: (data) = cls._create_payment_transaction_with_http_info(payment_transaction, **kwargs) return data
Create PaymentTransaction Create a new PaymentTransaction This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async=True >>> thread = api.create_payment_transaction(payment_transaction, async=True) >>> result = thread.get() :param async bool :param PaymentTransaction payment_transaction: Attributes of paymentTransaction to create (required) :return: PaymentTransaction If the method is called asynchronously, returns the request thread.
5,996
def _cnx_is_empty(in_file): with open(in_file) as in_handle: for i, line in enumerate(in_handle): if i > 0: return False return True
Check if cnr or cns files are empty (only have a header)
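A self-contained check with a throwaway header-only file; the column names are illustrative CNVkit-style headers:

import tempfile

with tempfile.NamedTemporaryFile('w', suffix='.cnr', delete=False) as tmp:
    tmp.write('chromosome\tstart\tend\tgene\tlog2\n')  # header only, no data rows
print(_cnx_is_empty(tmp.name))  # True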
5,997
def detect_languages(self, texts): text_list = TextUtils.format_list_to_send(texts) infos_translate = TextDetectLanguageModel(text_list).to_dict() texts_for_detect = TextUtils.change_key(infos_translate, "text", "texts", infos_translate["text"]) mode_translate = TranslatorMode.DetectArray.value return self._get_content(texts_for_detect, mode_translate)
Params:
::texts = array of texts whose languages should be detected

Returns:
The language detected for each text in the array.
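A hedged usage sketch; the client class name and its constructor are hypothetical, only detect_languages() comes from the snippet above:

translator = Translator(client_secret='...')  # assumed constructor
print(translator.detect_languages(['Hello world', 'Bonjour le monde']))
# e.g. ['en', 'fr']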
5,998
def py_module_preamble(ctx: GeneratorContext,) -> GeneratedPyAST: preamble: List[ast.AST] = [] preamble.extend(_module_imports(ctx)) preamble.append(_from_module_import()) preamble.append(_ns_var()) return GeneratedPyAST(node=ast.NameConstant(None), dependencies=preamble)
Bootstrap a new module with imports and other boilerplate.
5,999
def get_model_file(name, root=os.path.join(base.data_dir(), 'models')): file_name = '{name}-{short_hash}'.format(name=name, short_hash=short_hash(name)) root = os.path.expanduser(root) file_path = os.path.join(root, file_name + '.params') sha1_hash = _model_sha1[name] if os.path.exists(file_path): if check_sha1(file_path, sha1_hash): return file_path else: logging.warning('Mismatch in the content of model file detected. Downloading again.') else: logging.info('Model file not found. Downloading to %s.', file_path) util.makedirs(root) zip_file_path = os.path.join(root, file_name + '.zip') repo_url = os.environ.get('MXNET_GLUON_REPO', apache_repo_url) if repo_url[-1] != '/': repo_url = repo_url + '/' download(_url_format.format(repo_url=repo_url, file_name=file_name), path=zip_file_path, overwrite=True) with zipfile.ZipFile(zip_file_path) as zf: zf.extractall(root) os.remove(zip_file_path) if check_sha1(file_path, sha1_hash): return file_path else: raise ValueError('Downloaded file has different hash. Please try again.')
r"""Return location for the pretrained on local file system. This function will download from online model zoo when model cannot be found or has mismatch. The root directory will be created if it doesn't exist. Parameters ---------- name : str Name of the model. root : str, default $MXNET_HOME/models Location for keeping the model parameters. Returns ------- file_path Path to the requested pretrained model file.