5,000
def filter_filenames(filenames):
    filenames_cleaned = []
    for filename in filenames:
        keep = True
        for pattern in FILE_EXCLUDE_EXTENTIONS:
            if filename.endswith(pattern):
                keep = False
        for pattern in FILE_SKIP_PATTENRS:
            if pattern in filename:
                keep = False
        if keep:
            filenames_cleaned.append(filename)
    return filenames_cleaned
Skip files with extensions in `FILE_EXCLUDE_EXTENTIONS` and filenames that contain any pattern in `FILE_SKIP_PATTENRS`.
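A minimal usage sketch of the filter above; the two module-level constants are not shown in the source, so the values here are hypothetical.

FILE_EXCLUDE_EXTENTIONS = ['.pyc', '.o']       # hypothetical values
FILE_SKIP_PATTENRS = ['__pycache__', '.git/']  # hypothetical values
print(filter_filenames(['a.py', 'a.pyc', 'pkg/__pycache__/b.py']))
# -> ['a.py']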
5,001
def convert_constants(jmag, hmag, kmag, cjhk, cjh, cjk, chk, cj, ch, ck):
    if jmag is not None:
        if hmag is not None:
            if kmag is not None:
                return cjhk[0] + cjhk[1]*jmag + cjhk[2]*hmag + cjhk[3]*kmag
            else:
                return cjh[0] + cjh[1]*jmag + cjh[2]*hmag
        else:
            if kmag is not None:
                return cjk[0] + cjk[1]*jmag + cjk[2]*kmag
            else:
                return cj[0] + cj[1]*jmag
    else:
        if hmag is not None:
            if kmag is not None:
                return chk[0] + chk[1]*hmag + chk[2]*kmag
            else:
                return ch[0] + ch[1]*hmag
        else:
            if kmag is not None:
                return ck[0] + ck[1]*kmag
            else:
                return np.nan
This converts between JHK and BVRI/SDSS mags. Not meant to be used directly; see the functions below for a more sensible interface. This function does the grunt work of converting from JHK to either BVRI or SDSS ugriz, while taking care of missing values for any of jmag, hmag, or kmag.

Parameters
----------
jmag, hmag, kmag : float
    2MASS J, H, Ks mags to use to convert.
cjhk, cjh, cjk, chk, cj, ch, ck : lists
    Constants to use when converting.

Returns
-------
float
    The converted magnitude in the SDSS or BVRI system.
5,002
def from_dc_code(cls, dc_code):
    result = cls.list()
    dc_codes = {}
    for dc in result:
        if dc.get():
            dc_codes[dc[]] = dc[]
    return dc_codes.get(dc_code)
Retrieve the datacenter id associated with a dc_code.
5,003
def access_var(self, id_, lineno, scope=None, default_type=None):
    result = self.access_id(id_, lineno, scope, default_type)
    if result is None:
        return None
    if not self.check_class(id_, CLASS.var, lineno, scope):
        return None
    assert isinstance(result, symbols.VAR)
    result.class_ = CLASS.var
    return result
Since ZX BASIC allows access to undeclared variables, we must allow them and *implicitly* declare them if they are not already declared. This function checks whether the id_ exists and, if so, returns its entry. Otherwise, it creates an implicitly declared variable entry and returns it. If the --strict command line flag is enabled (or #pragma option explicit is in use), it checks that the id_ is already declared. Returns None on error.
5,004
def complete_run(self, text, line, b, e):
    # The expression that originally populated forth_files was lost; a
    # directory listing like this is a hypothetical reconstruction.
    forth_files = glob.glob('*.forth')
    forth_files = [f.split(os.path.sep)[-1] for f in forth_files]
    return forth_files
Autocomplete file names with .forth ending.
5,005
def get_assign_groups(line, ops=ops):
    group = []
    for item in line:
        group.append(item)
        if item in ops:
            yield group
            group = []
    yield group
Split a line into groups by assignment (including augmented assignment)
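A hedged usage sketch of the generator above: in the source, `ops` is a module-level collection of assignment tokens, so a simple set is assumed here.

ops = {'=', '+='}  # assumed token set
tokens = ['x', '=', 'y', '+=', '1']
print(list(get_assign_groups(tokens, ops=ops)))
# -> [['x', '='], ['y', '+='], ['1']]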
5,006
def decode_exactly(code, bits_per_char=6):
    assert bits_per_char in (2, 4, 6)
    if len(code) == 0:
        return 0., 0., _LNG_INTERVAL[1], _LAT_INTERVAL[1]
    bits = len(code) * bits_per_char
    level = bits >> 1
    dim = 1 << level
    code_int = decode_int(code, bits_per_char)
    if CYTHON_AVAILABLE and bits <= MAX_BITS:
        x, y = hash2xy_cython(code_int, dim)
    else:
        x, y = _hash2xy(code_int, dim)
    lng, lat = _int2coord(x, y, dim)
    lng_err, lat_err = _lvl_error(level)
    return lng + lng_err, lat + lat_err, lng_err, lat_err
Decode a geohash on a Hilbert curve as a lng/lat position with error-margins.

It assumes that the length of `code` corresponds to the precision, and that each character in `code` encodes `bits_per_char` bits. Do not mix geohashes with different `bits_per_char`!

Parameters:
    code: str
        The geohash to decode.
    bits_per_char: int
        The number of bits per coding character.

Returns:
    Tuple[float, float, float, float]:
        (lng, lat, lng-error, lat-error) coordinate for the geohash.
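A worked example of the precision arithmetic above; this is plain arithmetic and does not need the library internals.

bits = 10 * 6            # a 10-character geohash at 6 bits per character
level = bits >> 1        # 30 -> the Hilbert curve spans a 2**30 x 2**30 grid
dim = 1 << level
print(bits, level, dim)  # 60 30 1073741824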
5,007
def renew(cls, fqdn, duration, background):
    fqdn = fqdn.lower()
    if not background and not cls.intty():
        background = True
    domain_info = cls.info(fqdn)
    current_year = domain_info[].year
    domain_params = {
        : duration,
        : current_year,
    }
    result = cls.call(, fqdn, domain_params)
    if background:
        return result
    cls.echo()
    cls.display_progress(result)
    cls.echo( % fqdn)
Renew a domain.
5,008
def get_plotable3d(self):
    polyhedra = sum([polyhedron.get_plotable3d()
                     for polyhedron in self.polyhedra], [])
    return polyhedra + self.surface.get_plotable3d()
:returns: matplotlib Poly3DCollection
:rtype: list of mpl_toolkits.mplot3d
5,009
def _sync_from_disk(self):
    try:
        fobj = self.open_manifest()
    except IOError as e:
        if e.errno == errno.ENOENT:
            raise ValueError("couldn't find manifest file in %s" % (self.path,))
        elif e.errno == errno.ENOTDIR:
            raise ValueError("%s is not a directory" % (self.path,))
        else:
            raise
    with fobj:
        self.contents = json.load(fobj)
    self._set_defaults()
    self._check_refpkg()
Read any changes made on disk to this Refpkg. This is necessary if other programs are making changes to the Refpkg on disk and your program must be synchronized to them.
5,010
def ready_argument_list(self, arguments):
    gpu_args = []
    for arg in arguments:
        if isinstance(arg, numpy.ndarray):
            gpu_args.append(cl.Buffer(self.ctx,
                                      self.mf.READ_WRITE | self.mf.COPY_HOST_PTR,
                                      hostbuf=arg))
        else:
            gpu_args.append(arg)
    return gpu_args
Ready argument list to be passed to the kernel; allocates GPU memory.

:param arguments: List of arguments to be passed to the kernel. The order
    should match the argument list on the OpenCL kernel. Allowed values are
    numpy.ndarray, and/or numpy.int32, numpy.float32, and so on.
:type arguments: list(numpy objects)
:returns: A list of arguments that can be passed to an OpenCL kernel.
:rtype: list( pyopencl.Buffer, numpy.int32, ... )
5,011
def start(self, input_data, output_data, transform_resources, **kwargs):
    self.transform_resources = transform_resources
    self.input_data = input_data
    self.output_data = output_data
    image = self.primary_container[]
    instance_type = transform_resources[]
    instance_count = 1
    environment = self._get_container_environment(**kwargs)
    self.container = _SageMakerContainer(instance_type, instance_count,
                                         image, self.local_session)
    self.container.serve(self.primary_container[], environment)
    serving_port = get_config_value(, self.local_session.config) or 8080
    _wait_for_serving_container(serving_port)
    endpoint_url = % serving_port
    response, code = _perform_request(endpoint_url)
    if code == 200:
        execution_parameters = json.loads(response.read())
        for setting in (, ):
            if setting not in kwargs and setting in execution_parameters:
                kwargs[setting] = execution_parameters[setting]
    kwargs.update(self._get_required_defaults(**kwargs))
    self.start_time = datetime.datetime.now()
    self.batch_strategy = kwargs[]
    if in kwargs:
        self.environment = kwargs[]
    self._perform_batch_inference(input_data, output_data, **kwargs)
    self.end_time = datetime.datetime.now()
    self.state = self._COMPLETED
Start the Local Transform Job

Args:
    input_data (dict): Describes the dataset to be transformed and the
        location where it is stored.
    output_data (dict): Identifies the location where to save the results
        from the transform job
    transform_resources (dict): compute instances for the transform job.
        Currently only supports local or local_gpu
    **kwargs: additional arguments coming from the boto request object
5,012
def id(self):
    # An orphaned `except KeyError: pass` appeared here; its `try:` block
    # is missing from the source.
    chip_id = self.detector.chip.id
    board_id = None
    if chip_id == ap_chip.BCM2XXX:
        board_id = self._pi_id()
    elif chip_id == ap_chip.AM33XX:
        board_id = self._beaglebone_id()
    elif chip_id == ap_chip.GENERIC_X86:
        board_id = GENERIC_LINUX_PC
    elif chip_id == ap_chip.SUN8I:
        board_id = self._armbian_id()
    elif chip_id == ap_chip.SAMA5:
        board_id = self._sama5_id()
    elif chip_id == ap_chip.ESP8266:
        board_id = FEATHER_HUZZAH
    elif chip_id == ap_chip.SAMD21:
        board_id = FEATHER_M0_EXPRESS
    elif chip_id == ap_chip.STM32:
        board_id = PYBOARD
    elif chip_id == ap_chip.S805:
        board_id = ODROID_C1
    elif chip_id == ap_chip.S905:
        board_id = ODROID_C2
    elif chip_id == ap_chip.FT232H:
        board_id = FTDI_FT232H
    elif chip_id in (ap_chip.T210, ap_chip.T186, ap_chip.T194):
        board_id = self._tegra_id()
    return board_id
Return a unique id for the detected board, if any.
5,013
def _job_to_text(self, job):
    next_run = self._format_date(job.get(, None))
    tasks =
    for task in job.get(, []):
        tasks += self._task_to_text(task)
        tasks +=
    return .join([
        % job.get(, None),
        % job.get(, None),
        % next_run,
        ,
        % job.get(, None),
        % job.get(, None),
        , , ,
        tasks])
Return a standard formatting of a Job serialization.
5,014
def load_mlf(filename, utf8_normalization=None):
    with codecs.open(filename, , ) as f:
        data = f.read().decode()
    if utf8_normalization:
        data = unicodedata.normalize(utf8_normalization, data)
    mlfs = {}
    for mlf_object in HTK_MLF_RE.finditer(data):
        mlfs[mlf_object.group()] = [
            [Label(**mo.groupdict())
             for mo in HTK_HYPOTHESIS_RE.finditer(recognition_data)]
            for recognition_data in re.split(r, mlf_object.group())]
    return mlfs
Load an HTK Master Label File.

:param filename: The filename of the MLF file.
:param utf8_normalization: unicodedata normalization form to apply
    (e.g. "NFC"), or None to skip normalization.
5,015
def base64(self, charset=None):
    return b64encode(self.bytes()).decode(charset or self.charset)
Data encoded as base64.
5,016
def streamline(self):
    t = time.time()
    self.language.streamline()
    log.info(, self.__class__.__name__, time.time() - t)
Streamline the language represented by this parser to make queries run faster.
5,017
def _make_session(connection: Optional[str] = None) -> Session:
    if connection is None:
        connection = get_global_connection()
    engine = create_engine(connection)
    create_all(engine)
    session_cls = sessionmaker(bind=engine)
    session = session_cls()
    return session
Make a session.
5,018
def _on_rpc_done(self, future):
    _LOGGER.info("RPC termination has signaled manager shutdown.")
    future = _maybe_wrap_exception(future)
    thread = threading.Thread(
        name=_RPC_ERROR_THREAD_NAME,
        target=self.close,
        kwargs={"reason": future},
    )
    thread.daemon = True
    thread.start()
Triggered whenever the underlying RPC terminates without recovery. This is typically triggered from one of two threads: the background consumer thread (when calling ``recv()`` produces a non-recoverable error) or the grpc management thread (when cancelling the RPC). This method is *non-blocking*. It will start another thread to deal with shutting everything down. This is to prevent blocking in the background consumer and preventing it from being ``joined()``.
5,019
def remove(self, obj, commit=True):
    database = self._database(writable=True)
    database.delete_document(TERM_PREFIXES[ID] + get_identifier(obj))
    database.close()
Remove indexes for `obj` from the database. We delete all instances of `Q<app_name>.<model_name>.<pk>` which should be unique to this object. Optional arguments: `commit` -- ignored
5,020
def get_relationship(self, relationship_id):
    collection = JSONClientValidated(, collection=, runtime=self._runtime)
    result = collection.find_one(
        dict({: ObjectId(self._get_id(relationship_id, ).get_identifier())},
             **self._view_filter()))
    return objects.Relationship(osid_object_map=result,
                                runtime=self._runtime,
                                proxy=self._proxy)
Gets the ``Relationship`` specified by its ``Id``. arg: relationship_id (osid.id.Id): the ``Id`` of the ``Relationship`` to retrieve return: (osid.relationship.Relationship) - the returned ``Relationship`` raise: NotFound - no ``Relationship`` found with the given ``Id`` raise: NullArgument - ``relationship_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.*
5,021
def add_column(self, position, source_header, datatype, **kwargs):
    from ..identity import GeneralNumber2
    c = self.column(source_header)
    c_by_pos = self.column(position)
    datatype = if datatype == else datatype
    assert not c or not c_by_pos or c.vid == c_by_pos.vid
    if in kwargs:
        FALSE_VALUES = [, , , , , None, 0, ]
        kwargs[] = False if kwargs[] in FALSE_VALUES else True
    if c:
        assert not c_by_pos or c_by_pos.vid == c.vid
        c.update(
            position=position,
            datatype=datatype.__name__ if isinstance(datatype, type) else datatype,
            **kwargs)
    elif c_by_pos:
        c = SourceColumn(
            vid=str(GeneralNumber2(, self.d_vid, self.sequence_id, int(position))),
            position=position,
            st_vid=self.vid,
            d_vid=self.d_vid,
            datatype=datatype.__name__ if isinstance(datatype, type) else datatype,
            source_header=source_header,
            **kwargs)
        self.columns.append(c)
    return c
Add a column to the source table. :param position: Integer position of the column started from 1. :param source_header: Name of the column, as it exists in the source file :param datatype: Python datatype ( str, int, float, None ) for the column :param kwargs: Other source record args. :return:
5,022
def freeze_graph_tpu(model_path):
    assert model_path
    assert FLAGS.tpu_name
    if FLAGS.tpu_name.startswith():
        tpu_grpc_url = FLAGS.tpu_name
    else:
        tpu_cluster_resolver = tf.contrib.cluster_resolver.TPUClusterResolver(
            FLAGS.tpu_name, zone=None, project=None)
        tpu_grpc_url = tpu_cluster_resolver.get_master()
    sess = tf.Session(tpu_grpc_url)
    output_names = []
    with sess.graph.as_default():
        replicated_features = []
        for i in range(FLAGS.num_tpu_cores):
            features = tf.placeholder(
                tf.float32, [None, go.N, go.N, features_lib.NEW_FEATURES_PLANES],
                name= % i)
            replicated_features.append((features,))
        outputs = tf.contrib.tpu.replicate(
            tpu_model_inference_fn, replicated_features)
        for i, (policy_output, value_output, _) in enumerate(outputs):
            policy_name = % i
            value_name = % i
            output_names.extend([policy_name, value_name])
            tf.identity(policy_output, policy_name)
            tf.identity(value_output, value_name)
        tf.train.Saver().restore(sess, model_path)
    model_def = tf.graph_util.convert_variables_to_constants(
        sess, sess.graph.as_graph_def(), output_names)
    with tf.gfile.GFile(model_path + , ) as f:
        f.write(model_def.SerializeToString())
Custom freeze_graph implementation for Cloud TPU.
5,023
def calculate_size(transaction_id, thread_id):
    data_size = 0
    data_size += calculate_size_str(transaction_id)
    data_size += LONG_SIZE_IN_BYTES
    return data_size
Calculates the request payload size
5,024
def merge(self, other):
    other = IntervalCell.coerce(other)
    if self.is_equal(other):
        return self
    elif other.is_entailed_by(self):
        return self
    elif self.is_entailed_by(other):
        self.low, self.high = other.low, other.high
    elif self.is_contradictory(other):
        raise Contradiction("Cannot merge [%0.2f, %0.2f] with [%0.2f, %0.2f]"
                            % (self.low, self.high, other.low, other.high))
    else:
        self.low = max(self.low, other.low)
        self.high = min(self.high, other.high)
    return self
Merges the two values
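A standalone sketch of the merge rule above: two overlapping intervals narrow to their intersection, and disjoint intervals are a contradiction. This mirrors the logic without the `IntervalCell` machinery.

def merge_intervals(a, b):
    low, high = max(a[0], b[0]), min(a[1], b[1])
    if low > high:
        raise ValueError("Cannot merge %s with %s" % (a, b))
    return (low, high)

print(merge_intervals((0.0, 5.0), (3.0, 9.0)))  # -> (3.0, 5.0)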
5,025
def operate(config):
    "Interface to do simple operations on the database."
    app = make_app(config=config)
    print "Operate Mode"  # Python 2 print statement, kept as in the source
    with app.app_context():
        operate_menu()
Interface to do simple operations on the database.
5,026
def update_port_ip_address(self):
    leases = None
    req = dict(ip=)
    instances = self.get_vms_for_this_req(**req)
    if instances is None:
        return
    for vm in instances:
        if not leases:
            leases = self._get_ip_leases()
            if not leases:
                return
        for line in leases:
            if line.startswith() and line.endswith():
                ip_addr = line.split()[1]
                if in line:
                    if vm.mac == line.replace(, ).split()[2]:
                        LOG.info(, {: ip_addr, : vm.mac})
                        try:
                            rule_info = dict(ip=ip_addr, mac=vm.mac,
                                             port=vm.port_id, status=)
                            self.neutron_event.update_ip_rule(str(vm.host),
                                                              str(rule_info))
                        except (rpc.MessagingTimeout, rpc.RPCException,
                                rpc.RemoteError):
                            LOG.error("RPC error: Failed to update rules.")
                        else:
                            params = dict(columns=dict(ip=ip_addr))
                            self.update_vm_db(vm.port_id, **params)
                            vm_info = dict(status=vm.status, vm_mac=vm.mac,
                                           segmentation_id=vm.segmentation_id,
                                           host=vm.host, port_uuid=vm.port_id,
                                           net_uuid=vm.network_id,
                                           oui=dict(ip_addr=ip_addr,
                                                    vm_name=vm.name,
                                                    vm_uuid=vm.instance_id,
                                                    gw_mac=vm.gw_mac,
                                                    fwd_mod=vm.fwd_mod,
                                                    oui_id=))
                            try:
                                self.neutron_event.send_vm_info(vm.host,
                                                                str(vm_info))
                            except (rpc.MessagingTimeout, rpc.RPCException,
                                    rpc.RemoteError):
                                LOG.error()
Find the IP address that was assigned to a port via DHCP. The port database will be updated with the IP address.
5,027
def _create_threads(self):
    creator = JobCreator(
        self.config,
        self.observers.jobs,
        self.logger
    )
    self.jobs = creator.job_factory()
This method creates job instances.
5,028
def base_exception_handler(*args):
    header, frames, trcback = format_report(*extract_exception(*args))
    LOGGER.error("!> {0}".format(Constants.logging_separators))
    map(lambda x: LOGGER.error("!> {0}".format(x)), header)
    LOGGER.error("!> {0}".format(Constants.logging_separators))
    map(lambda x: LOGGER.error("!> {0}".format(x)), frames)
    LOGGER.error("!> {0}".format(Constants.logging_separators))
    sys.stderr.write("\n".join(trcback))
    return True
Provides the base exception handler. :param \*args: Arguments. :type \*args: \* :return: Definition success. :rtype: bool
5,029
def generate_seasonal_averages(qout_file, seasonal_average_file,
                               num_cpus=multiprocessing.cpu_count()):
    with RAPIDDataset(qout_file) as qout_nc_file:
        print("Generating seasonal average file ...")
        seasonal_avg_nc = Dataset(seasonal_average_file, )
        seasonal_avg_nc.createDimension(, qout_nc_file.size_river_id)
        seasonal_avg_nc.createDimension(, 365)
        time_series_var = seasonal_avg_nc.createVariable(, , (,))
        time_series_var.long_name = ()
        average_flow_var = \
            seasonal_avg_nc.createVariable(, , (, ))
        average_flow_var.long_name =
        average_flow_var.units =
        std_dev_flow_var = \
            seasonal_avg_nc.createVariable(, , (, ))
        std_dev_flow_var.long_name =
        std_dev_flow_var.units =
        std_dev_flow_var = \
            seasonal_avg_nc.createVariable(, , (, ))
        std_dev_flow_var.long_name =
        std_dev_flow_var.units =
        std_dev_flow_var = \
            seasonal_avg_nc.createVariable(, , (, ))
        std_dev_flow_var.long_name =
        std_dev_flow_var.units =
        lat_var = seasonal_avg_nc.createVariable(, , (,), fill_value=-9999.0)
        lon_var = seasonal_avg_nc.createVariable(, , (,), fill_value=-9999.0)
        add_latlon_metadata(lat_var, lon_var)
        seasonal_avg_nc.variables[][:] = \
            qout_nc_file.qout_nc.variables[][:]
        seasonal_avg_nc.variables[][:] = \
            qout_nc_file.qout_nc.variables[][:]
        river_id_list = qout_nc_file.get_river_id_array()
        seasonal_avg_nc.variables[][:] = river_id_list
        seasonal_avg_nc.close()
    mp_lock = multiprocessing.Manager().Lock()
    job_combinations = []
    for day_of_year in range(1, 366):
        job_combinations.append((qout_file, seasonal_average_file,
                                 day_of_year, mp_lock))
    pool = multiprocessing.Pool(num_cpus)
    pool.map(generate_single_seasonal_average, job_combinations)
    pool.close()
    pool.join()
This function loops through a CF compliant rapid streamflow file to produce a netCDF file with a seasonal average for 365 days a year
5,030
def _get_caller_globals_and_locals():
    caller_frame = inspect.stack()[2]
    myglobals = caller_frame[0].f_globals
    mylocals = caller_frame[0].f_locals
    return myglobals, mylocals
Returns the globals and locals of the calling frame. Is there an alternative to frame hacking here?
5,031
def _get_session(self):
    if self.port in (465, "465"):
        session = self._get_ssl()
    elif self.port in (587, "587"):
        session = self._get_tls()
    try:
        session.login(self.from_, self._auth)
    except SMTPResponseException as e:
        raise MessageSendError(e.smtp_error.decode("unicode_escape"))
    return session
Start session with email server.
5,032
def inline(self) -> str:
    return "{0}:{1}:{2}:{3}:{4}".format(self.issuer,
                                        self.signatures[0],
                                        self.membership_ts,
                                        self.identity_ts,
                                        self.uid)
Return inline string format of the Membership instance :return:
5,033
def fit(self, train_set, test_set):
    with tf.Graph().as_default(), tf.Session() as self.tf_session:
        self.build_model()
        tf.global_variables_initializer().run()
        third = self.num_epochs // 3
        for i in range(self.num_epochs):
            lr_decay = self.lr_decay ** max(i - third, 0.0)
            self.tf_session.run(
                tf.assign(self.lr_var, tf.multiply(self.learning_rate, lr_decay)))
            train_perplexity = self._run_train_step(train_set, )
            print("Epoch: %d Train Perplexity: %.3f" % (i + 1, train_perplexity))
        test_perplexity = self._run_train_step(test_set, )
        print("Test Perplexity: %.3f" % test_perplexity)
Fit the model to the given data.

:param train_set: training data
:param test_set: test data
5,034
def add_slide(self, slide_layout):
    partname = self._next_slide_partname
    slide_layout_part = slide_layout.part
    slide_part = SlidePart.new(partname, self.package, slide_layout_part)
    rId = self.relate_to(slide_part, RT.SLIDE)
    return rId, slide_part.slide
Return an (rId, slide) pair of a newly created blank slide that inherits appearance from *slide_layout*.
5,035
def parse_mixed_delim_str(line):
    arrs = [[], [], []]
    for group in line.split():
        # the inner delimiter was stripped in the source; '/' is assumed,
        # matching the .obj face syntax "v/vt/vn"
        for col, coord in enumerate(group.split('/')):
            if coord:
                arrs[col].append(int(coord))
    return [tuple(arr) for arr in arrs]
Turns .obj face index string line into [verts, texcoords, normals] numeric tuples.
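A hedged usage example for the parser above, under the assumed '/' delimiter.

print(parse_mixed_delim_str('1/4/7 2/5/8 3/6/9'))
# -> [(1, 2, 3), (4, 5, 6), (7, 8, 9)]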
5,036
def _get_filename(self):
    if self._fname is None:
        timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
        fname = "%s-%s.log" % (timestamp, abs(id(self)))
        self._fname = os.path.join(self.file_path, fname)
    return self._fname
Return a unique file name.
5,037
def devices_l(self) -> Dict:
    # the command literals were stripped; 'devices', '-l' is assumed from
    # the docstring
    output, _ = self._execute('devices', '-l')
    devices = output.split()[4::6]
    models = output.split()[7::6]
    return dict(zip(devices, models))
List connected devices (-l for long output).
5,038
def _create_sequences(self):
    try:
        self.pdb.construct_pdb_to_rosetta_residue_map(
            self.rosetta_scripts_path,
            rosetta_database_path=self.rosetta_database_path,
            cache_dir=self.cache_dir)
    except PDBMissingMainchainAtomsException:
        self.pdb_to_rosetta_residue_map_error = True
    if self.pdb_id not in do_not_use_the_sequence_aligner:
        self.uniparc_sequences = self.PDB_UniParc_SA.uniparc_sequences
    else:
        self.uniparc_sequences = self.sifts.get_uniparc_sequences()
    self.fasta_sequences = self.FASTA.get_sequences(self.pdb_id)
    self.seqres_sequences = self.pdb.seqres_sequences
    self.atom_sequences = self.pdb.atom_sequences
    if self.pdb_to_rosetta_residue_map_error:
        self.rosetta_sequences = {}
        for c in self.atom_sequences.keys():
            self.rosetta_sequences[c] = Sequence()
    else:
        self.rosetta_sequences = self.pdb.rosetta_sequences
    uniparc_pdb_chain_mapping = {}
    if self.pdb_id not in do_not_use_the_sequence_aligner:
        for pdb_chain_id, matches in self.PDB_UniParc_SA.clustal_matches.iteritems():
            if matches:
                uniparc_chain_id = matches.keys()[0]
                assert(len(matches) == 1)
                uniparc_pdb_chain_mapping[uniparc_chain_id] = \
                    uniparc_pdb_chain_mapping.get(uniparc_chain_id, [])
                uniparc_pdb_chain_mapping[uniparc_chain_id].append(pdb_chain_id)
    else:
        for pdb_chain_id, uniparc_chain_ids in self.sifts.get_pdb_chain_to_uniparc_id_map().iteritems():
            for uniparc_chain_id in uniparc_chain_ids:
                uniparc_pdb_chain_mapping[uniparc_chain_id] = \
                    uniparc_pdb_chain_mapping.get(uniparc_chain_id, [])
                uniparc_pdb_chain_mapping[uniparc_chain_id].append(pdb_chain_id)
    for uniparc_chain_id, pdb_chain_ids in uniparc_pdb_chain_mapping.iteritems():
        sequence_type = set([self.seqres_sequences[p].sequence_type
                             for p in pdb_chain_ids])
        assert(len(sequence_type) == 1)
        sequence_type = sequence_type.pop()
        assert(self.uniparc_sequences[uniparc_chain_id].sequence_type == None)
        self.uniparc_sequences[uniparc_chain_id].set_type(sequence_type)
        for p in pdb_chain_ids:
            self.pdb_chain_to_uniparc_chain_mapping[p] = uniparc_chain_id
    for chain_id, sequence in self.seqres_sequences.iteritems():
        self.fasta_sequences[chain_id].set_type(sequence.sequence_type)
Get all of the Sequences - Rosetta, ATOM, SEQRES, FASTA, UniParc.
5,039
def _sort_tensor(tensor):
    sorted_, _ = tf.nn.top_k(tensor, k=tf.shape(input=tensor)[-1])
    sorted_.set_shape(tensor.shape)
    return sorted_
Use `top_k` to sort a `Tensor` along the last dimension.
5,040
def callback(self):
    self._callback(*self._args, **self._kwargs)
    self._last_checked = time.time()
Run the callback
5,041
def pick_q_v1(self):
    inl = self.sequences.inlets.fastaccess
    new = self.sequences.states.fastaccess_new
    new.qjoints[0] = 0.
    for idx in range(inl.len_q):
        new.qjoints[0] += inl.q[idx][0]
Assign the actual value of the inlet sequence to the upper joint of the subreach upstream.
5,042
def html(self):
    failure = ""
    skipped = None
    stdout = tag.text(self.stdout)
    stderr = tag.text(self.stderr)
    if self.skipped:
        skipped = .format(msg=tag.text(self.skipped_msg),
                          skip=tag.text(self.skipped))
    if self.failed():
        failure = .format(msg=tag.text(self.failure_msg),
                          fail=tag.text(self.failure))
    properties = [x.html() for x in self.properties]
    return .format(anchor=self.anchor(),
                   testname=self.name,
                   testclassname=self.testclass.name,
                   duration=self.duration,
                   failure=failure,
                   skipped=skipped,
                   properties="".join(properties),
                   stdout=stdout,
                   stderr=stderr)
Render this test case as HTML :return:
5,043
def print_async_event(self, suffix, event):
    if not isinstance(event, dict):
        return
    if suffix in (,):
        return
    try:
        outputter = self.opts.get(, event.get(, None) or event.get().get())
    except AttributeError:
        outputter = None
    if suffix == :
        if isinstance(event.get(), dict) \
                and set(event[]) == set((, )):
            event_data = event[][]
            outputter = event[][]
        else:
            event_data = event[]
    else:
        event_data = {: suffix, : event}
    salt.output.display_output(event_data, outputter, self.opts)
Print all of the events with the prefix 'tag'
5,044
def render_html(self):
    return self._template.safe_substitute(
        report_type=self._report_type,
        results=self.render_json()
    )
Render an HTML report.
5,045
def _finish_disconnection_action(self, action):
    success = action.data[]
    conn_key = action.data[]
    if self._get_connection_state(conn_key) != self.Disconnecting:
        self._logger.error("Invalid finish_disconnection action on a connection "
                           "whose state is not Disconnecting, conn_key=%s",
                           str(conn_key))
        return
    data = self._get_connection(conn_key)
    callback = data[]
    conn_id = data[]
    int_id = data[]
    if success is False:
        reason = action.data[]
        if reason is None:
            reason = "No reason was given"
        data[] = self.Idle
        data[] = None
        data[] = None
        callback(conn_id, self.id, False, reason)
    else:
        del self._connections[conn_id]
        del self._int_connections[int_id]
        callback(conn_id, self.id, True, None)
Finish a disconnection attempt There are two possible outcomes: - if we were successful at disconnecting, we transition to disconnected - if we failed at disconnecting, we transition back to idle Args: action (ConnectionAction): the action object describing what we are disconnecting from and what the result of the operation was
5,046
def unsign_data(self, data, url_safe=True):
    if url_safe:
        return utils.unsign_url_safe(data,
                                     secret_key=self.secret_key,
                                     salt=self.user_salt)
    else:
        return utils.unsign_data(data,
                                 secret_key=self.secret_key,
                                 salt=self.user_salt)
Retrieve the signed data. If it is expired, it will throw an exception :param data: token/signed data :param url_safe: bool. If true it will allow it to be passed in URL :return: mixed, the data in its original form
5,047
def execute(self, args, kwargs):
    return self.lookup_explicit(args, kwargs)(*args, **kwargs)
Dispatch a call. Call the first function whose type signature matches the arguments.
5,048
def session(self, session=None):
    if self.related_instance:
        session = self.related_instance.session
    if session is None:
        raise QuerySetError()
    return session
Override :meth:`Manager.session` so that this :class:`RelatedManager` can retrieve the session from the :attr:`related_instance` if available.
5,049
def eval_py(self, _globals, _locals):
    try:
        params = eval(self.script, _globals, _locals)
    except NameError as e:
        raise Exception(.format(str(e)))
    except ResolutionError as e:
        raise Exception(.format(str(e)))
    return params
Evaluates a file containing a Python params dictionary.
5,050
def _encode_params(**kw):
    # The stripped literals ('utf-8', '%s=%s', '&') are reconstructed
    # from the doctest in the docstring.
    args = []
    for k, v in kw.iteritems():
        if isinstance(v, basestring):
            qv = v.encode('utf-8') if isinstance(v, unicode) else v
            args.append('%s=%s' % (k, urllib.quote(qv)))
        elif isinstance(v, collections.Iterable):
            for i in v:
                qv = i.encode('utf-8') if isinstance(i, unicode) else str(i)
                args.append('%s=%s' % (k, urllib.quote(qv)))
        else:
            qv = str(v)
            args.append('%s=%s' % (k, urllib.quote(qv)))
    return '&'.join(args)
Do url-encode parameters.

>>> _encode_params(a=1, b='R&D')
'a=1&b=R%26D'
>>> _encode_params(a=u'\u4e2d\u6587', b=['A', 'B', 123])
'a=%E4%B8%AD%E6%96%87&b=A&b=B&b=123'
5,051
def attach(gandi, disk, vm, position, read_only, background, force):
    if not force:
        proceed = click.confirm("Are you sure you want to attach disk "
                                " to vm ?" % (disk, vm))
        if not proceed:
            return
    disk_info = gandi.disk.info(disk)
    attached = disk_info.get(, False)
    if attached and not force:
        gandi.echo()
        proceed = click.confirm( % disk)
        if not proceed:
            return
    result = gandi.disk.attach(disk, vm, background, position, read_only)
    if background and result:
        gandi.pretty_echo(result)
    return result
Attach disk to vm. disk can be a disk name, or ID vm can be a vm name, or ID
5,052
def shorten_text(self, text):
    if len(text) > self.width:
        # the stripped literal is assumed to be an ellipsis, given the
        # `- 3` above
        return text[:self.width - 3] + '...'
    return text
Shortens text to fit into the :attr:`width`.
5,053
def GetIPAddresses(self):
    results = []
    for address in self.addresses:
        human_readable_address = address.human_readable_address
        if human_readable_address is not None:
            results.append(human_readable_address)
    return results
Return a list of IP addresses.
5,054
def run(self, arguments, show_help=True):
    if self.use_sys:
        if not gf.FROZEN:
            if sys.stdin.encoding not in ["UTF-8", "UTF8"]:
                self.print_warning(u"The default input encoding is not UTF-8.")
                self.print_warning(u"You might want to set in your shell.")
            if sys.stdout.encoding not in ["UTF-8", "UTF8"]:
                self.print_warning(u"The default output encoding is not UTF-8.")
                self.print_warning(u"You might want to set in your shell.")
        args = [gf.safe_unicode_stdin(arg) for arg in arguments]
    else:
        args = [gf.safe_unicode(arg) for arg in arguments]
    if show_help:
        if u"-h" in args:
            return self.print_help(short=True)
        if u"--help" in args:
            return self.print_help(short=False)
        if u"--help-rconf" in args:
            return self.print_rconf_parameters()
        if u"--version" in args:
            return self.print_name_version()
    self.formal_arguments_raw = arguments
    self.formal_arguments = args
    args = args[1:]
    set_args = set(args)
    for flag in set([u"-v", u"--verbose"]) & set_args:
        self.verbose = True
        args.remove(flag)
    for flag in set([u"-vv", u"--very-verbose"]) & set_args:
        self.verbose = True
        self.very_verbose = True
        args.remove(flag)
    for flag in [u"-r", u"--runtime-configuration"]:
        rconf_string = self.has_option_with_value(flag, actual_arguments=False)
        if rconf_string is not None:
            self.rconf = RuntimeConfiguration(rconf_string)
            args.remove("%s=%s" % (flag, rconf_string))
    log_path = None
    for flag in [u"-l", u"--log"]:
        log_path = self.has_option_with_value(flag, actual_arguments=False)
        if log_path is not None:
            args.remove("%s=%s" % (flag, log_path))
        elif flag in set_args:
            handler, log_path = gf.tmp_file(suffix=u".log",
                                            root=self.rconf[RuntimeConfiguration.TMP_PATH])
            args.remove(flag)
    if log_path is not None:
        self.log_file_path = log_path
    if (len(args) < 1) and (show_help):
        return self.print_help(short=True)
    self.actual_arguments = args
    self.logger = Logger(tee=self.verbose, tee_show_datetime=self.very_verbose)
    self.log([u"Running aeneas %s", aeneas_version])
    self.log([u"Formal arguments: %s", self.formal_arguments])
    self.log([u"Actual arguments: %s", self.actual_arguments])
    self.log([u"Runtime configuration: ", self.rconf.config_string])
    exit_code = self.perform_command()
    self.log([u"Execution completed with code %d", exit_code])
    if self.log_file_path is not None:
        self.log([u"User requested saving log to file ", self.log_file_path])
        self.logger.write(self.log_file_path)
        if self.use_sys:
            self.print_info(u"Log written to file " % self.log_file_path)
    return self.exit(exit_code)
Program entry point. Please note that the first item in ``arguments`` is discarded, as it is assumed to be the script/invocation name; pass a "dumb" placeholder if you call this method with an argument different that ``sys.argv``. :param arguments: the list of arguments :type arguments: list :param show_help: if ``False``, do not show help on ``-h`` and ``--help`` :type show_help: bool :rtype: int
5,055
def _GetDirectory(self):
    if self.entry_type != definitions.FILE_ENTRY_TYPE_DIRECTORY:
        return None
    return VShadowDirectory(self._file_system, self.path_spec)
Retrieves a directory.

Returns:
    VShadowDirectory: a directory, or None if not available.
5,056
def _CheckKeyPath(self, registry_key, search_depth):
    if self._key_path_segments is None:
        return False
    if search_depth < 0 or search_depth > self._number_of_key_path_segments:
        return False
    if search_depth == 0:
        segment_name =
    else:
        segment_name = self._key_path_segments[search_depth - 1]
        if self._is_regex:
            if isinstance(segment_name, py2to3.STRING_TYPES):
                flags = re.DOTALL | re.IGNORECASE | re.UNICODE
                try:
                    segment_name = r.format(segment_name)
                    segment_name = re.compile(segment_name, flags=flags)
                except sre_constants.error:
                    return False
                self._key_path_segments[search_depth - 1] = segment_name
        else:
            segment_name = segment_name.lower()
            self._key_path_segments[search_depth - 1] = segment_name
    if search_depth > 0:
        if self._is_regex:
            if not segment_name.match(registry_key.name):
                return False
        elif segment_name != registry_key.name.lower():
            return False
    return True
Checks the key path find specification. Args: registry_key (WinRegistryKey): Windows Registry key. search_depth (int): number of key path segments to compare. Returns: bool: True if the Windows Registry key matches the find specification, False if not.
5,057
def _format_time(seconds):
    minutes = seconds // 60
    hours = minutes // 60
    # the format strings were stripped in the source; MM:SS, H:MM:SS and
    # "Nd H:MM:SS" are assumed below
    rtn = u'{0:02.0f}:{1:02.0f}'.format(minutes % 60, seconds % 60)
    if hours:
        rtn = u'{0:d}:{1:s}'.format(int(hours % 24), rtn)
    days = int(hours // 24)
    if days:
        rtn = u'{0:d}d {1:s}'.format(days, rtn)
    return rtn
Format time string for eta and elapsed.

Args:
    seconds (float): amount of time
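A worked example for the formatter above, under the assumed format strings noted in the code: 90061 seconds is 1 day, 1 hour, 1 minute and 1 second.

print(_format_time(90061))  # -> u'1d 1:01:01'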
5,058
def parse_size(image, size):
    bits = size.split("x")
    if image.size[0] == 0 or image.size[1] == 0:
        ratio = 1.0
    else:
        ratio = float(image.size[0]) / float(image.size[1])
    if len(bits) == 1 or not bits[1]:
        width = int(bits[0])
        height = int(1 / ratio * width)
    elif not bits[0]:
        height = int(bits[1])
        width = int(height * ratio)
    else:
        width, height = map(int, bits)
    return width, height
Parse a size string (i.e. "200", "200x100", "x200", etc.) into a (width, height) tuple.
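Hedged usage examples for parse_size, assuming a PIL-style image object with a .size attribute.

class _Img:                 # hypothetical stand-in for a PIL image
    size = (400, 200)

print(parse_size(_Img(), "200"))     # -> (200, 100), width given
print(parse_size(_Img(), "x100"))    # -> (200, 100), height given
print(parse_size(_Img(), "300x50"))  # -> (300, 50), both given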
5,059
def _read(self, directory, filename, session, path, name, extension,
          spatial, spatialReferenceID, replaceParamFile):
    self.fileExtension = extension
    KEYWORDS = {: spc.connectChunk,
                : spc.sjuncChunk,
                : spc.slinkChunk}
    sjuncs = []
    slinks = []
    connections = []
    with open(path, ) as f:
        chunks = pt.chunk(KEYWORDS, f)
    for key, chunkList in iteritems(chunks):
        for chunk in chunkList:
            result = KEYWORDS[key](key, chunk)
            if key == :
                connections.append(result)
            elif key == :
                sjuncs.append(result)
            elif key == :
                slinks.append(result)
    self._createConnection(connections)
    self._createSjunc(sjuncs)
    self._createSlink(slinks)
Storm Pipe Network File Read from File Method
5,060
def _consolidate_coordinateList(self, coordinateList):
    self.log.debug()
    raList = []
    raList[:] = np.array([c[0] for c in coordinateList])
    decList = []
    decList[:] = np.array([c[1] for c in coordinateList])
    nedStreamRadius = self.settings[
        "ned stream search radius arcec"] / (60. * 60.)
    firstPassNedSearchRadius = self.settings[
        "first pass ned search radius arcec"] / (60. * 60.)
    radius = nedStreamRadius - firstPassNedSearchRadius
    # The body that actually builds updatedCoordinateList is missing from
    # the source; as written, the return below raises NameError.
    return updatedCoordianteList
*match the coordinate list against itself with the parameters of the NED search queries to minimise duplicated NED queries*

**Key Arguments:**
    - ``coordinateList`` -- the original coordinateList.

**Return:**
    - ``updatedCoordinateList`` -- the coordinate list with duplicated search areas removed

**Usage:**
    .. todo::
        - add usage info
        - create a sublime snippet for usage
        - update package tutorial if needed

    .. code-block:: python

        usage code

.. todo::
    - update key arguments values and definitions with defaults
    - update return values and definitions
    - update usage examples and text
    - update docstring text
    - check sublime snippet exists
    - clip any useful text to docs mindmap
    - regenerate the docs and check rendering of this docstring
5,061
def cashFlow(symbol, token=, version=):
    _raiseIfNotStr(symbol)
    return _getJson( + symbol + , token, version)
Pulls cash flow data. Available quarterly (4 quarters) or annually (4 years).

https://iexcloud.io/docs/api/#cash-flow
Updates at 8am, 9am UTC daily

Args:
    symbol (string); Ticker to request
    token (string); Access token
    version (string); API version

Returns:
    dict: result
5,062
def get_sd_auth(val, sd_auth_pillar_name=):
    sd_pillar = __pillar__.get(sd_auth_pillar_name)
    log.debug(, sd_pillar)
    if not sd_pillar:
        log.error(, sd_auth_pillar_name)
        raise CommandExecutionError(.format(sd_auth_pillar_name))
    try:
        return sd_pillar[val]
    except KeyError:
        log.error(, val)
        raise CommandExecutionError(.format(val))
Returns requested Server Density authentication value from pillar. CLI Example: .. code-block:: bash salt '*' serverdensity_device.get_sd_auth <val>
5,063
def p_array_literal_2(self, p):
    items = p[2]
    if len(p) == 6:
        items.extend(p[4])
    p[0] = ast.Array(items=items)
array_literal : LBRACKET element_list RBRACKET | LBRACKET element_list COMMA elision_opt RBRACKET
5,064
def sort_pkglist(pkgs):
    try:
        for key in pkgs:
            pkgs[key] = sorted(set(pkgs[key]))
    except AttributeError as exc:
        log.exception(exc)
Accepts a dict obtained from pkg.list_pkgs() and sorts in place the list of versions for any packages that have multiple versions installed, so that two package lists can be compared to one another. CLI Example: .. code-block:: bash salt '*' pkg_resource.sort_pkglist '["3.45", "2.13"]'
5,065
def get_block_from_time(self, timestring, error_margin=10):
    known_block = self.get_current_block()[]
    known_block_timestamp = self.block_timestamp(known_block)
    timestring_timestamp = parse_time(timestring).timestamp()
    delta = known_block_timestamp - timestring_timestamp
    block_delta = delta / 3
    guess_block = known_block - block_delta
    guess_block_timestamp = self.block_timestamp(guess_block)
    error = timestring_timestamp - guess_block_timestamp
    while abs(error) > error_margin:
        guess_block += error / 3
        guess_block_timestamp = self.block_timestamp(guess_block)
        error = timestring_timestamp - guess_block_timestamp
    return int(guess_block)
Estimate block number from given time :param str timestring: String representing time :param int error_margin: Estimate block number within this interval (in seconds)
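A standalone sketch of the estimation loop above: with one block every 3 seconds, the first guess offsets the head block by delta/3 and is then refined until the error falls within the margin. All names here are illustrative.

def block_from_time(target_ts, head_block, head_ts, ts_of_block, margin=10):
    guess = head_block - (head_ts - target_ts) / 3
    error = target_ts - ts_of_block(guess)
    while abs(error) > margin:
        guess += error / 3
        error = target_ts - ts_of_block(guess)
    return int(guess)

# toy chain where block n was produced at n * 3 seconds
print(block_from_time(3000, 2000, 6000, lambda b: b * 3))  # -> 1000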
5,066
def retry(self, delay=0, group=None, message=None):
    args = [, self.jid, self.queue_name, self.worker_name, delay]
    if group is not None and message is not None:
        args.append(group)
        args.append(message)
    return self.client(*args)
Retry this job in a little bit, in the same queue. This is meant for the times when you detect a transient failure yourself
5,067
def to_python(self, value: Optional[str]) -> Optional[Any]:
    if isinstance(value, datetime.datetime):
        return value
    if value is None:
        return value
    if value == '':  # assumed empty-string literal, stripped in the source
        return None
    return iso_string_to_python_datetime(value)
Called during deserialization and during form ``clean()`` calls. Must deal with an instance of the correct type; a string; or ``None`` (if the field allows ``null=True``). Should raise ``ValidationError`` if problems.
5,068
def _compile_lock(self, query, value):
    if isinstance(value, basestring):
        return value
    if value is True:
        return
    elif value is False:
        return
Compile the lock into SQL :param query: A QueryBuilder instance :type query: QueryBuilder :param value: The lock value :type value: bool or str :return: The compiled lock :rtype: str
5,069
def check_file(filepath):
    check_path(filepath)
    if not os.path.exists(filepath):
        print("WARNING: File does not exist. Creating it: %s" % filepath)
        open(filepath, ).close()
    try:
        print("Setting access rights for %s for www-data user" % (filepath))
        uid = pwd.getpwnam("www-data").pw_uid
        gid = grp.getgrnam("www-data").gr_gid
        os.chown(filepath, uid, gid)
        os.chmod(filepath, 0o660)
    except Exception:
        print("WARNING: Could not adjust file system permissions for %s. "
              "Make sure your web server can write into it." % filepath)
- Checks if the parent directories for this path exist. - Checks that the file exists. - Donates the file to the web server user. TODO: This is Debian / Ubuntu specific.
5,070
def create(self, request):
    login_form = AuthenticationForm(request, data=request.data)
    if not login_form.is_valid():
        raise serializers.ValidationError(login_form.errors)
    auth_login(request, login_form.get_user())
    serializer = UserSerializer(request.user)
    return Response(serializer.data, status=status.HTTP_200_OK)
Log in a Django staff user.
5,071
def vars_to_array(self):
    logger.warn()
    if not self.vars:
        return None
    vars_matrix = matrix(self.vars,
                         size=(self.vars[0].size[0], len(self.vars))).trans()
    self.vars_array = np.array(vars_matrix)
    return self.vars_array
Convert `self.vars` to a numpy array Returns ------- numpy.array
5,072
def same_origin(url1, url2):
    p1, p2 = urlparse(url1), urlparse(url2)
    try:
        o1 = (p1.scheme, p1.hostname, p1.port or PROTOCOL_TO_PORT[p1.scheme])
        o2 = (p2.scheme, p2.hostname, p2.port or PROTOCOL_TO_PORT[p2.scheme])
        return o1 == o2
    except (ValueError, KeyError):
        return False
Return True if the urls have the same origin, else False. Copied from Django: https://github.com/django/django/blob/master/django/utils/http.py#L255
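Usage examples for same_origin, assuming PROTOCOL_TO_PORT maps 'http' to 80 and 'https' to 443 as in Django.

PROTOCOL_TO_PORT = {'http': 80, 'https': 443}  # assumed mapping
print(same_origin('http://example.com/a', 'http://example.com:80/b'))  # -> True
print(same_origin('http://example.com', 'https://example.com'))        # -> False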
5,073
def p_partselect_pointer_minus(self, p):
    p[0] = Partselect(p[1], p[3],
                      Minus(p[3], p[5], lineno=p.lineno(1)),
                      lineno=p.lineno(1))
    p.set_lineno(0, p.lineno(1))
partselect : pointer LBRACKET expression MINUSCOLON expression RBRACKET
5,074
def validate_auth_option(option, value):
    lower, value = validate(option, value)
    if lower not in _AUTH_OPTIONS:
        raise ConfigurationError( % (option,))
    return lower, value
Validate optional authentication parameters.
5,075
def json2space(x, oldy=None, name=NodeType.Root.value):
    y = list()
    if isinstance(x, dict):
        if NodeType.Type.value in x.keys():
            _type = x[NodeType.Type.value]
            name = name + + _type
            if _type == :
                if oldy != None:
                    _index = oldy[NodeType.Index.value]
                    y += json2space(x[NodeType.Value.value][_index],
                                    oldy[NodeType.Value.value],
                                    name=name + % _index)
                else:
                    y += json2space(x[NodeType.Value.value], None, name=name)
            y.append(name)
        else:
            for key in x.keys():
                y += json2space(x[key],
                                (oldy[key] if oldy != None else None),
                                name + "[%s]" % str(key))
    elif isinstance(x, list):
        for i, x_i in enumerate(x):
            y += json2space(x_i,
                            (oldy[i] if oldy != None else None),
                            name + "[%d]" % i)
    else:
        pass
    return y
Change search space from json format to hyperopt format
5,076
def metropolis_hastings_step(current_state: State,
                             proposed_state: State,
                             energy_change: FloatTensor,
                             seed=None) -> Tuple[State, tf.Tensor, tf.Tensor]:
    flat_current = tf.nest.flatten(current_state)
    flat_proposed = nest.flatten_up_to(current_state, proposed_state)
    # The accept/reject logic that computes next_state, is_accepted and
    # log_uniform is missing from the source.
    return next_state, is_accepted, log_uniform
Metropolis-Hastings step. This probabilistically chooses between `current_state` and `proposed_state` based on the `energy_change` so as to preserve detailed balance. Energy change is the negative of `log_accept_ratio`. Args: current_state: Current state. proposed_state: Proposed state. energy_change: E(proposed_state) - E(previous_state). seed: For reproducibility. Returns: new_state: The chosen state. is_accepted: Whether the proposed state was accepted. log_uniform: The random number that was used to select between the two states.
5,077
def create_path_env_var(new_entries, env=None, env_var='PATH', delimiter=':', prepend=False):
    # the default literals for env_var and delimiter were stripped in the
    # source; 'PATH' and ':' are assumed here
    if env is None:
        env = {}
    prev_path = env.get(env_var, None)
    if prev_path is None:
        path_dirs = list()
    else:
        path_dirs = list(prev_path.split(delimiter))
    new_entries_list = list(new_entries)
    if prepend:
        path_dirs = new_entries_list + path_dirs
    else:
        path_dirs += new_entries_list
    return delimiter.join(path_dirs)
Join path entries, combining with an environment variable if specified.
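A usage example for the helper above, under its assumed defaults.

env = {'PATH': '/usr/bin:/bin'}
print(create_path_env_var(['/opt/tool/bin'], env=env, prepend=True))
# -> '/opt/tool/bin:/usr/bin:/bin'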
5,078
def is_grouping_sane(cls, gtype):
    if gtype == cls.SHUFFLE or gtype == cls.ALL or gtype == cls.LOWEST or gtype == cls.NONE:
        return True
    elif isinstance(gtype, cls.FIELDS):
        return gtype.gtype == topology_pb2.Grouping.Value("FIELDS") and \
            gtype.fields is not None
    elif isinstance(gtype, cls.CUSTOM):
        return gtype.gtype == topology_pb2.Grouping.Value("CUSTOM") and \
            gtype.python_serialized is not None
    else:
        return False
Checks if a given gtype is sane
5,079
def attribute_md5(self):
    def utf8(str):
        if isinstance(str, six.string_types):
            return str.encode()
        return str
    md5 = hashlib.md5()
    struct_format = "!I".encode()
    # The loop header that binds `name`, `attr` and `data_type` for each
    # message attribute is missing from the source.
    encoded += struct.pack(struct_format, len(utf8(name))) + utf8(name)
    encoded += struct.pack(struct_format, len(data_type)) + utf8(data_type)
    encoded += TRANSPORT_TYPE_ENCODINGS[data_type]
    if data_type == or data_type == :
        value = attr[]
    elif data_type == :
        print(data_type, attr[], type(attr[]))
        value = base64.b64decode(attr[])
    else:
        # garbled in the source; originally reported an unsupported
        # attribute type here
        pass
    encoded += struct.pack(struct_format, len(utf8(value))) + utf8(value)
    md5.update(encoded)
    return md5.hexdigest()
The MD5 of all attributes is calculated by first generating a utf-8 string from each attribute and MD5-ing the concatenation of them all. Each attribute is encoded with some bytes that describe the length of each part and the type of attribute. Not yet implemented: List types (https://github.com/aws/aws-sdk-java/blob/7844c64cf248aed889811bf2e871ad6b276a89ca/aws-java-sdk-sqs/src/main/java/com/amazonaws/services/sqs/MessageMD5ChecksumHandler.java#L58k)
5,080
def createRootJob(self, *args, **kwargs):
    rootJob = self.create(*args, **kwargs)
    self.setRootJob(rootJob.jobStoreID)
    return rootJob
Create a new job and set it as the root job in this job store :rtype: toil.jobGraph.JobGraph
5,081
def __set_no_protein(self, hgvs_string):
    # the list literals were stripped in the source; the docstring says
    # "p.0" or "p.0?" are the no-protein cases
    no_protein_list = ['p.0', 'p.0?']
    if hgvs_string in no_protein_list:
        self.is_no_protein = True
        self.is_non_silent = True
    else:
        self.is_no_protein = False
Set a flag for no protein expected. ("p.0" or "p.0?") Args: hgvs_string (str): hgvs syntax with "p." removed
5,082
def _SendRecv():
    port = int(os.getenv(DEVSHELL_ENV, 0))
    if port == 0:
        raise NoDevshellServer()
    sock = socket.socket()
    # Stripped literals below ('localhost', the '{0}\n{1}' framing, the
    # '\n' separator and the 'utf-8' encoding) are assumed from the
    # length-prefixed message framing this function implements.
    sock.connect(('localhost', port))
    data = CREDENTIAL_INFO_REQUEST_JSON
    msg = '{0}\n{1}'.format(len(data), data)
    sock.sendall(_helpers._to_bytes(msg, encoding='utf-8'))
    header = sock.recv(6).decode()
    if '\n' not in header:
        raise CommunicationError()
    len_str, json_str = header.split('\n', 1)
    to_read = int(len_str) - len(json_str)
    if to_read > 0:
        json_str += sock.recv(to_read, socket.MSG_WAITALL).decode()
    return CredentialInfoResponse(json_str)
Communicate with the Developer Shell server socket.
5,083
def register(self, matchers, runnable):
    if getattr(self, , None) is not None and getattr(self, , None) is None:
        self.syscallrunnable = runnable
    else:
        for m in matchers:
            self.matchtree.insert(m, runnable)
            events = self.eventtree.findAndRemove(m)
            for e in events:
                self.queue.unblock(e)
            if m.indices[0] == PollEvent._classname0 and len(m.indices) >= 2:
                self.polling.onmatch(m.indices[1],
                                     None if len(m.indices) <= 2 else m.indices[2],
                                     True)
        self.registerIndex.setdefault(runnable, set()).update(matchers)
Register an iterator(runnable) to scheduler and wait for events :param matchers: sequence of EventMatchers :param runnable: an iterator that accept send method :param daemon: if True, the runnable will be registered as a daemon.
5,084
def length(self):
    item = self.head
    counter = 0
    while item is not None:
        counter += 1
        item = item.next_node
    return counter
Gets length.

:return: How many items are in the linked list.
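A minimal sketch exercising the traversal above; the node and list classes are hypothetical, since the source only shows the length method.

class Node:
    def __init__(self, next_node=None):
        self.next_node = next_node

class LinkedList:
    def __init__(self, head):
        self.head = head
    length = length  # reuse the method defined above

print(LinkedList(Node(Node(Node()))).length())  # -> 3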
5,085
def baseline_correct(G):
    baseidx = []
    baseidx.extend(range(np.min(np.where(G.f_ppm < 5.0)),
                         np.max(np.where(G.f_ppm > 4.0)) + 1))
    baseidx.extend(range(np.min(np.where(G.f_ppm < 3.5)),
                         np.max(np.where(G.f_ppm > 3.2)) + 1))
    baseidx.extend(range(np.min(np.where(G.f_ppm < 2.8)),
                         np.max(np.where(G.f_ppm > 2.5)) + 1))
    G.diff = np.mean(G.diff_spectra, 0)
    yArr = np.real(G.diff[baseidx])
    baseppm = G.f_ppm[baseidx]
    adjbaseppm = [baseppm[i] for i in np.where(baseppm <= np.max(G.f_ppm))[0]]
    f = interpolate.interp1d(adjbaseppm[::-1], yArr[::-1], kind=,
                             bounds_error=True, fill_value=0)
    fitidxmax = np.where(G.f_ppm < np.max(adjbaseppm))[0]
    fitidxmin = np.where(G.f_ppm > np.min(adjbaseppm))[0]
    fitidx = list(set(fitidxmax) & set(fitidxmin))
    basefit = f(G.f_ppm[fitidx])
    adjusted = G.diff[fitidx] - basefit
    G.diff_corrected = G.diff
    G.diff_corrected[fitidx] = adjusted
    G.baseline_corrected = True
This function zeroes the baseline from 2.5ppm upwards
5,086
def download(course, tid=None, dl_all=False, force=False, upgradejava=False, update=False):
    def dl(id):
        download_exercise(Exercise.get(Exercise.tid == id),
                          force=force,
                          update_java=upgradejava,
                          update=update)
    if dl_all:
        for exercise in list(course.exercises):
            dl(exercise.tid)
    elif tid is not None:
        dl(int(tid))
    else:
        for exercise in list(course.exercises):
            if not exercise.is_completed:
                dl(exercise.tid)
            else:
                exercise.update_downloaded()
Download the exercises from the server.
5,087
def p_article(self, article):
    article[0] = Article(article[1][4], article[2], article[3],
                         article[1][0], article[1][1], article[1][2],
                         article[1][3], article[1][5])
article : ARTICLEHEADER opttexts rules opttexts
5,088
def sdiffstore(self, destkey, key, *keys):
    # the stripped command literal is b'SDIFFSTORE', per the method name
    # and docstring
    return self.execute(b'SDIFFSTORE', destkey, key, *keys)
Subtract multiple sets and store the resulting set in a key.
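In plain-Python terms, the command computes a set difference and stores it; the snippet below mirrors that semantics with built-in sets.

first, rest = {1, 2, 3}, [{2}, {3, 4}]
print(first.difference(*rest))  # -> {1}: what would be stored at destkey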
5,089
def set_checkpoint(self, checkpoint_trigger, checkpoint_path, isOverWrite=True):
    if not os.path.exists(checkpoint_path):
        mkpath(checkpoint_path)
    callBigDlFunc(self.bigdl_type, "setCheckPoint", self.value,
                  checkpoint_trigger, checkpoint_path, isOverWrite)
Configure checkpoint settings. :param checkpoint_trigger: the interval to write snapshots :param checkpoint_path: the path to write snapshots into :param isOverWrite: whether to overwrite existing snapshots in path.default is True
5,090
def get_path_and_name(full_name):
    if full_name:
        parts = full_name.split("/")
        return ("/".join(parts[0:-1]), parts[-1]) if len(parts) > 1 else ("/", full_name)
    return None, None
Split a full resource name into 'Path' and 'Name'.

:param full_name: <str> Full Resource Name - like 'Root/Folder/Folder2/Name'
:return: tuple (Path, Name)
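Usage examples for the splitter above.

print(get_path_and_name('Root/Folder/Folder2/Name'))  # -> ('Root/Folder/Folder2', 'Name')
print(get_path_and_name('Name'))                      # -> ('/', 'Name')
print(get_path_and_name(''))                          # -> (None, None)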
5,091
def start(self):
    nat_interface_number = yield from self._look_for_interface("nat")
    if nat_interface_number < 0:
        raise GNS3VMError("The GNS3 VM: {} must have a NAT interface configured in order to start".format(self.vmname))
    hostonly_interface_number = yield from self._look_for_interface("hostonly")
    if hostonly_interface_number < 0:
        raise GNS3VMError("The GNS3 VM: {} must have a host only interface configured in order to start".format(self.vmname))
    vboxnet = yield from self._look_for_vboxnet(hostonly_interface_number)
    if vboxnet is None:
        raise GNS3VMError("VirtualBox host-only network could not be found for interface {} on GNS3 VM".format(hostonly_interface_number))
    if not (yield from self._check_dhcp_server(vboxnet)):
        raise GNS3VMError("DHCP must be enabled on VirtualBox host-only network: {} for GNS3 VM".format(vboxnet))
    vm_state = yield from self._get_state()
    log.info(.format(self._vmname, vm_state))
    if vm_state == "poweroff":
        yield from self.set_vcpus(self.vcpus)
        yield from self.set_ram(self.ram)
    if vm_state in ("poweroff", "saved"):
        args = [self._vmname]
        if self._headless:
            args.extend(["--type", "headless"])
        yield from self._execute("startvm", args)
    elif vm_state == "paused":
        args = [self._vmname, "resume"]
        yield from self._execute("controlvm", args)
    ip_address = "127.0.0.1"
    try:
        with socket.socket() as s:
            s.bind((ip_address, 0))
            api_port = s.getsockname()[1]
    except OSError as e:
        raise GNS3VMError("Error while getting random port: {}".format(e))
    if (yield from self._check_vbox_port_forwarding()):
        log.info("Removing GNS3VM NAT port forwarding rule from interface {}".format(nat_interface_number))
        yield from self._execute("controlvm", [self._vmname, "natpf{}".format(nat_interface_number), "delete", "GNS3VM"])
    log.info("Adding GNS3VM NAT port forwarding rule with port {} to interface {}".format(api_port, nat_interface_number))
    yield from self._execute("controlvm", [self._vmname, "natpf{}".format(nat_interface_number), "GNS3VM,tcp,{},{},,3080".format(ip_address, api_port)])
    self.ip_address = yield from self._get_ip(hostonly_interface_number, api_port)
    self.port = 3080
    log.info("GNS3 VM has been started with IP {}".format(self.ip_address))
    self.running = True
Start the GNS3 VM.
5,092
def toc(*args, **kwargs):
    global Gtic_start
    f_elapsedTime = time.time() - Gtic_start
    for key, value in kwargs.items():
        if key == :
            return value % f_elapsedTime
        if key == :
            return "Elapsed time = %f seconds." % f_elapsedTime
    return f_elapsedTime
Port of the MatLAB function of same name Behaviour is controllable to some extent by the keyword args:
5,093
def get_message_by_id(self, message_id):
    result = self.wapi_functions.getMessageById(message_id)
    if result:
        result = factory_message(result, self)
    return result
Fetch a message :param message_id: Message ID :type message_id: str :return: Message or False :rtype: Message
5,094
def clean_strings(iterable):
    retval = []
    for val in iterable:
        try:
            retval.append(val.strip())
        except AttributeError:
            retval.append(val)
    return retval
Take a list of strings and clear whitespace on each one. If a value in the list is not a string pass it through untouched. Args: iterable: mixed list Returns: mixed list
5,095
def dot(vec1, vec2):
    if isinstance(vec1, Vector3) and isinstance(vec2, Vector3):
        return (vec1.x * vec2.x) + (vec1.y * vec2.y) + (vec1.z * vec2.z)
    elif isinstance(vec1, Vector4) and isinstance(vec2, Vector4):
        return (vec1.x * vec2.x) + (vec1.y * vec2.y) + (vec1.z * vec2.z) + (vec1.w * vec2.w)
    else:
        raise TypeError("vec1 and vec2 must be a Vector type")
Returns the dot product of two Vectors
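A hedged usage sketch, assuming Vector3 takes x, y, z positionally.

v1, v2 = Vector3(1, 2, 3), Vector3(4, 5, 6)
print(dot(v1, v2))  # -> 1*4 + 2*5 + 3*6 = 32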
5,096
def iterate_from_vcf(infile, sample):
    vcf = pysam.VCF()
    vcf.connect(infile)
    if sample not in vcf.getsamples():
        raise KeyError("sample %s not in vcf file" % sample)
    for row in vcf.fetch():
        result = vcf2pileup(row, sample)
        if result:
            yield result
Iterate over a vcf-formatted file. *infile* can be any iterator over lines. The function yields named tuples of the type :class:`pysam.Pileup.PileupSubstitution` or :class:`pysam.Pileup.PileupIndel`. Positions without a snp will be skipped. This method is wasteful and written to support some legacy code that expects samtools pileup output. Better use the vcf parser directly.
5,097
def parse_fntdata(_data, _config, _extra_data_receiver=None):
    data = {}
    frame_data_list = []
    parse_common_info = parse("common lineHeight={line_height:d} base={base:d} scaleW={scale_w:d} scaleH={scale_h:d} pages={pages:d} packed={packed:d}", _data[1])
    parse_page_info = parse("page id={id:d} file=\"{file}\"", _data[2])
    parse_char_count = parse("chars count={count:d}", _data[3])
    raw_frames_data = {}
    for index in xrange(0, parse_char_count["count"]):
        parse_frame = parse("char id={id:d} x={x:d} y={y:d} width={width:d} height={height:d} xoffset={xoffset:d} yoffset={yoffset:d} xadvance={xadvance:d} page={page:d} chnl={chnl:d} letter=\"{letter}\"", _data[index + 4])
        frame_data = {}
        frame_data["name"] = "{prefix}_{id}.png".format(prefix=_config["prefix"], id=parse_frame["id"], letter=parse_frame["letter"])
        frame_data["source_size"] = (parse_frame["width"], parse_frame["height"])
        frame_data["rotated"] = False
        frame_data["src_rect"] = (parse_frame["x"], parse_frame["y"],
                                  parse_frame["x"] + parse_frame["width"],
                                  parse_frame["y"] + parse_frame["height"])
        frame_data["offset"] = (0, 0)
        if parse_frame["width"] <= 0 or parse_frame["height"] <= 0:
            continue
        frame_data_list.append(frame_data)
        parse_frame_named_data = parse_frame.named.copy()
        parse_frame_named_data["texture"] = frame_data["name"]
        raw_frames_data[parse_frame["id"]] = parse_frame_named_data
    data["texture"] = parse_page_info["file"]
    data["frames"] = frame_data_list
    if _extra_data_receiver != None:
        _extra_data_receiver["common"] = parse_common_info.named
        _extra_data_receiver["frames"] = raw_frames_data
    return data
info face="Haettenschweiler" size=60 bold=0 italic=0 charset="" unicode=0 stretchH=100 smooth=1 aa=1 padding=0,0,0,0 spacing=2,2
common lineHeight=64 base=53 scaleW=256 scaleH=128 pages=1 packed=0
page id=0 file="attack_num.png"
chars count=12
char id=52 x=2 y=2 width=33 height=51 xoffset=0 yoffset=5 xadvance=32 page=0 chnl=0 letter="4"
char id=48 x=37 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=29 page=0 chnl=0 letter="0"
char id=53 x=68 y=2 width=29 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="5"
char id=57 x=99 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="9"
char id=54 x=129 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="6"
char id=56 x=159 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="8"
char id=51 x=189 y=2 width=28 height=50 xoffset=1 yoffset=6 xadvance=28 page=0 chnl=0 letter="3"
char id=50 x=219 y=2 width=28 height=49 xoffset=1 yoffset=7 xadvance=28 page=0 chnl=0 letter="2"
char id=55 x=2 y=55 width=30 height=48 xoffset=1 yoffset=8 xadvance=28 page=0 chnl=0 letter="7"
char id=49 x=34 y=55 width=20 height=48 xoffset=1 yoffset=8 xadvance=20 page=0 chnl=0 letter="1"
char id=45 x=56 y=55 width=18 height=12 xoffset=1 yoffset=36 xadvance=19 page=0 chnl=0 letter="-"
char id=32 x=76 y=55 width=0 height=0 xoffset=11 yoffset=73 xadvance=16 page=0 chnl=0 letter="space"
5,098
def merge(cls, components):
    action = cls.EXTEND
    val = {}
    for component in components:
        if component.action is cls.REPLACE:
            val = component.val
            action = cls.REPLACE
        elif component.action is cls.EXTEND:
            val.update(component.val)
        else:
            raise ParseError(.format(component.action))
    return cls(action, val)
Merges components into a single component, applying their actions appropriately. This operation is associative: M(M(a, b), c) == M(a, M(b, c)) == M(a, b, c). :param list components: an iterable of instances of DictValueComponent. :return: An instance representing the result of merging the components. :rtype: `DictValueComponent`
5,099
def run(analysis, path=None, name=None, info=None, **kwargs):
    # the dict keys were stripped in the source; the parameter names make
    # them unambiguous
    kwargs.update({
        'analysis': analysis,
        'path': path,
        'name': name,
        'info': info,
    })
    main(**kwargs)
Run a single analysis. :param Analysis analysis: Analysis class to run. :param str path: Path of analysis. Can be `__file__`. :param str name: Name of the analysis. :param dict info: Optional entries are ``version``, ``title``, ``readme``, ... :param dict static: Map[url regex, root-folder] to serve static content.