Columns: code (string, 4 to 4.48k chars); docstring (string, 1 to 6.45k chars); _id (string, 24 chars)
def deal_cards(self): <NEW_LINE> <INDENT> self.get_order(self.dealer) <NEW_LINE> cards_available = self.cards.copy() <NEW_LINE> random.shuffle(cards_available) <NEW_LINE> rnd_1 = [2, 3, 2, 3] <NEW_LINE> rnd_2 = [3, 2, 3, 2] <NEW_LINE> for num, i in enumerate(self.order): <NEW_LINE> <INDENT> for _ in range(rnd_1[num]): <NEW_LINE> <INDENT> card = cards_available[-1] <NEW_LINE> cards_available.remove(card) <NEW_LINE> self.players[i].add_card(card) <NEW_LINE> <DEDENT> <DEDENT> for num, i in enumerate(self.order): <NEW_LINE> <INDENT> for _ in range(rnd_2[num]): <NEW_LINE> <INDENT> card = cards_available[-1] <NEW_LINE> cards_available.remove(card) <NEW_LINE> self.players[i].add_card(card) <NEW_LINE> <DEDENT> <DEDENT> self.pickup_card = cards_available[0] <NEW_LINE> for p in self.players: <NEW_LINE> <INDENT> if p.AI: <NEW_LINE> <INDENT> for c in p.cards: <NEW_LINE> <INDENT> p.set_state(k=str(c), v=4) <NEW_LINE> <DEDENT> p.set_state(k=str(self.pickup_card), v=6)
Dealer hands out cards in order of 2,3,2,3,3,2,3,2
625941b630bbd722463cbbe8
def update(self, seconds): <NEW_LINE> <INDENT> self.calcGravity(seconds) <NEW_LINE> self.rect.x += self.change_x <NEW_LINE> block_hit_list = pygame.sprite.spritecollide(self, self.level.platform_list, False) <NEW_LINE> for block in block_hit_list: <NEW_LINE> <INDENT> if self.change_x > 0: <NEW_LINE> <INDENT> self.rect.right = block.rect.left <NEW_LINE> <DEDENT> elif self.change_x < 0: <NEW_LINE> <INDENT> self.rect.left = block.rect.right <NEW_LINE> <DEDENT> <DEDENT> self.rect.y += self.change_y <NEW_LINE> self.arm.update(seconds) <NEW_LINE> block_hit_list = pygame.sprite.spritecollide(self, self.level.platform_list, False) <NEW_LINE> for block in block_hit_list: <NEW_LINE> <INDENT> if self.change_y > 0: <NEW_LINE> <INDENT> self.rect.bottom = block.rect.top <NEW_LINE> <DEDENT> elif self.change_y < 0: <NEW_LINE> <INDENT> self.rect.top = block.rect.bottom <NEW_LINE> <DEDENT> self.change_y = 0
Update stats, such as the position of the Player, where seconds is the time since the last frame
625941b65166f23b2e1a4f7f
def Parse(line, params=None, collection=None, resolve=True): <NEW_LINE> <INDENT> return REGISTRY.Parse( line=line, params=params, collection=collection, resolve=resolve)
Parse a Cloud resource from a command line. Args: line: str, The argument provided on the command line. params: {str:str}, The keyword argument context. collection: str, The resource's collection, or None if it should be inferred from the line. resolve: bool, If True, call the resource's .Resolve() method before returning, ensuring that all of the resource parameters are defined. If False, don't call them, under the assumption that it will be called later. Returns: A resource object. Raises: InvalidResourceException: If the line is invalid. UnknownCollectionException: If no collection is provided or can be inferred. WrongProtocolException: If the input was http:// instead of https://
625941b6287bf620b61d3896
def proxyGETService(self, **kwargs): <NEW_LINE> <INDENT> allParams = ['name', 'namespace'] <NEW_LINE> params = locals() <NEW_LINE> for (key, val) in params['kwargs'].iteritems(): <NEW_LINE> <INDENT> if key not in allParams: <NEW_LINE> <INDENT> raise TypeError("Got an unexpected keyword argument '%s'" " to method proxyGETService" % key) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> resourcePath = '/api/v1beta1/proxy/services/{name}' <NEW_LINE> resourcePath = resourcePath.replace('{format}', 'json') <NEW_LINE> method = 'GET' <NEW_LINE> queryParams = {} <NEW_LINE> headerParams = {} <NEW_LINE> formParams = {} <NEW_LINE> files = {} <NEW_LINE> bodyParam = None <NEW_LINE> headerParams['Accept'] = '*/*'; <NEW_LINE> headerParams['Content-Type'] = '*/*'; <NEW_LINE> if ('namespace' in params): <NEW_LINE> <INDENT> queryParams['namespace'] = self.apiClient.toPathValue( params['namespace']) <NEW_LINE> <DEDENT> if ('name' in params): <NEW_LINE> <INDENT> replacement = str(self.apiClient.toPathValue( params['name'])) <NEW_LINE> resourcePath = resourcePath.replace('{' + 'name' + '}', replacement) <NEW_LINE> <DEDENT> postData = (formParams if formParams else bodyParam) <NEW_LINE> response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files) <NEW_LINE> if not response: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> responseObject = self.apiClient.deserialize(response, 'void') <NEW_LINE> return responseObject
proxy GET requests to Service Args: name, str: name of the Service (required) namespace, str: object name and auth scope, such as for teams and projects (required) Returns: void
625941b6fb3f5b602dac34b5
@click.command('versions', short_help='Displays available IPS and resource versions.') <NEW_LINE> @click.argument('resource', default='ips', metavar='<resource>') <NEW_LINE> @pass_context <NEW_LINE> def cli(ctx, resource): <NEW_LINE> <INDENT> log = logging.getLogger('ipsv.setup') <NEW_LINE> assert isinstance(ctx, Context) <NEW_LINE> resource = str(resource).lower() <NEW_LINE> if resource == 'ips': <NEW_LINE> <INDENT> resource = IpsManager(ctx) <NEW_LINE> for r in resource.versions.values(): <NEW_LINE> <INDENT> click.secho(r.version.vstring, bold=True) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> if resource in ('dev_tools', 'dev tools'): <NEW_LINE> <INDENT> resource = DevToolsManager(ctx) <NEW_LINE> for r in resource.versions.values(): <NEW_LINE> <INDENT> click.secho('{v} ({id})'.format(v=r.version.vstring, id=r.version.vid), bold=True) <NEW_LINE> <DEDENT> return
Displays all locally cached <resource> versions available for installation.  Available resources: ips (default) dev_tools
625941b6b5575c28eb68de23
def kifserialize(ast, ontology, out): <NEW_LINE> <INDENT> for child in ast.children: <NEW_LINE> <INDENT> if child.ontology != ontology: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> line = "".join([str(child), '\n']) <NEW_LINE> out.write(line)
Writes ontology to disk as kif. Parses ast and writes out all nodes that belong to ontology. Args: - ast: the Abstract Syntax Tree - ontology: The specific ontology which is written to disk - out: The file object to which output is written Raises: - OSError
625941b6b57a9660fec336a6
def AutoStartPowerOff(self): <NEW_LINE> <INDENT> return self.delegate("AutoStartPowerOff")()
Powers-off virtual machines according to the current AutoStart configuration.
625941b67cff6e4e811177ac
def install_on_demand(self): <NEW_LINE> <INDENT> if not self.check_is_installed(): <NEW_LINE> <INDENT> self.logger.info("Installing software for validator: %s" % self.id) <NEW_LINE> self.check_requirements() <NEW_LINE> self.init_installation() <NEW_LINE> self.install() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.debug("Plug-in was already installed: %s" % self.id)
See if we are already installed. If not, install the required binary blobs and other crap to run this validator.
625941b63c8af77a43ae35c4
def op_abs(stack=None, **kwargs): <NEW_LINE> <INDENT> stack.append(bytevector(abs(number(stack.pop()))))
the input is made positive
625941b6de87d2750b85fbb4
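A minimal sketch exercising op_abs above; number and bytevector belong to the surrounding script engine, so the plain int/str stand-ins here are assumptions.

def number(b):       # assumption: stand-in for the engine's bytes-to-int helper
    return int(b)

def bytevector(n):   # assumption: stand-in for the engine's int-to-bytes helper
    return str(n)

stack = ['-5']
op_abs(stack=stack)
print(stack)         # ['5'] -- the popped value is made positive and pushed back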
@curry <NEW_LINE> def ends_with(needle, haystack): <NEW_LINE> <INDENT> return haystack[-len(needle) :] == needle
Checks if a list ends with the provided values
625941b655399d3f055884da
def muck_dependents(ctx:Ctx) -> None: <NEW_LINE> <INDENT> s = set() <NEW_LINE> for target in ctx.targets: <NEW_LINE> <INDENT> s.update(ctx.db.get_dependents(target)) <NEW_LINE> <DEDENT> for t in sorted(s): <NEW_LINE> <INDENT> outL(t)
`muck dependents` command.
625941b6be8e80087fb20a75
def extract_tarball(filename, force=False): <NEW_LINE> <INDENT> root = filename.split('.')[0] <NEW_LINE> if os.path.isdir(root) and not force: <NEW_LINE> <INDENT> print('%s already present - Skipping extraction of %s.' % (root, filename)) <NEW_LINE> return <NEW_LINE> <DEDENT> if (filename.endswith("tar.gz")): <NEW_LINE> <INDENT> print("Extracting %s ..." % filename) <NEW_LINE> tar = tarfile.open(filename, "r:gz") <NEW_LINE> tar.extractall() <NEW_LINE> tar.close()
Helper function for extracting tar archive file
625941b7d99f1b3c44c673be
def typen(text): <NEW_LINE> <INDENT> qualifiers = [] <NEW_LINE> for qualifier in language_qualifiers: <NEW_LINE> <INDENT> text, present = parse.get_remove(text, qualifier) <NEW_LINE> if present: <NEW_LINE> <INDENT> qualifiers.append(qualifier) <NEW_LINE> <DEDENT> <DEDENT> if ('<' in text) and ('>' in text): <NEW_LINE> <INDENT> template = parse.between(text, '<', '>') <NEW_LINE> text = parse.not_between(text, '<', '>') <NEW_LINE> assert ' ' not in text, "{} has a space in it".format(text) <NEW_LINE> template_arguments = parse.clean_split(template, ',') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> template_arguments = [] <NEW_LINE> <DEDENT> type_name, ref = parse.get_remove(text, '&') <NEW_LINE> if not ref: <NEW_LINE> <INDENT> type_name, ptr = parse.get_remove(text, '*') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ptr = False <NEW_LINE> <DEDENT> return { 'name': type_name, 'template_args': template_arguments, 'qualifiers': qualifiers, 'ref': ref, 'ptr': ptr, }
TODO: Use clang.
625941b78a43f66fc4b53e90
@contextmanager <NEW_LINE> def transaction() -> Generator: <NEW_LINE> <INDENT> session = current_session() <NEW_LINE> try: <NEW_LINE> <INDENT> yield session <NEW_LINE> session.commit() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> session.rollback() <NEW_LINE> raise
Context manager for database transaction.
625941b7ff9c53063f47c024
def execute_command(cmd, ignore_errors=False, direct_io=False, cwd=None): <NEW_LINE> <INDENT> jcmd = " ".join(cmd) <NEW_LINE> log.debug("Running command: {}".format(jcmd)) <NEW_LINE> try: <NEW_LINE> <INDENT> if direct_io: <NEW_LINE> <INDENT> pipe = Popen(cmd, cwd=cwd) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pipe = Popen(cmd, stdout=PIPE, stderr=PIPE, cwd=cwd) <NEW_LINE> <DEDENT> stdout, stderr = pipe.communicate() <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> log.error("Error executing command {}!".format(jcmd)) <NEW_LINE> raise <NEW_LINE> <DEDENT> if pipe.returncode != 0: <NEW_LINE> <INDENT> if ignore_errors: <NEW_LINE> <INDENT> log.warning("Command '{}' failed with exit code {}. Ignored.". format(jcmd, pipe.returncode)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not direct_io: <NEW_LINE> <INDENT> log.info("Command '{}' stdout:".format(jcmd)) <NEW_LINE> for line in stdout.splitlines(): <NEW_LINE> <INDENT> log.info(line) <NEW_LINE> <DEDENT> log.info("Command '{}' stderr:".format(jcmd)) <NEW_LINE> for line in stderr.splitlines(): <NEW_LINE> <INDENT> log.info(line) <NEW_LINE> <DEDENT> <DEDENT> msg = "Command '{}' failed with exit code {}. \n" "(stdout: {}\nstderr: {})" .format(jcmd, pipe.returncode, stdout, stderr) <NEW_LINE> log.error(msg) <NEW_LINE> raise Exception(msg) <NEW_LINE> <DEDENT> <DEDENT> return stdout
Execute the command `cmd` specified as a list of ['program', 'arg', ...]. If ignore_errors is true, a non-zero exit code will be ignored, otherwise an exception is raised. If direct_io is True, do not capture the stdout and stderr of the command. Returns the stdout of the command.
625941b7bf627c535bc12ffd
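A usage sketch for execute_command above, assuming a POSIX environment and that the module-level log and the subprocess imports (Popen, PIPE) it relies on are in place.

out = execute_command(["echo", "hello"])
print(out)                                       # b'hello\n' from Popen.communicate()
execute_command(["false"], ignore_errors=True)   # non-zero exit is only logged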
def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkMaskedRankImageFilterIUS2IUS2IUS2SE2_Superclass.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj
New() -> itkMaskedRankImageFilterIUS2IUS2IUS2SE2_Superclass Create a new object of the class itkMaskedRankImageFilterIUS2IUS2IUS2SE2_Superclass and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects - the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. Ex: itkMaskedRankImageFilterIUS2IUS2IUS2SE2_Superclass.New( reader, Threshold=10 ) is (most of the time) equivalent to: obj = itkMaskedRankImageFilterIUS2IUS2IUS2SE2_Superclass.New() obj.SetInput( 0, reader.GetOutput() ) obj.SetThreshold( 10 )
625941b7be383301e01b52b4
def predict_proba(self, X): <NEW_LINE> <INDENT> return (self.predict(X))
Determine the probability.
625941b71f037a2d8b946026
def save(self, labels: SemanticSegmentationLabels) -> None: <NEW_LINE> <INDENT> local_root = get_local_path(self.root_uri, self.tmp_dir) <NEW_LINE> make_dir(local_root) <NEW_LINE> out_profile = { 'driver': 'GTiff', 'height': self.extent.ymax, 'width': self.extent.xmax, 'transform': self.crs_transformer.get_affine_transform(), 'crs': self.crs_transformer.get_image_crs(), 'blockxsize': self.rasterio_block_size, 'blockysize': self.rasterio_block_size } <NEW_LINE> if self.score_raster_source: <NEW_LINE> <INDENT> log.info('Old scores found. Merging with current scores.') <NEW_LINE> old_labels = self.get_scores() <NEW_LINE> labels += old_labels <NEW_LINE> <DEDENT> self.write_discrete_raster_output( out_profile, get_local_path(self.label_uri, self.tmp_dir), labels) <NEW_LINE> if self.smooth_output: <NEW_LINE> <INDENT> self.write_smooth_raster_output( out_profile, get_local_path(self.score_uri, self.tmp_dir), get_local_path(self.hits_uri, self.tmp_dir), labels, chip_sz=self.rasterio_block_size) <NEW_LINE> <DEDENT> if self.vector_outputs: <NEW_LINE> <INDENT> self.write_vector_outputs(labels) <NEW_LINE> <DEDENT> sync_to_dir(local_root, self.root_uri)
Save labels to disk. More info on rasterio IO: - https://github.com/mapbox/rasterio/blob/master/docs/quickstart.rst - https://rasterio.readthedocs.io/en/latest/topics/windowed-rw.html Args: labels - (SemanticSegmentationLabels) labels to be saved
625941b7566aa707497f43a1
def start(name): <NEW_LINE> <INDENT> ret = {} <NEW_LINE> client = salt.client.get_local_client(__opts__['conf_file']) <NEW_LINE> data = vm_info(name, quiet=True) <NEW_LINE> if not data: <NEW_LINE> <INDENT> __progress__('Failed to find vm {0} to start'.format(name)) <NEW_LINE> return 'fail' <NEW_LINE> <DEDENT> hyper = next(data.iterkeys()) <NEW_LINE> if data[hyper][name]['state'] == 'running': <NEW_LINE> <INDENT> print('VM {0} is already running'.format(name)) <NEW_LINE> return 'bad state' <NEW_LINE> <DEDENT> cmd_ret = client.cmd_iter( hyper, 'virt.start', [name], timeout=600) <NEW_LINE> for comp in cmd_ret: <NEW_LINE> <INDENT> ret.update(comp) <NEW_LINE> <DEDENT> __progress__('Started VM {0}'.format(name)) <NEW_LINE> return 'good'
Start a named virtual machine
625941b76aa9bd52df036bc9
def test_eval_r_correct_evaluation(self): <NEW_LINE> <INDENT> eval_result = self.cost_function.eval_r(x=self.x_val, y=self.y_val, e=self.e_val, params=[5]) <NEW_LINE> self.assertTrue(all(eval_result == np.array([0, -0.75, 4])))
Test that eval_r is running the correct function
625941b73346ee7daa2b2b90
def get_test_examples(self, data_dir): <NEW_LINE> <INDENT> self.language = "en" <NEW_LINE> lines = self._read_tsv(os.path.join(data_dir, "xnli.test.tsv")) <NEW_LINE> examples = [] <NEW_LINE> for (i, line) in enumerate(lines): <NEW_LINE> <INDENT> if i == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> guid = "test-%d" % (i) <NEW_LINE> language = tokenization.convert_to_unicode(line[0]) <NEW_LINE> if language != tokenization.convert_to_unicode(self.language): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> text_a = tokenization.convert_to_unicode(line[6]) <NEW_LINE> text_b = tokenization.convert_to_unicode(line[7]) <NEW_LINE> label = tokenization.convert_to_unicode(line[1]) <NEW_LINE> examples.append( InputExample( guid=guid, text_a=text_a, text_b=text_b, label=label)) <NEW_LINE> <DEDENT> return examples
See base class.
625941b7462c4b4f79d1d4f7
def p_if_else_simple(p): <NEW_LINE> <INDENT> p[0] = symbol_coder.c_concatenate(p)
if : condition QU_MARK cmd COLON cmd | condition QU_MARK assign_value COLON assign_value
625941b7aad79263cf390861
def test_validate_cdf(): <NEW_LINE> <INDENT> def invalid_cdf(x, mu, sigma): <NEW_LINE> <INDENT> return stats.lognorm.cdf(x, sigma, scale=np.exp(mu)) <NEW_LINE> <DEDENT> with nose.tools.assert_raises(AttributeError): <NEW_LINE> <INDENT> inputs.Input(var=valid_var, cdf=invalid_cdf, bounds=valid_bounds, alpha=valid_alpha, params=valid_params)
Testing validation of cdf attribute.
625941b7d18da76e235322f8
def getQueryStringFromDb(self): <NEW_LINE> <INDENT> selectedIndex = self.dlg.cmbAnalysis.currentIndex() <NEW_LINE> if selectedIndex > 0: <NEW_LINE> <INDENT> selectedAnalysis = self.dlg.cmbAnalysis.itemData(selectedIndex) <NEW_LINE> queryString = "select * from selvansgeo.analysis where id = " <NEW_LINE> queryString += selectedAnalysis <NEW_LINE> whereClause = " id = " + selectedAnalysis <NEW_LINE> pgLayer = self.pgdb.getLayer("selvansgeo", "analysis", None, whereClause, "Analysis config", "fake_id") <NEW_LINE> iter = pgLayer.getFeatures() <NEW_LINE> for feature in iter: <NEW_LINE> <INDENT> attrs = feature.attributes() <NEW_LINE> idx = pgLayer.fields().indexFromName("querystring") <NEW_LINE> querystring = attrs[idx] <NEW_LINE> idx = pgLayer.fields().indexFromName("date_filtering") <NEW_LINE> datefiltering = self.toBool(attrs[idx]) <NEW_LINE> idx = pgLayer.fields().indexFromName("timerange_filtering") <NEW_LINE> timerangefiltering = self.toBool(attrs[idx]) <NEW_LINE> idx = pgLayer.fields().indexFromName("coupetype_filtering") <NEW_LINE> coupetypefiltering = self.toBool(attrs[idx]) <NEW_LINE> <DEDENT> if querystring and querystring != "": <NEW_LINE> <INDENT> self.dlg.txtMssqlQuery.setPlainText(querystring) <NEW_LINE> self.messageBar.pushInfo("Connexion PG", str(u"Définition " + u"récupérée avec succès")) <NEW_LINE> self.setUpAnalysisGui(datefiltering, timerangefiltering, coupetypefiltering) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dlg.txtMssqlQuery.setPlainText("") <NEW_LINE> self.messageBar.pushCritical("Erreur", str(u"La requête n'est pas " + u" définie dans la base")) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.dlg.txtMssqlQuery.setPlainText("")
Get the query string from db
625941b738b623060ff0ac16
def set_Tags(self, value): <NEW_LINE> <INDENT> super(CreateLinkPostInputSet, self)._set_input('Tags', value)
Set the value of the Tags input for this Choreo. ((optional, string) Comma-separated tags for this post.)
625941b7cdde0d52a9e52e55
def forward(self, words1, words2, seq_len1, seq_len2, target=None): <NEW_LINE> <INDENT> mask1 = seq_len_to_mask(seq_len1, words1.size(1)) <NEW_LINE> mask2 = seq_len_to_mask(seq_len2, words2.size(1)) <NEW_LINE> a0 = self.embedding(words1) <NEW_LINE> b0 = self.embedding(words2) <NEW_LINE> a0, b0 = self.dropout_embed(a0), self.dropout_embed(b0) <NEW_LINE> a = self.rnn(a0, mask1.byte()) <NEW_LINE> b = self.rnn(b0, mask2.byte()) <NEW_LINE> ai, bi = self.bi_attention(a, mask1, b, mask2) <NEW_LINE> a_ = torch.cat((a, ai, a - ai, a * ai), dim=2) <NEW_LINE> b_ = torch.cat((b, bi, b - bi, b * bi), dim=2) <NEW_LINE> a_f = self.interfere(a_) <NEW_LINE> b_f = self.interfere(b_) <NEW_LINE> a_h = self.rnn_high(a_f, mask1.byte()) <NEW_LINE> b_h = self.rnn_high(b_f, mask2.byte()) <NEW_LINE> a_avg = self.mean_pooling(a_h, mask1, dim=1) <NEW_LINE> a_max, _ = self.max_pooling(a_h, mask1, dim=1) <NEW_LINE> b_avg = self.mean_pooling(b_h, mask2, dim=1) <NEW_LINE> b_max, _ = self.max_pooling(b_h, mask2, dim=1) <NEW_LINE> out = torch.cat((a_avg, a_max, b_avg, b_max), dim=1) <NEW_LINE> logits = torch.tanh(self.classifier(out)) <NEW_LINE> if target is not None: <NEW_LINE> <INDENT> loss_fct = CrossEntropyLoss() <NEW_LINE> loss = loss_fct(logits, target) <NEW_LINE> return {Const.LOSS: loss, Const.OUTPUT: logits} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {Const.OUTPUT: logits}
:param words1: [batch, seq_len] :param words2: [batch, seq_len] :param seq_len1: [batch] :param seq_len2: [batch] :param target: :return:
625941b785dfad0860c3ac80
def main(): <NEW_LINE> <INDENT> imgFile = 'image.jpg' <NEW_LINE> cropFile = 'cropped.jpg' <NEW_LINE> filtFile = 'filtered.jpg' <NEW_LINE> outFile = 'output.csv' <NEW_LINE> get_image(imgFile, imgFile) <NEW_LINE> get_input_and_crop(imgFile, cropFile) <NEW_LINE> filter_image(cropFile, filtFile) <NEW_LINE> extractedText = extract_text(filtFile) <NEW_LINE> write_text(extractedText, outFile)
Main sequence for gathering an image and outputting text to a file
625941b7e1aae11d1e749ada
def clearselection(self): <NEW_LINE> <INDENT> for line in self.selectlines: <NEW_LINE> <INDENT> self.scene().removeItem(line)
Clears the selection from the stripchart
625941b72ae34c7f2600cf59
def __init__(self, parser=None, logger=None): <NEW_LINE> <INDENT> self.logger = logger <NEW_LINE> self.data = G.DATA <NEW_LINE> self.data.registerStore(IniFileStore(**{ 'name': 'Configuration', 'alias': 'config', 'filetype': 'config', 'synced': True })) <NEW_LINE> self._spawnDaemon()
ATTENTION: Be aware that creating any py:class::freedom.daemon.GenericDaemon might return either an instance of a GenericDaemon or a py:class::freedom.client.DaemonClient depending on a daemon already running or not! So please do not rely on the created daemon being a daemon instance in every case. Do a check first! Inits the daemon. You can provide your own py:class::argparse.ArgumentParser to this daemon. By default a freedm daemon supports a standard ArgumentParser and will add certain subparsers to the provided ArgumentParser for starting/stopping the daemon and for accessing daemon worker modules or creating a live console. :param parser: An optional py:class::argparse.ArgumentParser (default: None) :param logger: An optional py:class::logging.Logger (default: None)
625941b77047854f462a1234
def __neg__(self): <NEW_LINE> <INDENT> raise ValueError('- operator not supported for %s: use .as_array()' % self.__class__.__name__)
Negation of each element. Not supported on this class; raises ValueError advising use of .as_array().
625941b74a966d76dd550e33
def __str__(self): <NEW_LINE> <INDENT> return "[ThirdClass value is {0}]".format(self.data)
Overload of __str__ for printing
625941b7cdde0d52a9e52e56
def plot_precision_recall(y, y_prob): <NEW_LINE> <INDENT> Precision1, Recall1, thresholds = precision_recall_curve(y, y_prob[:, 0]) <NEW_LINE> Precision2, Recall2, thresholds = precision_recall_curve(y, y_prob[:, 1]) <NEW_LINE> fig = plt.figure() <NEW_LINE> fig.set_size_inches(12, 12) <NEW_LINE> ax1 = fig.add_subplot(1, 1, 1) <NEW_LINE> plt.plot(Recall1, Precision1, lw=2, label='1st') <NEW_LINE> plt.plot(Recall2, Precision2, lw=2, label='2nd') <NEW_LINE> ax1.set_xlabel('Recall', size=18) <NEW_LINE> ax1.set_ylabel('Precision', size=18) <NEW_LINE> ax1.tick_params(labelsize=18) <NEW_LINE> plt.ylim([0.0, 1.05]) <NEW_LINE> plt.xlim([0.0, 1.0]) <NEW_LINE> plt.legend(loc='upper center', bbox_to_anchor=(0.5, 1.05), fancybox=True, shadow=True) <NEW_LINE> fig.savefig(args.path_to_videos + '/pr.png')
Plots the precision recall curve for the model :param y: labels :param y_prob: predicted probabilities :return: None
625941b73eb6a72ae02ec301
def fit_source_loc(self, src_ra, src_dec, size, seed, **kwargs): <NEW_LINE> <INDENT> def _llh(x, *args): <NEW_LINE> <INDENT> if not (x[0] == self._src_ra and x[1] == self._src_dec): <NEW_LINE> <INDENT> self._select_events(x[0], x[1]) <NEW_LINE> <DEDENT> x = x[2:] <NEW_LINE> fit_pars = dict([(par, xi) for par, xi in zip(self.params, x)]) <NEW_LINE> fun, grad = self.llh(**fit_pars) <NEW_LINE> return -fun <NEW_LINE> <DEDENT> if "scramble" in kwargs: <NEW_LINE> <INDENT> raise ValueError("No scrambling of events allowed fit_source_loc") <NEW_LINE> <DEDENT> if "approx_grad" in kwargs and not kwargs["approx_grad"]: <NEW_LINE> <INDENT> raise ValueError("Cannot use gradients for location scan") <NEW_LINE> <DEDENT> kwargs.pop("approx_grad", None) <NEW_LINE> kwargs.setdefault("pgtol", _pgtol) <NEW_LINE> loc_bound = [[max(0., src_ra - size / np.cos(src_dec)), min(2. * np.pi, src_ra + size / np.cos(src_dec))], [src_dec - size, src_dec + size]] <NEW_LINE> pars = [src_ra, src_dec] + [seed[par] for par in self.params] <NEW_LINE> bounds = np.vstack([loc_bound, self.par_bounds]) <NEW_LINE> xmin, fmin, min_dict = scipy.optimize.fmin_l_bfgs_b( _llh, pars, bounds=bounds, approx_grad=True, **kwargs) <NEW_LINE> if self._N > 0 and abs(xmin[0]) > _rho_max * self._n: <NEW_LINE> <INDENT> logger.error(("nsources > {0:7.2%} * {1:6d} selected events, " "fit-value nsources = {2:8.1f}").format( _rho_max, self._n, xmin[0])) <NEW_LINE> <DEDENT> xmin = dict([("ra", xmin[0]), ("dec", xmin[1])] + [(par, xi) for par, xi in zip(self.params, xmin[2:])]) <NEW_LINE> fmin *= -np.sign(xmin["nsources"]) <NEW_LINE> return fmin, xmin
Minimize the negative log-Likelihood around interesting position. Parameters ---------- src_ra src_dec : array_like Source position(s). size : float Size of the box for minimisation seed : dictionary Best seed for region Returns ------- fmin : float Minimal function value turned into test statistic -sign(ns)*logLambda xmin : dict Parameters minimising the likelihood ratio. Other parameters ---------------- kwargs Parameters passed to the L-BFGS-B minimiser.
625941b791f36d47f21ac31d
def testONCAddPSKWifi(self): <NEW_LINE> <INDENT> wifi_networks = { 'ssid-wpa': 'WPA', } <NEW_LINE> self._ReadONCFileAndSet('toplevel_wifi_wpa_psk.onc') <NEW_LINE> self._VerifyRememberedWifiNetworks(wifi_networks)
Test adding WPA network.
625941b7377c676e91271fd2
def verify_password(saved_hashed_password, input_raw_password): <NEW_LINE> <INDENT> pwd_salt = saved_hashed_password[-32:] <NEW_LINE> return saved_hashed_password == get_hashed_password(input_raw_password, pwd_salt)
Check if the stored hashed password is generated by the input_raw_password The user's input raw password is hashed first and then compared with the 'saved_hashed_password'. Return a boolean value to indicate the comparison result.
625941b7097d151d1a222c83
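A hedged sketch of the get_hashed_password counterpart that verify_password assumes: a digest with the 32-character hex salt appended. The SHA-256 choice is an assumption, not taken from the record.

import hashlib
import uuid

def get_hashed_password(raw_password, salt=None):
    salt = salt or uuid.uuid4().hex                                   # 32 hex characters
    digest = hashlib.sha256((raw_password + salt).encode()).hexdigest()
    return digest + salt                                              # salt stored as the trailing 32 chars

stored = get_hashed_password('s3cret')
assert verify_password(stored, 's3cret')                              # recomputes with stored[-32:]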
def parsing_from_settings(): <NEW_LINE> <INDENT> success_parsing = 0 <NEW_LINE> messages_for_notification = {} <NEW_LINE> data_for_parsing = googlesheets.load_settings_for_parsing() <NEW_LINE> all_parsing = len(data_for_parsing) <NEW_LINE> for data in data_for_parsing: <NEW_LINE> <INDENT> if data[4] == 'Активен': <NEW_LINE> <INDENT> user_id = data[0] <NEW_LINE> url = data[1] <NEW_LINE> spreadsheet_url = data[2] <NEW_LINE> worksheet_title = data[3] <NEW_LINE> wb_parser = Parser(url) <NEW_LINE> wb_parser.parsing() <NEW_LINE> if len(wb_parser.data_result) != 0: <NEW_LINE> <INDENT> df_from_parsing = pd.DataFrame(wb_parser.data_result, columns=HEADERS_FOR_PARSING) <NEW_LINE> df_from_file = googlesheets.load_file_parsing_from_gsheet(spreadsheet_url, worksheet_title) <NEW_LINE> df_merge = df_from_file.merge(df_from_parsing, how='outer', left_on='id_product', right_on='id_product') <NEW_LINE> df_merge = analytics.change_analysis(df_merge) <NEW_LINE> if user_id in messages_for_notification: <NEW_LINE> <INDENT> messages_for_notification[user_id].append([ url, spreadsheet_url, worksheet_title, analytics.status_for_message(df_merge)]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages_for_notification[user_id] = [] <NEW_LINE> messages_for_notification[user_id].append([ url, spreadsheet_url, worksheet_title, analytics.status_for_message(df_merge)]) <NEW_LINE> <DEDENT> googlesheets.write_parsing_to_gsheet(spreadsheet_url, worksheet_title, df_merge) <NEW_LINE> print('Готово! Спарсили', len(wb_parser.data_result)) <NEW_LINE> success_parsing += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Что-то пошло не так') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print(f'{success_parsing} из {all_parsing}') <NEW_LINE> return messages_for_notification
:return: messages_for_notification # list of messages to send, according to the product statuses
625941b7c432627299f04a6b
def charge(self, price: int): <NEW_LINE> <INDENT> if price + self._balance > self._limit: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._balance += price <NEW_LINE> return True
Charge given price to the card, assuming sufficient credit limit. Return True if charge was processed; False if charge was denied.
625941b78e71fb1e9831d5d4
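A usage sketch for charge above; SimpleNamespace stands in for the real card class, which is not shown in the record but evidently carries _balance and _limit.

from types import SimpleNamespace

card = SimpleNamespace(_balance=0, _limit=100)
print(charge(card, 60))   # True  -- balance becomes 60
print(charge(card, 50))   # False -- 110 would exceed the 100 limit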
def getRear(self) -> int: <NEW_LINE> <INDENT> if self.isEmpty(): <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> return self.deque[self.right]
Get the last item from the deque.
625941b796565a6dacc8f4fc
def create_spooled_temporary_file(filepath): <NEW_LINE> <INDENT> spooled_file = tempfile.SpooledTemporaryFile( max_size=10 * 1024 * 1024, dir=settings.TMP_DIR) <NEW_LINE> tmpfile = open(filepath, 'r+b') <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> data = tmpfile.read(1024 * 1000) <NEW_LINE> if not data: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> spooled_file.write(data) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> tmpfile.close() <NEW_LINE> <DEDENT> return spooled_file
Create a spooled temporary file. :param filepath: Path of input file :type filepath: str :returns: file of the spooled temporary file :rtype: :class:`tempfile.SpooledTemporaryFile`
625941b75166f23b2e1a4f80
def test_00_payslip_flow(self): <NEW_LINE> <INDENT> richard_payslip = self.env['hr.payslip'].create({ 'name': 'Payslip of Richard', 'employee_id': self.richard_emp.id }) <NEW_LINE> payslip_input = self.env['hr.payslip.input'].search([('payslip_id', '=', richard_payslip.id)]) <NEW_LINE> payslip_input.write({'amount': 5.0}) <NEW_LINE> self.assertEqual(richard_payslip.state, 'draft', 'State not changed!') <NEW_LINE> context = { "lang": "en_US", "tz": False, "active_model": "ir.ui.menu", "department_id": False, "section_id": False, "active_ids": [self.ref("hr_payroll.menu_department_tree")], "active_id": self.ref("hr_payroll.menu_department_tree") } <NEW_LINE> richard_payslip.with_context(context).compute_sheet() <NEW_LINE> richard_payslip.action_payslip_done() <NEW_LINE> self.assertEqual(richard_payslip.state, 'done', 'State not changed!') <NEW_LINE> richard_payslip.refund_sheet() <NEW_LINE> payslip_refund = self.env['hr.payslip'].search([('name', 'like', 'Refund: '+ richard_payslip.name), ('credit_note', '=', True)]) <NEW_LINE> self.assertTrue(bool(payslip_refund), "Payslip not refunded!") <NEW_LINE> payslip_run = self.env['hr.payslip.run'].create({ 'date_end': '2011-09-30', 'date_start': '2011-09-01', 'name': 'Payslip for Employee' }) <NEW_LINE> payslip_employee = self.env['hr.payslip.employees'].create({ 'employee_ids': [(4, self.richard_emp.ids)] }) <NEW_LINE> payslip_employee.with_context(active_id=payslip_run.id).compute_sheet() <NEW_LINE> self.env['payslip.lines.contribution.register'].create({ 'date_from': '2011-09-30', 'date_to': '2011-09-01' }) <NEW_LINE> data, data_format = self.env.ref('hr_payroll.action_report_payslip').render(richard_payslip.ids) <NEW_LINE> if config.get('test_report_directory'): <NEW_LINE> <INDENT> open(os.path.join(config['test_report_directory'], 'hr_payroll-payslip.'+ data_format), 'wb+').write(data) <NEW_LINE> <DEDENT> data, data_format = self.env.ref('hr_payroll.payslip_details_report').render(richard_payslip.ids) <NEW_LINE> if config.get('test_report_directory'): <NEW_LINE> <INDENT> open(os.path.join(config['test_report_directory'], 'hr_payroll-payslipdetails.'+ data_format), 'wb+').write(data) <NEW_LINE> <DEDENT> context = {'model': 'hr.contribution.register', 'active_ids': [self.ref('hr_payroll.hr_houserent_register')]} <NEW_LINE> test_reports.try_report_action(self.env.cr, self.env.uid, 'action_payslip_lines_contribution_register', context=context, our_module='hr_payroll')
Testing payslip flow and report printing
625941b76fece00bbac2d562
def navRootObject(self): <NEW_LINE> <INDENT> parent = aq_parent(aq_inner(self)) <NEW_LINE> while parent and not IHelpCenterNavRoot.providedBy(parent): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> parent = aq_parent(parent) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return parent
Find the metadata parent
625941b729b78933be1e54e1
def find_revision_id(self, revision=None): <NEW_LINE> <INDENT> self.create() <NEW_LINE> revision = self.expand_branch_name(revision) <NEW_LINE> output = self.context.capture('git', 'rev-parse', revision) <NEW_LINE> return self.ensure_hexadecimal_string(output, 'git rev-parse')
Find the global revision id of the given revision.
625941b7090684286d50eb07
def parse_time_offset_from_now(s): <NEW_LINE> <INDENT> td = timedelta() <NEW_LINE> m1 = re.search(r"(^.*{current_time})([+-]\d+[smhd])(.*$)", s) <NEW_LINE> m2 = re.search(r"(^.*{now})([+-]\d+[smhd])(.*$)", s) <NEW_LINE> m = m1 or m2 <NEW_LINE> if m: <NEW_LINE> <INDENT> s1 = m.group(1) <NEW_LINE> s2 = m.group(2) <NEW_LINE> s3 = m.group(3) <NEW_LINE> s = s1 + s3 <NEW_LINE> td = parse_timedelta(s2) <NEW_LINE> <DEDENT> return s, td
Parses a string as used in the token event handler "New date {now}+5d. Some {other} {tags}" or "New date {now}-30m! Some {other} {tags}". This returns the string "New date {now}. Some {other} {tags}" and the timedelta of 5 days. Allowed tags are {now} and {current_time}. Only one tag of {now} or {current_time} is allowed. Allowed offsets are "s": seconds, "m": minutes, "h": hours, "d": days. :param s: The string to be parsed. :return: tuple of modified string and timedelta
625941b745492302aab5e0e7
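A worked example taken directly from the docstring above (parse_timedelta is the sibling helper the function calls).

s, td = parse_time_offset_from_now("New date {now}+5d. Some {other} {tags}")
print(s)    # New date {now}. Some {other} {tags}
print(td)   # 5 days, 0:00:00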
def save_yaml (object): <NEW_LINE> <INDENT> pass
Save yaml from object with possibly awkward content (eg NULLs)
625941b7d99f1b3c44c673bf
def click(self, timeout=None, offset=None): <NEW_LINE> <INDENT> self.must_wait(timeout=timeout) <NEW_LINE> x, y = self.center(offset=offset) <NEW_LINE> self.session.click(x, y)
Click UI element. Args: timeout: seconds wait element show up offset: (xoff, yoff) default (0.5, 0.5) -> center The click method does the same logic as java uiautomator does. 1. waitForExists 2. get VisibleBounds center 3. send click event Raises: UiObjectNotFoundError
625941b7796e427e537b03e9
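A usage sketch in the uiautomator style the docstring describes; the selector text, timeout, and offset are illustrative assumptions.

el = d(text='OK')             # assumption: d is a connected device session
el.click(timeout=10)          # wait up to 10 s for the element, then tap its center
el.click(offset=(0.9, 0.5))   # tap near the element's right edge instead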
def get_grade_point_average(credit_hours, grade_points): <NEW_LINE> <INDENT> GPA = grade_points / credit_hours <NEW_LINE> return GPA
Returns grade point average as a decimal value (float) :param credit_hours: Total credit hours for a student. :param grade_points: Total grade points for a student. :return: The grade point average for a student WRITE YOUR CODE AFTER THE THREE QUOTES BELOW
625941b7004d5f362079a15f
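A quick sanity check of the formula above.

print(get_grade_point_average(15, 45))   # 3.0 -- 45 grade points over 15 credit hours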
def objectWizzard(self, objectName): <NEW_LINE> <INDENT> inst = getattr(fb.gD['imports'][objectName], objectName) <NEW_LINE> if (inst != None): <NEW_LINE> <INDENT> return inst() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.DEBUG: <NEW_LINE> <INDENT> print('--- FilterFileReader.objectWizzard ---') <NEW_LINE> print("Unknown object '{0}', could not be created,".format(objectName)) <NEW_LINE> print("Class: FilterFileReader.objectWizzard\n")
Try to create an instance of "objectName". This is only possible when the corresponding module has been imported already, e.g. using the function dynamicImport. E.g. self.myFilter = fr.objectWizzard('cheby1') Parameters ---------- objectName: string The object to be constructed (e.g. 'cheby1' or 'equiripple') Returns ------- The instance
625941b74c3428357757c152
def laguerre(n,x): <NEW_LINE> <INDENT> _init() <NEW_LINE> return sage_eval(maxima.eval('laguerre(%s,x)'%ZZ(n)), locals={'x':x})
Return the Laguerre polynomial for integers `n > -1`. REFERENCE: - [ASHandbook]_ 22.5.16, page 778 and page 789. EXAMPLES:: sage: x = PolynomialRing(QQ, 'x').gen() sage: laguerre(2,x) 1/2*x^2 - 2*x + 1 sage: laguerre(3,x) -1/6*x^3 + 3/2*x^2 - 3*x + 1 sage: laguerre(2,2) -1
625941b7adb09d7d5db6c5bb
def test_init_state_from_dictionary(self): <NEW_LINE> <INDENT> my_amenity = Amenity() <NEW_LINE> my_amenity.name = "Holberton" <NEW_LINE> my_amenity.my_number = 89 <NEW_LINE> my_amenity_json = my_amenity.to_dict() <NEW_LINE> my_new_my_amenity = Amenity(**my_amenity_json) <NEW_LINE> dict_attr = {'name': 'Holberton', 'my_number': 89, 'id': my_amenity.id, 'created_at': my_amenity.created_at, 'updated_at': my_amenity.updated_at} <NEW_LINE> for key, value in dict_attr.items(): <NEW_LINE> <INDENT> self.assertTrue(hasattr(my_new_my_amenity, key)) <NEW_LINE> self.assertEqual(getattr(my_new_my_amenity, key), value) <NEW_LINE> <DEDENT> self.assertTrue(hasattr(my_new_my_amenity, key)) <NEW_LINE> cls_name = getattr(my_new_my_amenity, key) <NEW_LINE> self.assertNotEqual(cls_name, my_amenity_json["__class__"])
Checks behavior when a dictionary is passed to the init method.
625941b70383005118ecf40c
def create(self, validated_data): <NEW_LINE> <INDENT> if User.objects.filter(email=validated_data['email']).exists(): <NEW_LINE> <INDENT> raise serializers.ValidationError('Email already in use, please use a different email address.') <NEW_LINE> <DEDENT> response = hunter.email_verifier(validated_data['email']) <NEW_LINE> if not response['smtp_server'] or not response['smtp_check']: <NEW_LINE> <INDENT> raise serializers.ValidationError('Email is not verified by hunter.io, please use a different email address.') <NEW_LINE> <DEDENT> enrichment = clearbit.Enrichment.find(email=validated_data['email'], stream=True) <NEW_LINE> validated_data['enrichment'] = enrichment <NEW_LINE> validated_data['user_type'] = 'admin' <NEW_LINE> user = User.objects.create(**validated_data) <NEW_LINE> user.set_password(validated_data['password']) <NEW_LINE> user.save() <NEW_LINE> return user
Create the object. :param validated_data: string
625941b7d10714528d5ffb07
def random_flip(img, y_random=False, x_random=False, return_param=False, copy=False): <NEW_LINE> <INDENT> y_flip, x_flip = False, False <NEW_LINE> if y_random: <NEW_LINE> <INDENT> y_flip = random.choice([True, False]) <NEW_LINE> <DEDENT> if x_random: <NEW_LINE> <INDENT> x_flip = random.choice([True, False]) <NEW_LINE> <DEDENT> if y_flip: <NEW_LINE> <INDENT> img = img[:, ::-1, :] <NEW_LINE> <DEDENT> if x_flip: <NEW_LINE> <INDENT> img = img[:, :, ::-1] <NEW_LINE> <DEDENT> if copy: <NEW_LINE> <INDENT> img = img.copy() <NEW_LINE> <DEDENT> if return_param: <NEW_LINE> <INDENT> return img, {'y_flip': y_flip, 'x_flip': x_flip} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return img
:param img: numpy array in CHW format :param y_random: whether to randomly flip in the vertical direction :param x_random: whether to randomly flip in the horizontal direction :param return_param: whether to return the vertical/horizontal flip information :param copy: whether to create a new copy of img for the flip :return: depends on return_param; if False, only the flipped img is returned, otherwise both the flipped img and the flip information are returned
625941b70fa83653e4656de5
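A usage sketch for random_flip above; the toy CHW array is illustrative.

import numpy as np

img = np.arange(12).reshape(3, 2, 2)                 # CHW toy image
flipped, param = random_flip(img, y_random=True, x_random=True,
                             return_param=True)
print(param)                                         # e.g. {'y_flip': True, 'x_flip': False}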
def an_element(self): <NEW_LINE> <INDENT> c = self.cardinality() <NEW_LINE> return self[c>3 and 4 or (c>1 and -1 or 0)]
Returns a particular element of ``self``. EXAMPLES:: sage: RowStandardTableauTuples([[2],[2,1]]).an_element() ([[2, 4]], [[3, 5], [1]]) sage: RowStandardTableauTuples([[10],[],[]]).an_element() ([[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]], [], [])
625941b776d4e153a657e958
def encode_base64_ondemand(s): <NEW_LINE> <INDENT> if not all(c in string.ascii_letters for c in s): <NEW_LINE> <INDENT> s = ": " + base64.b64encode(s.encode()).decode() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s = " " + s <NEW_LINE> <DEDENT> return s
Encode string to base64 if it isn't plain ASCII letters. Also put a leading ': ' so that LDAP knows the string is b64 encoded. >>> encode_base64_ondemand("Hej") ' Hej' >>> encode_base64_ondemand("Höj") ': SMO2ag=='
625941b799fddb7c1c9de1bb
def build(self): <NEW_LINE> <INDENT> layers = [] <NEW_LINE> in_filters = self.channels <NEW_LINE> for out_filters, stride, normalize in [(self.feature_map, 2, False), (self.feature_map*2, 2, True), (self.feature_map*4, 2, True), (self.feature_map*8, 1, True)]: <NEW_LINE> <INDENT> layers.extend(self.discriminator_block(in_filters, out_filters, stride, normalize)) <NEW_LINE> in_filters = out_filters <NEW_LINE> <DEDENT> layers.append(nn.Conv2d(out_filters, 1, 3, 1, 1)) <NEW_LINE> self.main = nn.Sequential(*layers)
Build Discriminator model
625941b721a7993f00bc7b11
def cms(self): <NEW_LINE> <INDENT> return M(c="cms")( M("Series", f="series")( M("Create", m="create"), M("Search"), M("View as Pages", f="blog"), ), M("Posts", f="post")( M("Create", m="create"), M("Search"), M("View as Pages", f="page"), ), )
CMS / Content Management System
625941b74428ac0f6e5ba619
def equals(self, rhs): <NEW_LINE> <INDENT> if isinstance(rhs, (float, int, np.float64, np.int64, np.float32, np.int32)): <NEW_LINE> <INDENT> for i in xrange(len(self._vectors)): <NEW_LINE> <INDENT> self._vectors[i].equals(rhs) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._check_type(rhs) <NEW_LINE> for i in xrange(len(self._vectors)): <NEW_LINE> <INDENT> self._vectors[i].equals(rhs._vectors[i])
Used as the assignment operator. If val is a scalar, all vector elements are set to the scalar value. If val is a vector, the two vectors are set equal. Parameters ---------- rhs : float or CompositeVector Right hand side term for assignment.
625941b74428ac0f6e5ba61a
def next(self, autoplay=True, track=None): <NEW_LINE> <INDENT> if track is None: <NEW_LINE> <INDENT> if self.__queue_has_tracks: <NEW_LINE> <INDENT> if not self.__remove_item_on_playback: <NEW_LINE> <INDENT> track = playlist.Playlist.next(self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> track = self.pop(0) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if track is None: <NEW_LINE> <INDENT> self.__queue_has_tracks = False <NEW_LINE> <DEDENT> <DEDENT> if track is None and self.current_playlist is not self: <NEW_LINE> <INDENT> track = self.current_playlist.next() <NEW_LINE> <DEDENT> <DEDENT> if autoplay: <NEW_LINE> <INDENT> self.player.play(track) <NEW_LINE> <DEDENT> if not track: <NEW_LINE> <INDENT> event.log_event("playback_playlist_end", self, self.current_playlist) <NEW_LINE> <DEDENT> return track
Goes to the next track, either in the queue, or in the current playlist. If a track is passed in, that track is played :param autoplay: play the track in addition to returning it :type autoplay: bool :param track: if passed, play this track :type track: :class:`xl.trax.Track` .. note:: The following :doc:`events </xl/event>` will be emitted by this method: * `playback_playlist_end`: indicates that the end of the queue has been reached
625941b7ec188e330fd5a5ce
def main(): <NEW_LINE> <INDENT> os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'djange_rest_test.settings') <NEW_LINE> try: <NEW_LINE> <INDENT> from django.core.management import execute_from_command_line <NEW_LINE> <DEDENT> except ImportError as exc: <NEW_LINE> <INDENT> raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) from exc <NEW_LINE> <DEDENT> execute_from_command_line(sys.argv)
Run administrative tasks.
625941b723849d37ff7b2eb9
def test_main_no_users(self): <NEW_LINE> <INDENT> response = self.client.get('/') <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertIn(b'All Users', response.data) <NEW_LINE> self.assertIn(b'<p>No users!</p>', response.data)
Ensure the main route behaves correctly when no users have been added to the database.
625941b782261d6c526ab2cb
def _packet_handler(self, count, slpack): <NEW_LINE> <INDENT> if slpack is None or (slpack == SLPacket.SLNOPACKET) or (slpack == SLPacket.SLERROR): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> type_ = slpack.get_type() <NEW_LINE> if self.debug: <NEW_LINE> <INDENT> print(type_) <NEW_LINE> <DEDENT> if type_ == SLPacket.TYPE_SLINF: <NEW_LINE> <INDENT> if self.debug: <NEW_LINE> <INDENT> print(SLPacket.TYPE_SLINF) <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> elif type_ == SLPacket.TYPE_SLINFT: <NEW_LINE> <INDENT> if self.debug: <NEW_LINE> <INDENT> print("Complete INFO:", self._slclient.slconn.get_info_string()) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> trace = slpack.get_trace() <NEW_LINE> if trace is None: <NEW_LINE> <INDENT> if self.debug: <NEW_LINE> <INDENT> print("Blockette contains no trace") <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> self.stream += trace <NEW_LINE> self.stream.merge(-1) <NEW_LINE> return False
Custom packet handler that accumulates all waveform packets in a stream.
625941b7a79ad161976cbf6d
@task() <NEW_LINE> @skip_if_no_change("vulture", expect_files="dead_code.txt") <NEW_LINE> @timed() <NEW_LINE> def dead_code(): <NEW_LINE> <INDENT> with safe_cd(SRC): <NEW_LINE> <INDENT> if IS_TRAVIS: <NEW_LINE> <INDENT> command = "{0} vulture {1}".format(PYTHON, PROJECT_NAME).strip().split() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> command = "{0} vulture {1}".format(PIPENV, PROJECT_NAME).strip().split() <NEW_LINE> <DEDENT> output_file_name = "dead_code.txt" <NEW_LINE> with open(output_file_name, "w") as outfile: <NEW_LINE> <INDENT> env = config_pythonpath() <NEW_LINE> subprocess.call(command, stdout=outfile, env=env) <NEW_LINE> <DEDENT> cutoff = 1000 <NEW_LINE> print("High cutt off for dead code because not even out of beta") <NEW_LINE> num_lines = sum(1 for line in open(output_file_name) if line) <NEW_LINE> if num_lines > cutoff: <NEW_LINE> <INDENT> print("Too many lines of dead code : {0}, max {1}".format(num_lines, cutoff)) <NEW_LINE> exit(-1)
This also finds code you are working on today!
625941b7462c4b4f79d1d4f8
def run_hsmnet_model(max_disp, entropy_threshold, level, scale_factor, weights, left_image, right_image, output_file ): <NEW_LINE> <INDENT> network = HSMNet(max_disp=max_disp, entropy_threshold=entropy_threshold, level=level, scale_factor=scale_factor, weights=weights) <NEW_LINE> left = cv2.imread(left_image) <NEW_LINE> right = cv2.imread(right_image) <NEW_LINE> disp, entropy = network.predict(left, right) <NEW_LINE> cv2.imwrite(output_file, disp)
This is for the command line entry point
625941b7be7bc26dc91cd42e
def update(self, commit=True, **kwargs): <NEW_LINE> <INDENT> for attr, value in self._preprocess_params(kwargs).iteritems(): <NEW_LINE> <INDENT> setattr(self, attr, value) <NEW_LINE> <DEDENT> return commit and self.save() or self
Update specific fields of a record.
625941b77b25080760e39283
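A usage sketch assuming a model mixin that provides save() and _preprocess_params, as the method implies; the Post model and its fields are hypothetical (note .iteritems() marks this as Python 2 code).

post = Post.query.get(1)                 # hypothetical Flask-SQLAlchemy model
post.update(title='New title')           # set fields, then commit via save()
post.update(commit=False, draft=True)    # set fields without committing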
def _get_version_from_servermap(self, mode, servermap=None, version=None): <NEW_LINE> <INDENT> if servermap and servermap.get_last_update()[0] == mode: <NEW_LINE> <INDENT> d = defer.succeed(servermap) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d = self._get_servermap(mode) <NEW_LINE> <DEDENT> def _get_version(servermap, v): <NEW_LINE> <INDENT> if v and v not in servermap.recoverable_versions(): <NEW_LINE> <INDENT> v = None <NEW_LINE> <DEDENT> elif not v: <NEW_LINE> <INDENT> v = servermap.best_recoverable_version() <NEW_LINE> <DEDENT> if not v: <NEW_LINE> <INDENT> raise UnrecoverableFileError("no recoverable versions") <NEW_LINE> <DEDENT> return (servermap, v) <NEW_LINE> <DEDENT> return d.addCallback(_get_version, version)
I return a Deferred that fires with (servermap, version). This function performs validation and a servermap update. If it returns (servermap, version), the caller can assume that: - servermap was last updated in mode. - version is recoverable, and corresponds to the servermap. If version and servermap are provided to me, I will validate that version exists in the servermap, and that the servermap was updated correctly. If version is not provided, but servermap is, I will validate the servermap and return the best recoverable version that I can find in the servermap. If the version is provided but the servermap isn't, I will obtain a servermap that has been updated in the correct mode and validate that version is found and recoverable. If neither servermap nor version are provided, I will obtain a servermap updated in the correct mode, and return the best recoverable version that I can find in there.
625941b7287bf620b61d3897
@pytest.fixture <NEW_LINE> def topic_json_importer(db, filename, study): <NEW_LINE> <INDENT> return TopicJsonImport(filename, study)
A topic json importer
625941b7be383301e01b52b5
def draw(self, cur_drawing): <NEW_LINE> <INDENT> for position in (self.start_x_position, self.end_x_position, self.start_y_position, self.end_y_position): <NEW_LINE> <INDENT> assert position != -1, "Need to set drawing coordinates." <NEW_LINE> <DEDENT> cur_y_pos = self.start_y_position <NEW_LINE> if self.scale_num: <NEW_LINE> <INDENT> y_pos_change = ((self.start_y_position * .95 - self.end_y_position) / self.scale_num) <NEW_LINE> <DEDENT> elif len(self._sub_components) > 0: <NEW_LINE> <INDENT> y_pos_change = ((self.start_y_position * .95 - self.end_y_position) / self.subcomponent_size()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> left_labels = [] <NEW_LINE> right_labels = [] <NEW_LINE> for sub_component in self._sub_components: <NEW_LINE> <INDENT> this_y_pos_change = sub_component.scale * y_pos_change <NEW_LINE> sub_component.start_x_position = self.start_x_position <NEW_LINE> sub_component.end_x_position = self.end_x_position <NEW_LINE> sub_component.start_y_position = cur_y_pos <NEW_LINE> sub_component.end_y_position = cur_y_pos - this_y_pos_change <NEW_LINE> sub_component._left_labels = [] <NEW_LINE> sub_component._right_labels = [] <NEW_LINE> sub_component.draw(cur_drawing) <NEW_LINE> left_labels += sub_component._left_labels <NEW_LINE> right_labels += sub_component._right_labels <NEW_LINE> cur_y_pos -= this_y_pos_change <NEW_LINE> <DEDENT> self._draw_labels(cur_drawing, left_labels, right_labels) <NEW_LINE> self._draw_label(cur_drawing, self._name)
Draw a chromosome on the specified template. Ideally, the x_position and y_*_position attributes should be set prior to drawing -- otherwise we're going to have some problems.
625941b7b57a9660fec336a8
def tableau_neg(self, mode): <NEW_LINE> <INDENT> return {"¬∨": ("α", [Neg(self.phi), Neg(self.psi)])}
¬(φ∨ψ) | ¬φ | ¬ψ
625941b76fece00bbac2d563
@respond_to('xkcd$', re.IGNORECASE) <NEW_LINE> def xkcd2(message): <NEW_LINE> <INDENT> xkcd1(message, 'latest')
method overloading to have just one parameter
625941b791af0d3eaac9b83c
def test_there_is_a_reject_moderation_url_that_hides_the_message(self): <NEW_LINE> <INDENT> self.client.login(username=self.owner.username, password='feroz') <NEW_LINE> public_message = self.private_message <NEW_LINE> public_message.public = True <NEW_LINE> public_message.save() <NEW_LINE> url = reverse( 'moderation_rejected', subdomain=public_message.writeitinstance.slug, kwargs={ 'slug': public_message.moderation.key }) <NEW_LINE> response = self.client.get(url) <NEW_LINE> self.assertEquals(response.status_code, 200) <NEW_LINE> self.assertTemplateUsed(response, 'nuntium/moderation_rejected.html') <NEW_LINE> message = Message.objects.get(id=public_message.id) <NEW_LINE> self.assertFalse(message.public) <NEW_LINE> self.assertTrue(message.moderated)
This is the case when you, the proud owner of a writeitInstance, think that the private message should not go anywhere and should be hidden
625941b797e22403b379cdc1
def __init__(self, sink: Union[str, JavaObject]): <NEW_LINE> <INDENT> super(Sink, self).__init__(sink)
Constructor of Sink. :param sink: The java Sink object.
625941b745492302aab5e0e8
def Patch(height_index,width_index): <NEW_LINE> <INDENT> transpose_array = input_mat <NEW_LINE> height_slice = slice(height_index, height_index+PATCH_SIZE) <NEW_LINE> width_slice = slice(width_index, width_index+PATCH_SIZE) <NEW_LINE> patch = transpose_array[:, height_slice, width_slice] <NEW_LINE> mean_normalized_patch = [] <NEW_LINE> for i in range(patch.shape[0]): <NEW_LINE> <INDENT> mean_normalized_patch.append(patch[i] - MEAN_ARRAY[i]) <NEW_LINE> <DEDENT> return np.array(mean_normalized_patch)
Returns a mean-normalized patch, the top left corner of which is at (height_index, width_index) Inputs: height_index - row index of the top left corner of the image patch width_index - column index of the top left corner of the image patch Outputs: mean_normalized_patch - mean normalized patch of size (PATCH_SIZE, PATCH_SIZE) whose top left corner is at (height_index, width_index)
625941b7925a0f43d2549c9c
def _parse_package_version(version): <NEW_LINE> <INDENT> out = re.match("(([0-9]+):)?(.*)", version) <NEW_LINE> if out: <NEW_LINE> <INDENT> epoch = out.group(2) <NEW_LINE> version = out.group(3) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> epoch = None <NEW_LINE> <DEDENT> parts = version.split("-") <NEW_LINE> out = re.match("([0-9\\.]*)[a-z]*([0-9\\.]*)", parts[-1]) <NEW_LINE> if len(parts) > 1 and out: <NEW_LINE> <INDENT> upstream = "-".join(parts[:-1]) <NEW_LINE> debian = out.group(1) <NEW_LINE> ubuntu = out.group(2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> upstream = version <NEW_LINE> debian = None <NEW_LINE> ubuntu = None <NEW_LINE> <DEDENT> return epoch, upstream, debian, ubuntu
Dissect version in upstream, debian/ubuntu parts.
625941b73539df3088e2e173
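A worked example for _parse_package_version above, tracing a typical Ubuntu version string.

print(_parse_package_version("1:2.20.1-1ubuntu2"))
# ('1', '2.20.1', '1', '2') -- epoch, upstream, debian, ubuntu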
def train_network(config: MuZeroConfig, storage: SharedStorage, replay_buffer: ReplayBuffer) -> None: <NEW_LINE> <INDENT> print('##### START TRAINING #####', flush=True) <NEW_LINE> network = Network() <NEW_LINE> learning_rate = config.lr_init * config.lr_decay_rate <NEW_LINE> optimizer = tf.keras.optimizers.RMSprop(learning_rate, momentum=config.momentum) <NEW_LINE> for i in trange(config.training_steps): <NEW_LINE> <INDENT> if i % config.checkpoint_interval == 0: <NEW_LINE> <INDENT> storage.save_network(i, network) <NEW_LINE> <DEDENT> batch = replay_buffer.sample_batch(config.num_unroll_steps, config.td_steps) <NEW_LINE> update_weights(optimizer, network, batch, config.weight_decay) <NEW_LINE> <DEDENT> storage.save_network(config.training_steps, network) <NEW_LINE> print("NB NW IN STORAGE", len(storage._networks), flush=True)
Creates a new network object which is trained on available data for n training steps Parameters ---------- config : MuZeroConfig The configuration of the MuZero agent. storage : SharedStorage A shared object containing networks. replay_buffer : ReplayBuffer The replay buffer of ???
625941b792d797404e303fb2
def __str__(self): <NEW_LINE> <INDENT> return "Point in {home} vs. {away}".format( away=self.game.opponent, home=self.game.team.name)
Get a string representation of the instance.
625941b7d58c6744b4257a89
def mySort(data,col): <NEW_LINE> <INDENT> newList = sorted(data,key=lambda k: k[col]) <NEW_LINE> firstDict = newList[0] <NEW_LINE> return firstDict['Last'] + " " + firstDict['First']
Return the last name and first name of the first item in a sorted list data -- list of dictionaries col -- (key) to sort on returns -- the first item in the sorted list as a string of just: lastName firstName
625941b738b623060ff0ac17
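A worked example for mySort above; the records are illustrative.

data = [{'First': 'Ada',  'Last': 'Lovelace', 'GPA': 3.9},
        {'First': 'Alan', 'Last': 'Turing',   'GPA': 3.8}]
print(mySort(data, 'GPA'))   # Turing Alan -- the lowest GPA sorts first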
def test_deploying_service_registry_with_changed_min_price( deploy_tester_contract: Callable, custom_token: Contract ) -> None: <NEW_LINE> <INDENT> deploy_tester_contract( CONTRACT_SERVICE_REGISTRY, _token_for_registration=custom_token.address, _controller=DEPLOYER_ADDRESS, _initial_price=100, _price_bump_numerator=DEFAULT_BUMP_NUMERATOR, _price_bump_denominator=DEFAULT_BUMP_DENOMINATOR, _decay_constant=DEFAULT_DECAY_CONSTANT, _min_price=100, _registration_duration=DEFAULT_REGISTRATION_DURATION, )
ServiceRegistry's constructor must allow changing `min_price`
625941b732920d7e50b27ff4
def __init__(self, channel): <NEW_LINE> <INDENT> self.AdicionarContato = channel.unary_unary( '/Greeter/AdicionarContato', request_serializer=helloworld__pb2.Contato.SerializeToString, response_deserializer=helloworld__pb2.Resposta.FromString, ) <NEW_LINE> self.RemoverContato = channel.unary_unary( '/Greeter/RemoverContato', request_serializer=helloworld__pb2.Nome.SerializeToString, response_deserializer=helloworld__pb2.Resposta.FromString, ) <NEW_LINE> self.ListarContatos = channel.unary_unary( '/Greeter/ListarContatos', request_serializer=helloworld__pb2.Request.SerializeToString, response_deserializer=helloworld__pb2.Resposta.FromString, )
Constructor. Args: channel: A grpc.Channel.
625941b7ab23a570cc24ffa8
def init_hidden(self, batch_size): <NEW_LINE> <INDENT> return torch.zeros(self.num_layers, batch_size, self.hidden_size)
:param batch_size: input.shape[0] :return: zero-initialized hidden states
625941b76e29344779a6243e
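A shape check for init_hidden above; SimpleNamespace stands in for the RNN wrapper instance, and the method is called as a plain function for illustration.

import torch
from types import SimpleNamespace

rnn = SimpleNamespace(num_layers=2, hidden_size=16)
h0 = init_hidden(rnn, batch_size=8)
print(h0.shape)                          # torch.Size([2, 8, 16])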
def search(self, key) -> KeyedItem: <NEW_LINE> <INDENT> return SinglyUnsortedBasedDictionnary.search(self, key)
O(n)
625941b7d4950a0f3b08c183
def plot_abs_change(*dicts): <NEW_LINE> <INDENT> argnum = len(dicts) <NEW_LINE> thre_range = np.arange(0.01,0.05,0.0025) <NEW_LINE> label_list = [] <NEW_LINE> for arg_i in np.arange(argnum): <NEW_LINE> <INDENT> if dicts[arg_i]['methods'] != 'lasso': <NEW_LINE> <INDENT> label = dicts[arg_i]['methods'] <NEW_LINE> label_list.append(label) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> label = dicts[arg_i]['methods'] + str(dicts[arg_i]['shrunk_param']) <NEW_LINE> label_list.append(label) <NEW_LINE> <DEDENT> <DEDENT> abs_change_dic = {} <NEW_LINE> for arg_i in np.arange(argnum): <NEW_LINE> <INDENT> abs_change_dic[label_list[arg_i]] = np.array([ check_abs_change_portfolio(normalized_propotion_array(dicts[arg_i]['sol_output_array'],thre=i)) for i in thre_range ]) <NEW_LINE> <DEDENT> fig = plt.figure() <NEW_LINE> ax1 = fig.add_subplot(111) <NEW_LINE> for arg_i in np.arange(argnum): <NEW_LINE> <INDENT> ax1.plot(thre_range, abs_change_dic[label_list[arg_i]], label=label_list[arg_i], color=plt.cm.jet(arg_i*(1./argnum))) <NEW_LINE> <DEDENT> plt.title("Abs values of change") <NEW_LINE> plt.xlabel("propotion of threshold") <NEW_LINE> plt.ylabel("sum of absolute value of change") <NEW_LINE> plt.legend(loc="upper left") <NEW_LINE> fig.show()
input parameter ---------------------- dicts : variable number of dicts, each an output of the roling_portfolio function. Here we assume the 'methods' argument differs across dicts, except for 'lasso'. Ex. plot_abs_change(emp_roling_dict, lasso_roling_dict, shrunk_roling_dict) --------------------- returns --------------------- None ---------------------
625941b79f2886367277a6ba
def potential_of_mean_force(self, collective_variables, mintozero=True, xlabel='CV', xlim=None, ylim=None): <NEW_LINE> <INDENT> fig = plt.figure() <NEW_LINE> ax = fig.add_subplot(1, 1, 1) <NEW_LINE> plt.xlabel(xlabel) <NEW_LINE> plt.ylabel('A [kJ/mol]') <NEW_LINE> for cv in collective_variables: <NEW_LINE> <INDENT> free_energy = (- self.static_bias[cv] + self.static_bias[cv].max()) <NEW_LINE> if not mintozero: <NEW_LINE> <INDENT> free_energy = -self.static_bias[cv] <NEW_LINE> <DEDENT> plt.plot(free_energy) <NEW_LINE> <DEDENT> plt.xlim(xlim) <NEW_LINE> plt.ylim(ylim) <NEW_LINE> return ax
Create PMF plot for one or several collective variables. Parameters ---------- collective_variables : list List of CVs you'd like to plot. These should be supplied in the form of a list of column names, or an instance of pd.Index using df.columns mintozero : bool, True Determines whether or not to shift PMF so that the minimum is at zero. xlabel : string Label for the x axis. xlim : tuple/list Limits for x axis in plot (i.e. [x_min, x_max]). ylim : tuple/list Limits for y axis in plot (i.e. [y_min, y_max]). Returns ------- axes: matplotlib.AxesSubplot
625941b7d8ef3951e3243366
def __init__(self, server_uri): <NEW_LINE> <INDENT> plain_server_uri = server_uri <NEW_LINE> if 'openrdf-sesame' not in server_uri: <NEW_LINE> <INDENT> server_uri = '%s/openrdf-sesame' % server_uri <NEW_LINE> <DEDENT> self.server_uri = server_uri <NEW_LINE> self.repository_url_tmplt = self.server_uri + '/repositories/%s' <NEW_LINE> server_update_uri = '%s/openrdf-workbench' % plain_server_uri <NEW_LINE> self.repository_url_update_tmplt = server_update_uri + '/repositories/%s/update'
initializes the client :param server_uri: URL of the server
625941b7507cdc57c6306afb
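What the two URL templates resolve to for a given repository id; the server address is made up:

server_uri = "http://localhost:8080"  # hypothetical
if 'openrdf-sesame' not in server_uri:
    server_uri = '%s/openrdf-sesame' % server_uri
print((server_uri + '/repositories/%s') % 'my-repo')
# -> http://localhost:8080/openrdf-sesame/repositories/my-repo
print('http://localhost:8080/openrdf-workbench/repositories/%s/update' % 'my-repo')
# -> http://localhost:8080/openrdf-workbench/repositories/my-repo/update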
def test_1_3_keto_enol_enumeration(): <NEW_LINE> <INDENT> assert enumerate_tautomers_smiles('C1(=CCCCC1)O') == {'OC1=CCCCC1', 'O=C1CCCCC1'}
Enumerate 1,3 keto/enol tautomer.
625941b71f037a2d8b946028
def remove_item(self, path): <NEW_LINE> <INDENT> path = self._path(path) <NEW_LINE> if util.isfile(path): <NEW_LINE> <INDENT> util.remove(str(path)) <NEW_LINE> <DEDENT> elif util.isdir(path): <NEW_LINE> <INDENT> util.rmtree(path)
Remove the item at path, regardless of whether it's a file or a directory.
625941b7566aa707497f43a3
def start(self): <NEW_LINE> <INDENT> err = mzo.init() <NEW_LINE> if err: <NEW_LINE> <INDENT> raise Exception(err) <NEW_LINE> <DEDENT> mzo.set_input_mode(mzo.input("Esc")) <NEW_LINE> mzo.set_output_mode(mzo.output("256")) <NEW_LINE> try: <NEW_LINE> <INDENT> self.prompt(mzo, self.registrar) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> mzo.close() <NEW_LINE> for o in self.registrar.registry.values(): <NEW_LINE> <INDENT> q = o["data"] <NEW_LINE> self.responses[q.name] = q.result
TODO: ...
625941b7004d5f362079a160
def finish(self): <NEW_LINE> <INDENT> if self.request: <NEW_LINE> <INDENT> response = PyMsgJson().set_ID(self.request.get_ID()).set_status(True) <NEW_LINE> response.set_attribute("processor_name", self.class_name) <NEW_LINE> print("self._finished---- incoming request:", self._finished, self.request) <NEW_LINE> if not self._finished: <NEW_LINE> <INDENT> self.process() <NEW_LINE> self._finished = True <NEW_LINE> <DEDENT> response.set_payload(self.get_output_destination()) <NEW_LINE> self.response = response <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.response = None
Finish assembling the Response. Rather than building the Response piece by piece through a global Response object, the whole construction is done in this single method.
625941b7627d3e7fe0d68c77
def run_pipeline(pipe_config): <NEW_LINE> <INDENT> default_site = get_default_site() <NEW_LINE> try: <NEW_LINE> <INDENT> p = Pipeline.create(pipe_config) <NEW_LINE> status = p.run() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> set_default_site(default_site) <NEW_LINE> <DEDENT> return status
Run a pipeline as defined by a particular configuration Parameters ---------- pipe_config : `dict` The configuration dictionary Returns ------- status : `int` Usual unix convention of 0 -> success, non-zero is an error code
625941b726238365f5f0ec92
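A hedged usage sketch; the config keys below are placeholders, since the real schema belongs to whatever Pipeline.create expects:

pipe_config = {"stages": [], "site": {"name": "local"}}  # hypothetical keys
status = run_pipeline(pipe_config)
assert status == 0, "non-zero means the pipeline failed"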
def dump(data): <NEW_LINE> <INDENT> for i in range(regsize): <NEW_LINE> <INDENT> for j in range(len(ports)): <NEW_LINE> <INDENT> gpio.output(ports[j], data[j][regsize - 1 - i]) <NEW_LINE> <DEDENT> gpio.output(srckl, True) <NEW_LINE> sleep(pulsewidth) <NEW_LINE> gpio.output(srckl, False) <NEW_LINE> <DEDENT> gpio.output(rckl, True) <NEW_LINE> sleep(pulsewidth) <NEW_LINE> gpio.output(rckl, False)
Dump data, given as one list of bits per port, onto the ports.
625941b766656f66f7cbbfd3
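A usage sketch for dump, assuming two daisy-chained 8-bit shift registers and the module-level names (ports, regsize, srckl, rckl, pulsewidth, gpio) the function relies on; the bit patterns are illustrative:

# One MSB-first bit pattern per entry in ports; each has regsize (8) bits.
data = [
    [1, 0, 1, 0, 1, 0, 1, 0],  # bits for ports[0]
    [0, 0, 0, 0, 1, 1, 1, 1],  # bits for ports[1]
]
dump(data)  # shift all bits out on srckl, then latch once via rckl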
def _on_title_modified(self, title): <NEW_LINE> <INDENT> self.title = str(title)
Handles the page title being changed.
625941b71d351010ab855947
def __init__(self): <NEW_LINE> <INDENT> self._client = dwollav2.Client(key=settings.DWOLLA_KEY, secret=settings.DWOLLA_SECRET, environment='sandbox') <NEW_LINE> self.refresh()
Initializes an instance of the DwollaClient
625941b7dc8b845886cb535d
def __init__(self, body=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._body = None <NEW_LINE> self.discriminator = None <NEW_LINE> if body is not None: <NEW_LINE> <INDENT> self.body = body
UntagDeviceResponse - a model defined in huaweicloud sdk
625941b715baa723493c3d9b
def declareRNG( self, rngId = None, seed = 1 ): <NEW_LINE> <INDENT> if rngId is None: <NEW_LINE> <INDENT> raise Exception( "ERROR: No index given in attempt to declare RNG." ) <NEW_LINE> <DEDENT> if rngId < 0 or rngId > data_spec_constants.MAX_RNGS: <NEW_LINE> <INDENT> raise Exception( "ERROR: Requested RNG index (%d) out of range." % rngId ) <NEW_LINE> <DEDENT> if self.rngList[rngId] == False: <NEW_LINE> <INDENT> self.rngList[rngId] = True
If a specified random number generator has not yet been declared, do so. Info on which RNGs have been declared is held in the spec object, in a list called rngList.
625941b73eb6a72ae02ec303
def transform(self, M, t): <NEW_LINE> <INDENT> self.anchor = np.dot(M, self.anchor) + t <NEW_LINE> self.axis = np.dot(M, self.axis)
Apply a Euclidean transformation on the 3D crease.
625941b7fff4ab517eb2f262
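A worked example: a 90-degree rotation about z plus a translation, mirroring the method body (points are rotated and translated, direction vectors only rotated):

import numpy as np

theta = np.pi / 2
M = np.array([[np.cos(theta), -np.sin(theta), 0.0],
              [np.sin(theta),  np.cos(theta), 0.0],
              [0.0,            0.0,           1.0]])
t = np.array([1.0, 0.0, 0.0])

anchor = np.dot(M, np.array([1.0, 0.0, 0.0])) + t  # -> [1, 1, 0]
axis = np.dot(M, np.array([0.0, 1.0, 0.0]))        # -> [-1, 0, 0]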
def process(form, application): <NEW_LINE> <INDENT> form.nom_application.process_data(application["nom_application"]) <NEW_LINE> form.code_application.process_data(application["code_application"]) <NEW_LINE> form.desc_application.process_data(application["desc_application"]) <NEW_LINE> return form
Method that fills the form with the data of the element in question. Takes a form and an application as parameters.
625941b791f36d47f21ac31f
def fetch(self, key, params, force_this_node=False): <NEW_LINE> <INDENT> if not self._host_locator.still_valid(): <NEW_LINE> <INDENT> raise ListNotValidError() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> url = self._host_locator.locate(key) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise AttributeError("`host_locator` should implement `locate()` method") <NEW_LINE> <DEDENT> if force_this_node or (self._this_host == url): <NEW_LINE> <INDENT> return self._sf.call(self._fn, key, params=params) <NEW_LINE> <DEDENT> logger.info(f"calling key `{key}` on {url}") <NEW_LINE> resp = self._requests.post(f"{url}/{key}", json=json.dumps(params)) <NEW_LINE> if resp.status_code != 200: <NEW_LINE> <INDENT> logger.warning(f"Failed calling {url}, receiving status code {resp.status_code}") <NEW_LINE> raise FetchError(f"Failed to fetch data from {url}") <NEW_LINE> <DEDENT> return resp.json()
call `fn` only once, coalesced by key :param key: a unique identifier, used to coalesce identical requests :param params: a key-value map that will be translated to JSON when the request goes to another server :param force_this_node: ensures that if service discovery is broken (network partition, delayed updates, etc.) sfc doesn't fall into a loop of nodes calling each other until resources are exhausted Any error coming from your function will be raised back directly Returns a JSON map, so design your `fetching_fn` to return JSON
625941b7377c676e91271fd3
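A minimal sketch of the per-key call coalescing that self._sf is assumed to provide (error propagation to waiters is elided for brevity):

import threading

class SingleFlight:
    def __init__(self):
        self._lock = threading.Lock()
        self._inflight = {}  # key -> (Event, one-slot result box)

    def call(self, fn, key, **kwargs):
        with self._lock:
            entry = self._inflight.get(key)
            leader = entry is None
            if leader:
                entry = (threading.Event(), [None])
                self._inflight[key] = entry
        event, box = entry
        if not leader:
            event.wait()  # piggyback on the in-flight call
            return box[0]
        try:
            box[0] = fn(key, **kwargs)
        finally:
            with self._lock:
                del self._inflight[key]
            event.set()
        return box[0]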
def submit_time(self, section_name): <NEW_LINE> <INDENT> _submit_time = self._read_value(section_name, self._submit_time) <NEW_LINE> self._validate(r'\d{4}/\d{2}/\d{2}-\d{2}:\d{2}:\d{2}', self._submit_time, _submit_time) <NEW_LINE> return _submit_time
Return config SUBMIT_TIME option in section_name section
625941b7d7e4931a7ee9dd45
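What the SUBMIT_TIME format accepts, checked against the same regex (whether _validate requires a full match is an assumption):

import re

pattern = r'\d{4}/\d{2}/\d{2}-\d{2}:\d{2}:\d{2}'
assert re.fullmatch(pattern, '2024/01/31-23:59:59')
assert re.fullmatch(pattern, '2024-01-31 23:59:59') is None  # wrong separators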
def _verify_html_file_report(self, report_store, expected_data): <NEW_LINE> <INDENT> report_html_filename = report_store.links_for(self.course.id)[0][0] <NEW_LINE> report_path = report_store.path_to(self.course.id, report_html_filename) <NEW_LINE> with report_store.storage.open(report_path) as html_file: <NEW_LINE> <INDENT> html_file_data = html_file.read() <NEW_LINE> for data in expected_data: <NEW_LINE> <INDENT> self.assertIn(data, html_file_data)
Verify grade report data.
625941b77d43ff24873a2acd
def Extension(self) -> str: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return os.path.splitext(self.string)[1] <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LogError(uMsg=u'can\'t get file extension [%s]' % self.string, oException=e) <NEW_LINE> return ""
Returns the extension of a file (with dot) :return: extension as str
625941b791af0d3eaac9b83d
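What os.path.splitext returns for a few inputs, which is all Extension relies on:

import os

assert os.path.splitext('archive.tar.gz')[1] == '.gz'   # only the last suffix
assert os.path.splitext('README')[1] == ''              # no dot -> empty string
assert os.path.splitext('/tmp/photo.JPG')[1] == '.JPG'  # case is preserved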
def read_data(self, file): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> reader = csv.DictReader(file) <NEW_LINE> for row in reader: <NEW_LINE> <INDENT> self.csv_data.append(row) <NEW_LINE> for k,v in row.items(): <NEW_LINE> <INDENT> if k in self.mapped_data: <NEW_LINE> <INDENT> self.mapped_data[k].append(v) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.mapped_data[k] = [v] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> title(e) <NEW_LINE> sys.exit(1)
Read our csv & build our dataset @param file (file handle)
625941b7d10714528d5ffb08
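A sketch of the two structures read_data fills, run on a tiny in-memory CSV with the class wiring replaced by plain locals:

import csv, io

f = io.StringIO("name,age\nAda,36\nAlan,41\n")
csv_data, mapped_data = [], {}
for row in csv.DictReader(f):
    csv_data.append(row)  # row dicts, as in self.csv_data
    for k, v in row.items():
        mapped_data.setdefault(k, []).append(v)  # per-column lists

# csv_data    -> [{'name': 'Ada', 'age': '36'}, {'name': 'Alan', 'age': '41'}]
# mapped_data -> {'name': ['Ada', 'Alan'], 'age': ['36', '41']}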
def write_pil(filename, image, type="png"): <NEW_LINE> <INDENT> image = numpy2pil(image) <NEW_LINE> image.save(filename, type)
Write a NumPy array as a PNG image using PIL.
625941b729b78933be1e54e3
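A usage sketch; numpy2pil isn't shown above, so the stand-in here assumes it wraps PIL.Image.fromarray:

import numpy as np
from PIL import Image

def numpy2pil(image):
    return Image.fromarray(image)  # assumed implementation of the helper

gradient = np.tile(np.arange(256, dtype=np.uint8), (64, 1))
write_pil('gradient.png', gradient)  # saves a 256x64 grayscale PNG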