Columns: code (string, 4–4.48k chars), docstring (string, 1–6.45k chars), _id (string, 24 chars)
def decrase_health(self): <NEW_LINE> <INDENT> self.health -= self.individual_config['frame_health_reduce'] * self.poison
Decrease own health when called; the amount is scaled by the object's own poisoning level.
625941bad6c5a10208143ed1
def test_deploy_cert(plugin, temp_dir, domains): <NEW_LINE> <INDENT> cert = crypto_util.gen_ss_cert(util.KEY, domains) <NEW_LINE> cert_path = os.path.join(temp_dir, "cert.pem") <NEW_LINE> with open(cert_path, "w") as f: <NEW_LINE> <INDENT> f.write(OpenSSL.crypto.dump_certificate( OpenSSL.crypto.FILETYPE_PEM, cert)) <NEW_LINE> <DEDENT> for domain in domains: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> plugin.deploy_cert(domain, cert_path, util.KEY_PATH, cert_path, cert_path) <NEW_LINE> plugin.save() <NEW_LINE> <DEDENT> except le_errors.Error as error: <NEW_LINE> <INDENT> logger.error("Plugin failed to deploy certificate for %s:", domain) <NEW_LINE> logger.exception(error) <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> if not _save_and_restart(plugin, "deployed"): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> success = True <NEW_LINE> for domain in domains: <NEW_LINE> <INDENT> verify = functools.partial(validator.Validator().certificate, cert, domain, "127.0.0.1", plugin.https_port) <NEW_LINE> if not _try_until_true(verify): <NEW_LINE> <INDENT> logger.error("Could not verify certificate for domain %s", domain) <NEW_LINE> success = False <NEW_LINE> <DEDENT> <DEDENT> if success: <NEW_LINE> <INDENT> logger.info("HTTPS validation succeeded") <NEW_LINE> <DEDENT> return success
Tests deploy_cert returning True if the tests are successful
625941b97b25080760e392e4
def get_database_url(settings): <NEW_LINE> <INDENT> if not settings.database_source: <NEW_LINE> <INDENT> return settings.database <NEW_LINE> <DEDENT> global _DB_URL_CACHED, _DB_URL_REFRESH_TIME, _DB_URL_REFRESH_JITTER <NEW_LINE> if _DB_URL_REFRESH_TIME > time.time(): <NEW_LINE> <INDENT> return _DB_URL_CACHED <NEW_LINE> <DEDENT> _DB_URL_REFRESH_TIME = time.time() + (insecure_random.random() * _DB_URL_REFRESH_JITTER) <NEW_LINE> try: <NEW_LINE> <INDENT> url = subprocess.check_output([settings.database_source]) <NEW_LINE> <DEDENT> except subprocess.CalledProcessError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return url.strip()
Given settings, load a database URL either from our executable source or the bare string.
625941b9d4950a0f3b08c1dd
def test_update_id(self): <NEW_LINE> <INDENT> obj = RuleObject.get(self.uuid) <NEW_LINE> serializer_data1 = json.loads(json.dumps(self.data1)) <NEW_LINE> with self.assertRaises(IDUpdateNotPermitted): <NEW_LINE> <INDENT> serializer = RuleObjectSerializer(obj, data=serializer_data1[0], partial=True, context={'pk': self.uuid1}) <NEW_LINE> serializer.update(obj, serializer_data1[0])
Test case for the update function when an ID update is attempted.
625941ba67a9b606de4a7d46
def validatejson(data_input, mode): <NEW_LINE> <INDENT> LOGGER.info('validating JSON; Mode: {}'.format(mode)) <NEW_LINE> passed = False <NEW_LINE> if mode >= MODE.NONE: <NEW_LINE> <INDENT> passed = True <NEW_LINE> <DEDENT> if mode >= MODE.SIMPLE: <NEW_LINE> <INDENT> name = data_input.file <NEW_LINE> (mtype, encoding) = mimetypes.guess_type(name, strict=False) <NEW_LINE> passed = data_input.data_format.mime_type in {mtype, FORMATS.JSON.mime_type} <NEW_LINE> <DEDENT> if mode >= MODE.STRICT: <NEW_LINE> <INDENT> import json <NEW_LINE> try: <NEW_LINE> <INDENT> with open(data_input.file) as f: <NEW_LINE> <INDENT> json.load(f) <NEW_LINE> <DEDENT> passed = True <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> passed = False <NEW_LINE> <DEDENT> <DEDENT> return passed
JSON validation function :param data_input: :class:`ComplexInput` :param pywps.validator.mode.MODE mode: This function validates JSON input based on the given validation mode. The following happens, depending on the `mode` parameter: `MODE.NONE` No validation, returns `True`. `MODE.SIMPLE` Returns `True` if the mime type is correct. `MODE.STRICT` Returns `True` if the content can be interpreted as a JSON object.
625941ba796e427e537b044c
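The MODE.STRICT branch above simply attempts a full parse. A minimal standalone sketch of that check, assuming only the standard library (the pyWPS ComplexInput and MODE types are replaced here with a plain file path, and the helper name is hypothetical):

import json

def is_valid_json_file(path):
    # Strict check: True only if the whole file parses as JSON.
    try:
        with open(path) as f:
            json.load(f)
        return True
    except ValueError:
        return False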
def list_randitem_norep(n,items,excluded_values=[]): <NEW_LINE> <INDENT> return _list_rand0(n,items,False,excluded_values)
Generate a list of random items selected from a list without replacement.
625941ba91af0d3eaac9b89e
@app.route('/horoscope_results') <NEW_LINE> def horoscope_results(): <NEW_LINE> <INDENT> horoscope_sign = request.args.get("horoscope_sign") <NEW_LINE> users_personality = HOROSCOPE_PERSONALITIES[horoscope_sign] <NEW_LINE> lucky_number = random.randint(1, 99) <NEW_LINE> context = { 'name': request.args.get('users_name'), 'horoscope_sign': horoscope_sign, 'personality': users_personality, 'lucky_number': lucky_number } <NEW_LINE> return render_template('horoscope_results.html', **context)
Shows the user the result for their chosen horoscope.
625941baf7d966606f6a9e92
def append_random_number_to_filename(self, local_img_file): <NEW_LINE> <INDENT> date = datetime.datetime.now() <NEW_LINE> date_string = date.strftime("%m-%d-%Y") <NEW_LINE> return "%s-%s-glitched.%s" % (local_img_file.split(".")[0], date_string, local_img_file.split(".")[1])
Prevent overwriting of the original file by appending a date stamp and "-glitched" suffix to the filename.
625941ba5fc7496912cc380f
def build(tree, control, level = 2): <NEW_LINE> <INDENT> tree = Builder.getDict(tree) <NEW_LINE> if level == 1: <NEW_LINE> <INDENT> return Builder.constructBoxLayout(tree, control) <NEW_LINE> <DEDENT> if level == 2: <NEW_LINE> <INDENT> return Builder.buildBoxes(tree, control) <NEW_LINE> <DEDENT> elif level == 3: <NEW_LINE> <INDENT> return Builder.buildTabs(tree, control)
API-call for building a QVBoxLayout from a given tree. The tree can be a path to a json file or a dictionary. The control has to implement the getCallback method that provides a callable for a given key. Level 2 returns a layout containing QGroupBoxes, containing QWidgets; Level 3 returns a layout containing QTabWidgets, containing QGroupBoxes, containing QWidgets.
625941ba63b5f9789fde6f6f
def work(self, burst=False): <NEW_LINE> <INDENT> setup_loghandlers() <NEW_LINE> self._install_signal_handlers() <NEW_LINE> self.pm = ProcessManager() <NEW_LINE> did_perform_work = False <NEW_LINE> self.register_birth() <NEW_LINE> self.log.info("RQ worker {0!r} started, version {1}".format(self.key, VERSION)) <NEW_LINE> self.set_state(WorkerStatus.STARTED) <NEW_LINE> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not burst or not self.pm.has_running_jobs: <NEW_LINE> <INDENT> self.check_for_suspension(burst) <NEW_LINE> <DEDENT> if self.should_run_maintenance_tasks: <NEW_LINE> <INDENT> self.clean_registries() <NEW_LINE> <DEDENT> if self._stop_requested: <NEW_LINE> <INDENT> self.log.info('Stopping on request') <NEW_LINE> break <NEW_LINE> <DEDENT> timeout = None if burst else max(1, self.default_worker_ttl - 60) <NEW_LINE> result = self.dequeue_job_and_maintain_ttl(timeout) <NEW_LINE> if result is None: <NEW_LINE> <INDENT> if burst: <NEW_LINE> <INDENT> self.log.info("RQ worker {0!r} done, quitting".format(self.key)) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> except StopRequested: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> job, queue = result <NEW_LINE> self.execute_job(job) <NEW_LINE> self.heartbeat() <NEW_LINE> if job.get_status() == JobStatus.FINISHED: <NEW_LINE> <INDENT> queue.enqueue_dependents(job) <NEW_LINE> <DEDENT> did_perform_work = True <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if not self.is_horse: <NEW_LINE> <INDENT> self.register_death() <NEW_LINE> <DEDENT> <DEDENT> return did_perform_work
Starts the work loop
625941baf548e778e58cd406
def torus(center=(0., 0., 0.), normal=(0., 0., 1.), radius=1., color='', cradius=.25, samples=20, csamples=20, _self=cmd): <NEW_LINE> <INDENT> from math import cos, sin, pi <NEW_LINE> if color and isinstance(color, str): <NEW_LINE> <INDENT> color = list(_self.get_color_tuple(color)) <NEW_LINE> <DEDENT> obj = [] <NEW_LINE> axis = cpv.cross_product(normal, (0., 0., 1.)) <NEW_LINE> angle = -cpv.get_angle(normal, (0., 0., 1.)) <NEW_LINE> matrix = cpv.rotation_matrix(angle, cpv.normalize(axis)) <NEW_LINE> obj_vertex = lambda x, y, z: obj.extend([VERTEX] + cpv.add(center, cpv.transform(matrix, [x, y, z]))) <NEW_LINE> obj_normal = lambda x, y, z: obj.extend([NORMAL] + cpv.transform(matrix, [x, y, z])) <NEW_LINE> r = radius <NEW_LINE> cr = cradius <NEW_LINE> rr = 1.5 * cr <NEW_LINE> dv = 2 * pi / csamples <NEW_LINE> dw = 2 * pi / samples <NEW_LINE> v = 0.0 <NEW_LINE> w = 0.0 <NEW_LINE> while w < 2 * pi: <NEW_LINE> <INDENT> v = 0.0 <NEW_LINE> c_w = cos(w) <NEW_LINE> s_w = sin(w) <NEW_LINE> c_wdw = cos(w + dw) <NEW_LINE> s_wdw = sin(w + dw) <NEW_LINE> obj.append(BEGIN) <NEW_LINE> obj.append(TRIANGLE_STRIP) <NEW_LINE> if color: <NEW_LINE> <INDENT> obj.append(COLOR) <NEW_LINE> obj.extend(color) <NEW_LINE> <DEDENT> while v < 2 * pi + dv: <NEW_LINE> <INDENT> c_v = cos(v) <NEW_LINE> s_v = sin(v) <NEW_LINE> c_vdv = cos(v + dv) <NEW_LINE> s_vdv = sin(v + dv) <NEW_LINE> obj_normal( (r + rr * c_v) * c_w - (r + cr * c_v) * c_w, (r + rr * c_v) * s_w - (r + cr * c_v) * s_w, (rr * s_v - cr * s_v)) <NEW_LINE> obj_vertex( (r + cr * c_v) * c_w, (r + cr * c_v) * s_w, cr * s_v) <NEW_LINE> obj_normal( (r + rr * c_vdv) * c_wdw - (r + cr * c_vdv) * c_wdw, (r + rr * c_vdv) * s_wdw - (r + cr * c_vdv) * s_wdw, rr * s_vdv - cr * s_vdv) <NEW_LINE> obj_vertex( (r + cr * c_vdv) * c_wdw, (r + cr * c_vdv) * s_wdw, cr * s_vdv) <NEW_LINE> v += dv <NEW_LINE> <DEDENT> obj.append(END) <NEW_LINE> w += dw <NEW_LINE> <DEDENT> return obj
Generate and return a torus CGO with given center, normal and ring radius.
625941ba5166f23b2e1a4fe3
def activate(self): <NEW_LINE> <INDENT> return self
Turn on the executive
625941baa8370b771705272a
def zoom_out(self, x, y): <NEW_LINE> <INDENT> pass
Zoom out and center at the coordinates (x, y).
625941ba442bda511e8be2af
def rand_proxy(limit = ''): <NEW_LINE> <INDENT> if limit == '': <NEW_LINE> <INDENT> limit = config.Config['rand_proxy_limit'] <NEW_LINE> <DEDENT> with conn: <NEW_LINE> <INDENT> cursor = conn.cursor() <NEW_LINE> cursor.execute('SELECT protocol,ip FROM pool WHERE status = \'5\' ORDER BY random() LIMIT ' + str(limit)) <NEW_LINE> values = cursor.fetchall() <NEW_LINE> cursor.close() <NEW_LINE> <DEDENT> return values
Randomly fetch the specified number of rows from the database; if limit is not set, return the default number of rows from config.
625941ba97e22403b379ce23
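The query above interpolates limit directly into the SQL string. A sketch of the same lookup with a bound parameter instead (assuming a sqlite3-style DB-API connection, since random() and the qmark paramstyle match SQLite):

def rand_proxy(conn, limit=10):
    # Fetch up to `limit` random proxies with status '5', binding the
    # limit as a parameter instead of concatenating it into the SQL.
    cursor = conn.cursor()
    cursor.execute(
        "SELECT protocol, ip FROM pool WHERE status = '5' "
        "ORDER BY random() LIMIT ?",
        (int(limit),),
    )
    values = cursor.fetchall()
    cursor.close()
    return values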
def save_model_loss(self, loss_li): <NEW_LINE> <INDENT> metric_file = codecs.open(self.out_dir + '/metrics.log', 'a', 'utf-8') <NEW_LINE> metric_comparison_file = codecs.open('./code/output_dir/' + self.args.domain + '/metrics.log', 'a', 'utf-8') <NEW_LINE> metric_file.write('Final loss: %s' % (loss_li[-1]) + "\n") <NEW_LINE> metric_file.write('Loss development: %s' % (loss_li) + "\n" + "\n") <NEW_LINE> metric_comparison_file.write('Final loss: %s' % (loss_li[-1]) + "\n") <NEW_LINE> metric_comparison_file.write('Loss development: %s' % (loss_li) + "\n" + "\n") <NEW_LINE> epoch_li = [epoch for epoch in range(1, self.args.epochs + 1)] <NEW_LINE> fig, ax = plt.subplots(figsize=(16, 8)) <NEW_LINE> ax.set_xlabel("Epoch", fontsize=18, weight="bold") <NEW_LINE> ax.set_ylabel("Loss", fontsize=18, weight="bold") <NEW_LINE> ax.set_title('Model loss', fontsize=20, weight="bold") <NEW_LINE> plt.plot(epoch_li, loss_li) <NEW_LINE> plt.savefig(self.out_dir + "/model_loss.pdf", format="pdf") <NEW_LINE> plt.savefig(self.out_dir + "/model_loss.png", format="png")
Creates plots of the training loss and saves them as .png and .pdf files. Args: loss_li: List that contains the model loss for every epoch Returns:
625941babe383301e01b5316
def _getAST(content): <NEW_LINE> <INDENT> content = '\n'.join(content.splitlines(0)) <NEW_LINE> errlineno = None <NEW_LINE> ast_ = None <NEW_LINE> try: <NEW_LINE> <INDENT> ast_ = _quietCompilerParse(content) <NEW_LINE> <DEDENT> except SyntaxError as ex: <NEW_LINE> <INDENT> errlineno = ex.lineno <NEW_LINE> log.debug("compiler parse #1: syntax error on line %d", errlineno) <NEW_LINE> <DEDENT> except parser.ParserError as ex: <NEW_LINE> <INDENT> log.debug("compiler parse #1: parse error") <NEW_LINE> lfContent = content.replace("\r\n", "\n").replace("\r", "\n") <NEW_LINE> try: <NEW_LINE> <INDENT> _quietCompile(lfContent, "dummy.py", "exec") <NEW_LINE> <DEDENT> except SyntaxError as ex2: <NEW_LINE> <INDENT> errlineno = ex2.lineno <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if errlineno is None: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> if errlineno is not None: <NEW_LINE> <INDENT> lines = content.splitlines(1) <NEW_LINE> offender = lines[errlineno-1] <NEW_LINE> log.info("syntax error on line %d: %r: trying to recover", errlineno, offender) <NEW_LINE> indent = '' <NEW_LINE> for i in range(0, len(offender)): <NEW_LINE> <INDENT> if offender[i] in " \t": <NEW_LINE> <INDENT> indent += offender[i] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> lines[errlineno-1] = indent+"pass"+"\n" <NEW_LINE> newContent = ''.join(lines) <NEW_LINE> errlineno2 = None <NEW_LINE> try: <NEW_LINE> <INDENT> ast_ = _quietCompilerParse(newContent) <NEW_LINE> <DEDENT> except SyntaxError as ex: <NEW_LINE> <INDENT> errlineno2 = ex.lineno <NEW_LINE> log.debug("compiler parse #2: syntax error on line %d", errlineno) <NEW_LINE> <DEDENT> except parser.ParserError as ex: <NEW_LINE> <INDENT> log.debug("compiler parse #2: parse error") <NEW_LINE> lfContent = newContent.replace("\r\n", "\n").replace("\r", "\n") <NEW_LINE> try: <NEW_LINE> <INDENT> _quietCompile(lfContent, "dummy.py", "exec") <NEW_LINE> <DEDENT> except SyntaxError as ex2: <NEW_LINE> <INDENT> errlineno2 = ex2.lineno <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if errlineno2 is None: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> if ast_ is not None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif errlineno2 == errlineno: <NEW_LINE> <INDENT> raise ValueError("cannot recover from syntax error: line %d" % errlineno) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("cannot recover from multiple syntax errors: " "line %d and then %d" % (errlineno, errlineno2)) <NEW_LINE> <DEDENT> <DEDENT> if ast_ is None: <NEW_LINE> <INDENT> raise ValueError("could not generate AST") <NEW_LINE> <DEDENT> return ast_
Return an AST for the given Python content. If it cannot, raise an error describing the problem.
625941ba236d856c2ad44668
def sequence_input(sequence): <NEW_LINE> <INDENT> position_feq = [] <NEW_LINE> with open(sequence) as seq: <NEW_LINE> <INDENT> lines = seq.readlines()[1:] <NEW_LINE> for i in range(len(lines)): <NEW_LINE> <INDENT> pos_prob = [] <NEW_LINE> try: <NEW_LINE> <INDENT> pos_prob.append(lines[i].strip('\n').split(',')[1]) <NEW_LINE> pos_prob.append(lines[i].strip('\n').split(',')[2]) <NEW_LINE> pos_prob.append(lines[i].strip('\n').split(',')[3]) <NEW_LINE> pos_prob.append(lines[i].strip('\n').split(',')[4]) <NEW_LINE> position_feq.append(pos_prob) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> pos_prob.append(lines[i].strip('\n').split('\t')[1]) <NEW_LINE> pos_prob.append(lines[i].strip('\n').split('\t')[2]) <NEW_LINE> pos_prob.append(lines[i].strip('\n').split('\t')[3]) <NEW_LINE> pos_prob.append(lines[i].strip('\n').split('\t')[4]) <NEW_LINE> position_feq.append(pos_prob) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Check the input file. \n It should be tab or comma separated") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return(position_feq)
Takes in sequences from base_content in .csv format and per-position frequencies to use in simulating sequences with a 1st-order Markov model. Parameters ---------- sequence : csv file with columns of sequences (A, T, G, C) Returns ------- position_frequencies : list of lists of per-position base frequencies, one list for each position
625941bafbf16365ca6f6047
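Note that the `else:` clause on the try above runs whenever the comma split succeeds, so the warning prints for every well-formed row. A sketch of the same read using the csv module, which sniffs the delimiter instead of catching IndexError (assuming one header row and four frequency columns after the position column):

import csv

def sequence_input(path):
    # Read per-position base frequencies (A, T, G, C) from a comma- or
    # tab-separated file, skipping the header row.
    position_freq = []
    with open(path, newline='') as fh:
        dialect = csv.Sniffer().sniff(fh.read(1024), delimiters=',\t')
        fh.seek(0)
        reader = csv.reader(fh, dialect)
        next(reader)  # skip header
        for row in reader:
            position_freq.append(row[1:5])
    return position_freq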
def to_rst(self): <NEW_LINE> <INDENT> return ".. code::\n\n" + text.indent(self.attributes['text'], 3) + "\n"
Return the rst representation of this tag and its children.
625941ba046cf37aa974cbd4
def get_post_archive_url(self): <NEW_LINE> <INDENT> return self.get_absolute_url()
get the URL of the archive for all posts
625941bae64d504609d746ca
def mask(self, space, width, pos): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.masks[(space, width, pos)] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> assert self.lv % space == space % width == 0 <NEW_LINE> m = self.all_mask // ((1 << space) - 1) <NEW_LINE> m *= (1 << width) - 1 <NEW_LINE> m = m << (pos * width) <NEW_LINE> s = self.masks[(space, width, pos)] = self.int_fmt % m <NEW_LINE> return s
Return a certain bit mask as a C literal. In the returned bit mask we set the bits at positions ``k * space + pos * width + j, 0 <= j < width``, for all ``k >= 0`` such that the bit position is less than ``n * lv``, with ``n, lv`` as given by method ``set_matrix``. ``lv`` must be divisible by ``space`` and ``space`` must be divisible by ``width``. Since the same mask is used frequently, we use a dictionary for storing the masks already created.
625941ba9b70327d1c4e0c5e
def get_files(self, files): <NEW_LINE> <INDENT> self.logger.info("Get Files") <NEW_LINE> for pattern in files: <NEW_LINE> <INDENT> self.get_pattern(pattern)
Get files from the remote host
625941ba711fe17d825421fc
def run_proc(self, module, wlink, job_data, parameters): <NEW_LINE> <INDENT> if not self.has_plugin(module): <NEW_LINE> <INDENT> raise Exception("No plugin named {}".format(module)) <NEW_LINE> <DEDENT> plugin = self.pmanager.getPluginByName(module) <NEW_LINE> config_settings = plugin.details.items('Settings') <NEW_LINE> config_settings = update_settings(config_settings, parameters) <NEW_LINE> try: <NEW_LINE> <INDENT> settings = {k:v for k,v in self.executables[module]} <NEW_LINE> for k,v in config_settings: <NEW_LINE> <INDENT> if not k in settings: <NEW_LINE> <INDENT> settings[k] = v <NEW_LINE> <DEDENT> <DEDENT> settings = settings.items() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise Exception("Plugin Config not updated: {}!".format(module)) <NEW_LINE> <DEDENT> if wlink['link']: <NEW_LINE> <INDENT> for link in wlink['link']: <NEW_LINE> <INDENT> if not link: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if link['module']: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> assert (self.output_type(link['module']) == self.input_type(module) or self.output_type(link['module']) in self.input_type(module)) <NEW_LINE> <DEDENT> except AssertionError: <NEW_LINE> <INDENT> raise Exception('{} and {} have mismatched input/output types'.format(module, link['module'])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> job_data['wasp_chain'] = wlink <NEW_LINE> output = plugin.plugin_object.base_call(settings, job_data, self) <NEW_LINE> ot = self.output_type(module) <NEW_LINE> wlink.insert_output(output, ot, plugin.name) <NEW_LINE> if not wlink.output: <NEW_LINE> <INDENT> raise Exception('"{}" module failed to produce {}'.format(module, ot)) <NEW_LINE> <DEDENT> data = {'module': module, 'module_output': output} <NEW_LINE> job_data['plugin_output'].append(data)
Run a module adapter for the wasp interpreter. To support the job_data mechanism, this injects wlink.
625941ba656771135c3eb6fc
def login(self, mobile, code): <NEW_LINE> <INDENT> app_login_body = {"mobile": mobile, "code": code} <NEW_LINE> logging.info("app login URL: {}".format(self.app_login_url)) <NEW_LINE> logging.info("app login body: {}".format(app_login_body)) <NEW_LINE> return requests.post(self.app_login_url, json=app_login_body, headers=apiConfig.app_header)
App login :param mobile: mobile phone number :param code: verification code :return:
625941ba8e05c05ec3eea1fc
def check_input(the_user_entry): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> for z in range(length_of_bad_input): <NEW_LINE> <INDENT> if bad_input[z] == the_user_entry: <NEW_LINE> <INDENT> messagebox.showwarning(title="Invalid input!", message="The following characters are forbidden:\n" "~`!@#$%^&*()_-+={[}]|\\:;\"\'<,>.?/1234567890") <NEW_LINE> clear_box() <NEW_LINE> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print("The user entered an invalid character in the entry box\n" "potentially one of the following:\n" "~`!@#$%^&*()_-+={[}]|\\:;\"\'<,>.?/1234567890")
Checks the user's input, throws an error if they enter the wrong character :param the_user_entry: :return:
625941ba187af65679ca4fa7
def load_edml(dir_path): <NEW_LINE> <INDENT> with open(dir_path + '/m.obj', 'rb') as f: <NEW_LINE> <INDENT> m = pickle.load(f) <NEW_LINE> <DEDENT> m.build(outdir=dir_path) <NEW_LINE> m.load(outdir=dir_path) <NEW_LINE> return m
Convenience method for reconstructing a saved EDML object. First loads in metadata from ``m.obj``, then uses that metadata to construct the computation graph. Then, if saved weights are found, these are loaded into the graph. :param dir_path: Path to directory containing the EDML checkpoint files. :return: The loaded EDML instance.
625941ba16aa5153ce362302
def backup(self, backup_info): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.strategy.start_backup(backup_info) <NEW_LINE> backup_info.save() <NEW_LINE> if backup_info.begin_wal is not None: <NEW_LINE> <INDENT> output.info("Backup start at xlog location: %s (%s, %08X)", backup_info.begin_xlog, backup_info.begin_wal, backup_info.begin_offset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output.info("Backup start at xlog location: %s", backup_info.begin_xlog) <NEW_LINE> <DEDENT> self.current_action = "copying files" <NEW_LINE> self._start_backup_copy_message(backup_info) <NEW_LINE> self.backup_copy(backup_info) <NEW_LINE> self._stop_backup_copy_message(backup_info) <NEW_LINE> self.strategy.stop_backup(backup_info) <NEW_LINE> self._purge_unused_wal_files(backup_info) <NEW_LINE> <DEDENT> except CommandFailedException as e: <NEW_LINE> <INDENT> _logger.exception(e) <NEW_LINE> raise
Perform a backup for the server - invoked by BackupManager.backup() through the generic interface of a BackupExecutor. This implementation is responsible for performing a backup through the streaming protocol. The connection must be made with a superuser or a user having REPLICATION permissions (see PostgreSQL documentation, Section 20.2), and pg_hba.conf must explicitly permit the replication connection. The server must also be configured with enough max_wal_senders to leave at least one session available for the backup. :param barman.infofile.BackupInfo backup_info: backup information
625941ba32920d7e50b28056
def driverNameToDriverAlias(self, name: str) -> str: <NEW_LINE> <INDENT> return self._conn_manager._drivers_sql_manager.nameToAlias(name)
Return the alias from the name of a sql driver.
625941ba8e71fb1e9831d637
def unset_max_noutput_items(self): <NEW_LINE> <INDENT> return _blocks_swig1.vector_insert_i_sptr_unset_max_noutput_items(self)
unset_max_noutput_items(vector_insert_i_sptr self)
625941baad47b63b2c509e13
def breadthFirstSearch(problem): <NEW_LINE> <INDENT> closed = set([]) <NEW_LINE> fringe = util.Queue() <NEW_LINE> closed.add(problem.getStartState()) <NEW_LINE> for successors in problem.getSuccessors(problem.getStartState()): <NEW_LINE> <INDENT> fringe.push( ( successors[0] , [successors[1]] ) ) <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> if fringe.isEmpty(): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> nodes = fringe.pop() <NEW_LINE> if problem.isGoalState(nodes[0]): <NEW_LINE> <INDENT> return nodes[1] <NEW_LINE> <DEDENT> if nodes[0] not in closed: <NEW_LINE> <INDENT> closed.add(nodes[0]) <NEW_LINE> for successors in problem.getSuccessors(nodes[0]): <NEW_LINE> <INDENT> fringe.push((successors[0], nodes[1] + [successors[1]] ))
Search the shallowest nodes in the search tree first.
625941bad8ef3951e32433c7
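The same shallowest-first strategy as a self-contained sketch over a plain adjacency dict (a hypothetical graph structure, not the SearchProblem interface used above):

from collections import deque

def bfs_path(graph, start, goal):
    # Return a shortest list of nodes from start to goal, or [] if the
    # goal is unreachable; graph maps node -> iterable of neighbours.
    visited = {start}
    queue = deque([[start]])
    while queue:
        path = queue.popleft()
        node = path[-1]
        if node == goal:
            return path
        for nxt in graph.get(node, ()):
            if nxt not in visited:
                visited.add(nxt)
                queue.append(path + [nxt])
    return []

# bfs_path({'a': ['b', 'c'], 'b': ['d'], 'c': ['d']}, 'a', 'd') -> ['a', 'b', 'd']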
def test_fuse_element(self): <NEW_LINE> <INDENT> pass
Checks the fusion of two demons of the same family into an element
625941ba30dc7b76659017f4
def test_credv2_lps_off_v1_aes256_decrypt(self): <NEW_LINE> <INDENT> with open(data_filename("credv2_lps_off_v1_aes256"), "rb") as fd: <NEW_LINE> <INDENT> s = fd.read() <NEW_LINE> <DEDENT> cred = SAPCredv2(s).creds[0].cred <NEW_LINE> self.validate_credv2_plain(cred)
Test decryption of a version 1 AES256 encrypted credential with LPS off
625941ba5fdd1c0f98dc00bc
def get(self, uuid): <NEW_LINE> <INDENT> with open(self.match(uuid)) as bob: <NEW_LINE> <INDENT> data = json.load(bob) <NEW_LINE> <DEDENT> return self.create(**data)
Get a single issue. uuid can be a partial uuid.
625941baa17c0f6771cbdede
def insert(info: BusInfo) -> bool: <NEW_LINE> <INDENT> mongodb = UnibrowserDAO(host=__host, port=__port) <NEW_LINE> result = mongodb.insert(collection=__collection, documents=[info.to_object()]) <NEW_LINE> return result == 1
Inserts a single BusInfo object in Unibrowser's persistent storage. :param info: the BusInfo to insert :returns: true if the insert succeeds, false otherwise
625941ba851cf427c661a39c
def on_turn(self, game): <NEW_LINE> <INDENT> possible_moves = [ (self.x+1, self.y), (self.x-1, self.y), (self.x, self.y+1), (self.x, self.y-1), (self.x+1, self.y+1), (self.x+1, self.y-1), (self.x-1, self.y+1), (self.x-1, self.y-1), ] <NEW_LINE> target = possible_moves[randint(0, len(possible_moves)-1)] <NEW_LINE> tile = game.map.get_tile(*target) <NEW_LINE> if tile is not None and tile.is_passable(): <NEW_LINE> <INDENT> game.execute_action(actions.action.ActionInstance( source = self, action = actions.misc.Move(), target = target, )) <NEW_LINE> <DEDENT> game.end_turn(randint(60, 120))
Executes this NPC's turn AI, and then returns the amount of time till it can act again.
625941ba45492302aab5e14b
def db_get_document(doc_name, **kwargs): <NEW_LINE> <INDENT> return rdb_conn.conn.db().table(doc_name, **kwargs)
Gets and returns the document by the given name from the database. :param doc_name: Name of the document to fetch :return: The fetched document
625941ba4527f215b584c2e5
def _keep_dist_for_north_or_south_snake(self, direction, snake, dist_from_snake): <NEW_LINE> <INDENT> if snake.rect.centerx < self.rect.centerx and dist_from_snake > self.hover_dist: <NEW_LINE> <INDENT> self._go_chase_west() <NEW_LINE> <DEDENT> elif snake.rect.centerx >= self.rect.centerx and dist_from_snake > self.hover_dist: <NEW_LINE> <INDENT> self._go_chase_east() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if direction == "south": <NEW_LINE> <INDENT> self._go_chase_south() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._go_chase_north()
Causes Boo to keep its distance from Snake while Boo is going north or south.
625941bab5575c28eb68de88
def upgma( matrix, taxa, distances=True ): <NEW_LINE> <INDENT> return cluster.upgma(matrix,taxa,distances)
Carry out a cluster analysis based on the UPGMA algorithm (:evobib:`Sokal1958`). Parameters ---------- matrix : list A two-dimensional list containing the distances. taxa : list A list containing the names of all taxa corresponding to the distances in the matrix. distances : bool (default=True) If set to **False**, only the topology of the tree will be returned. Returns ------- newick : str A string in newick-format which can be further used in biological software packages to view and plot the tree. Examples -------- Function is automatically imported when importing lingpy. >>> from lingpy import * >>> from lingpy.algorithm import squareform Create an arbitrary list of taxa. >>> taxa = ['German','Swedish','Icelandic','English','Dutch'] Create an arbitrary matrix. >>> matrix = squareform([0.5,0.67,0.8,0.2,0.4,0.7,0.6,0.8,0.8,0.3]) Carry out the cluster analysis. >>> upgma(matrix,taxa,distances=False) '((Swedish,Icelandic),(English,(German,Dutch)));' See also -------- neighbor
625941bab7558d58953c4da5
@register(name='shrink', filter=TYPE_STATE_FILTER, unshade=True) <NEW_LINE> def _shrink(win, direction, vertical_first=True, xinerama=False): <NEW_LINE> <INDENT> if direction.is_middle: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> workarea = get_current_workarea(win, xinerama) <NEW_LINE> shrink = Shrinker(workarea=workarea, vertical_first=vertical_first) <NEW_LINE> geometry = shrink(win, direction.invert()) <NEW_LINE> log.debug('Setting %s' % (geometry,)) <NEW_LINE> win.set_geometry(geometry, direction)
Shrink window in given direction.
625941ba287bf620b61d38f8
def create_deployment(self, stack_id, command, app_id=None, instance_ids=None, comment=None, custom_json=None): <NEW_LINE> <INDENT> params = {'StackId': stack_id, 'Command': command, } <NEW_LINE> if app_id is not None: <NEW_LINE> <INDENT> params['AppId'] = app_id <NEW_LINE> <DEDENT> if instance_ids is not None: <NEW_LINE> <INDENT> params['InstanceIds'] = instance_ids <NEW_LINE> <DEDENT> if comment is not None: <NEW_LINE> <INDENT> params['Comment'] = comment <NEW_LINE> <DEDENT> if custom_json is not None: <NEW_LINE> <INDENT> params['CustomJson'] = custom_json <NEW_LINE> <DEDENT> return self.make_request(action='CreateDeployment', body=json.dumps(params))
Deploys a stack or app. + App deployment generates a `deploy` event, which runs the associated recipes and passes them a JSON stack configuration object that includes information about the app. + Stack deployment runs the `deploy` recipes but does not raise an event. :type stack_id: string :param stack_id: The stack ID. :type app_id: string :param app_id: The app ID, for app deployments. :type instance_ids: list :param instance_ids: The instance IDs for the deployment targets. :type command: dict :param command: A `DeploymentCommand` object that describes details of the operation. :type comment: string :param comment: A user-defined comment. :type custom_json: string :param custom_json: A string that contains user-defined, custom JSON. It is used to override the corresponding default stack configuration JSON values. The string should be in the following format and must escape characters such as '"'.: `"{"key1": "value1", "key2": "value2",...}"`
625941ba8a349b6b435e7fff
def aspect_aware_resizing( self, image, max_size, interpolation=Image.LANCZOS): <NEW_LINE> <INDENT> height, width = image.shape[:2] <NEW_LINE> larger_of_the_two = max(height, width) <NEW_LINE> if larger_of_the_two > max_size: <NEW_LINE> <INDENT> scaling_value = max_size / float(larger_of_the_two) <NEW_LINE> resize_height = int(np.floor(height * scaling_value)) <NEW_LINE> resize_width = int(np.floor(width * scaling_value)) <NEW_LINE> pillow_image = Image.fromarray(image) <NEW_LINE> pillow_image = pillow_image.resize( (resize_width, resize_height), resample=interpolation) <NEW_LINE> image = np.asarray(pillow_image) <NEW_LINE> <DEDENT> return image
Performs resizing while maintaining the aspect ratio of the image Parameters ---------- image : instance of `numpy.ndarray` Image matrix, which should be in the specified mode. max_size : int The maximum allowed image size, which applies to both width and height. The larger of the 2 will be used to compute the ratio of downsampling, which is then applied to both dimensions. interpolation : int, optional, defaults to PIL.Image.LANCZOS Interpolation algorithm used during downsampling, which should be one of the supported enums for `PIL.Image.resize`. Returns ------- image : instance of `numpy.ndarray` Image with dimensions guaranteed to be within the bounds specified by the `max_size` parameter.
625941baaad79263cf3908c6
def names(self, search=[]): <NEW_LINE> <INDENT> dct = self.parse() <NEW_LINE> insearch = lambda x: any([word in x for word in search]) <NEW_LINE> return [dct[e] for e in dct if insearch(e.text)]
Provide a list of names to search through and return the corresponding entries
625941bad8ef3951e32433c8
def acos(x): <NEW_LINE> <INDENT> return select( ne(x, _const(x, -1.0)), mul(_const(x, 2), atan2(sqrt(sub(_const(x, 1), square(x))), add(_const(x, 1), x))), full_like(x, onp.pi))
Elementwise arc cosine: :math:`\mathrm{acos}(x)`.
625941ba4e696a04525c92d9
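The select/atan2 construction above encodes the half-angle identity acos(x) = 2*atan2(sqrt(1 - x^2), 1 + x), with the x = -1 pole mapped to pi. A plain-Python check of that identity (a sketch with a hypothetical helper name, independent of the lax primitives used above):

import math

def acos_via_atan2(x):
    # Half-angle identity; at x == -1 both atan2 arguments are 0,
    # so return pi directly, mirroring the select() above.
    if x == -1.0:
        return math.pi
    return 2.0 * math.atan2(math.sqrt(1.0 - x * x), 1.0 + x)

assert all(abs(acos_via_atan2(v) - math.acos(v)) < 1e-12
           for v in (-1.0, -0.5, 0.0, 0.5, 1.0))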
@pytest.fixture(scope="module") <NEW_LINE> def affiliations_service(app): <NEW_LINE> <INDENT> return getattr(current_rdm_records, "affiliations_service")
Affiliations service.
625941ba7b180e01f3dc4690
def is_date(date_string): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> datetime.strptime(date_string, '%d-%m-%Y') <NEW_LINE> return True <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return False
Verifies if a string represents a date.
625941ba26068e7796caeb64
def get_sum_to_recharge(self): <NEW_LINE> <INDENT> balance = self.balance_checker() <NEW_LINE> if balance > settings.MIN_BALANCE: <NEW_LINE> <INDENT> self.log(u"Current balance %s - no need to refill", balance) <NEW_LINE> return 0 <NEW_LINE> <DEDENT> to_recharge = 250 - balance <NEW_LINE> to_recharge = max(settings.MIN_RECHARGE_AMOUNT, to_recharge) <NEW_LINE> to_recharge = min(settings.MAX_RECHARGE_AMOUNT, to_recharge) <NEW_LINE> self.log(u"Current balance %s - refill for %s", balance, to_recharge) <NEW_LINE> return to_recharge
Return the sum we need to put into balance
625941bad268445f265b4cf9
def convertEndianess(hexString): <NEW_LINE> <INDENT> if version < '3': <NEW_LINE> <INDENT> import codecs <NEW_LINE> S = hexString <NEW_LINE> return codecs.encode(codecs.decode(S, 'hex')[::-1], 'hex').decode() <NEW_LINE> <DEDENT> elif not version < '3': <NEW_LINE> <INDENT> s = hexString <NEW_LINE> return codecs.encode(codecs.decode(s, 'hex')[::-1], 'hex').decode()
Takes a string of little/big-endian hex, converts the endianness, and returns the hex string.
625941ba63b5f9789fde6f70
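On Python 3 alone the two identical branches above collapse to one expression; a sketch using bytes.fromhex instead of the codecs round-trip (hypothetical helper name):

def convert_endianness(hex_string):
    # Reverse the byte order of a hex string, e.g. 'deadbeef' -> 'efbeadde'.
    return bytes.fromhex(hex_string)[::-1].hex()

assert convert_endianness('deadbeef') == 'efbeadde'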
def testJsonChangeRelationTypeRequest(self): <NEW_LINE> <INDENT> pass
Test JsonChangeRelationTypeRequest
625941baa934411ee3751525
def run(self): <NEW_LINE> <INDENT> id_list = self.get_href() <NEW_LINE> url_list = self.get_url_list(id_list) <NEW_LINE> for url in url_list: <NEW_LINE> <INDENT> html = self.parse_url(url) <NEW_LINE> content_list = self.get_content_list(html) <NEW_LINE> self.save_content(content_list)
The run function implements the main logic.
625941bad164cc6175782bd9
def add_simple_link(self, issue, object): <NEW_LINE> <INDENT> data = {"object": object} <NEW_LINE> url = self._get_url('issue/' + str(issue) + '/remotelink') <NEW_LINE> r = self._session.post( url, data=json.dumps(data)) <NEW_LINE> simple_link = RemoteLink( self._options, self._session, raw=json_loads(r)) <NEW_LINE> return simple_link
Add a simple remote link from an issue to web resource. This avoids the admin access problems from add_remote_link by just using a simple object and presuming all fields are correct and not requiring more complex ``application`` data. ``object`` should be a dict containing at least ``url`` to the linked external URL and ``title`` to display for the link inside JIRA. For definitions of the allowable fields for ``object`` , see https://developer.atlassian.com/display/JIRADEV/JIRA+REST+API+for+Remote+Issue+Links. :param issue: the issue to add the remote link to :param object: the dictionary used to create remotelink data
625941ba66656f66f7cbc035
def get_tier_name(self): <NEW_LINE> <INDENT> tier = self.get_tier() <NEW_LINE> if tier: <NEW_LINE> <INDENT> return tier.tier_locale()
Returns the price tier for showing prices in the reviewer tools and developer hub.
625941ba1f037a2d8b94608a
def calc_sheet_id_onchange(self, cr, uid, ids, calc_sheet_id, context=None): <NEW_LINE> <INDENT> res = {'value': {'line_ids': False}} <NEW_LINE> if calc_sheet_id: <NEW_LINE> <INDENT> product_lines = [] <NEW_LINE> calssheet = self.pool.get('sale.prequotation').browse(cr, uid, calc_sheet_id, context=context) <NEW_LINE> for line in calssheet.order_line: <NEW_LINE> <INDENT> product_lines += self._prepare_pr_line(line) <NEW_LINE> <DEDENT> for line in calssheet.order_line_labour: <NEW_LINE> <INDENT> product_lines += self._prepare_pr_line(line) <NEW_LINE> <DEDENT> res['value']['line_ids'] = product_lines <NEW_LINE> res['value'].update({'all_selected': True}) <NEW_LINE> if calssheet.mat_sale_id and calssheet.labour_sale_id and (calssheet.mat_sale_id.id != calssheet.labour_sale_id.id): <NEW_LINE> <INDENT> res['value'].update({'ref_sale_order_id': False}) <NEW_LINE> <DEDENT> elif calssheet.mat_sale_id: <NEW_LINE> <INDENT> res['value'].update({'ref_sale_order_id': calssheet.mat_sale_id.id}) <NEW_LINE> <DEDENT> elif calssheet.labour_sale_id: <NEW_LINE> <INDENT> res['value'].update({'ref_sale_order_id': calssheet.labour_sale_id.id}) <NEW_LINE> <DEDENT> <DEDENT> return res
Changes purchase requisition lines if the Calc Sheet changes. @param calc_sheet_id: Changed calc sheet id @return: Dictionary of changed values
625941bafb3f5b602dac351a
def main(): <NEW_LINE> <INDENT> s = server.server('192.168.100.14', 4004) <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if s.acceptingConnections: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> conn, addr = s.accept() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> s.acceptConnection(conn) <NEW_LINE> <DEDENT> <DEDENT> for name, conn in s.users.items(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> r = conn.recv(1024).strip() <NEW_LINE> <DEDENT> except socket.error: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> message = s.decrypt(r) <NEW_LINE> if '::' in message: <NEW_LINE> <INDENT> splt = message.split('::') <NEW_LINE> if splt[0] == 'function': <NEW_LINE> <INDENT> splt2 = splt[1].split(':') <NEW_LINE> if splt2[0] == 'createNewGame': <NEW_LINE> <INDENT> s.createNewGame() <NEW_LINE> <DEDENT> elif splt2[0] == 'joinGame': <NEW_LINE> <INDENT> s.joinGame(name, splt2[1]) <NEW_LINE> <DEDENT> elif splt2[0] == 'ready': <NEW_LINE> <INDENT> s.addReadyPlayer(name) <NEW_LINE> <DEDENT> elif splt2[0] == 'start': <NEW_LINE> <INDENT> s.startGame(name) <NEW_LINE> <DEDENT> elif splt2[0] == 'movePlayer': <NEW_LINE> <INDENT> s.handleMove(name,splt2[1]) <NEW_LINE> <DEDENT> elif splt2[0] == 'endTurn': <NEW_LINE> <INDENT> s.endTurn(name) <NEW_LINE> <DEDENT> elif splt2[0] == 'makingSuggestion': <NEW_LINE> <INDENT> s.handleSuggestion(name,splt2[1]) <NEW_LINE> <DEDENT> elif splt2[0] == 'revealCard': <NEW_LINE> <INDENT> s.revealCard(name,splt2[1],splt2[2]) <NEW_LINE> <DEDENT> elif splt2[0] == 'makingAccusation': <NEW_LINE> <INDENT> s.handleAccusation(name,splt2[1]) <NEW_LINE> <DEDENT> <DEDENT> elif splt[0] == 'message': <NEW_LINE> <INDENT> s.broadcastMessageToAll(0, '%s> %s' % (name, splt[1])) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not message: <NEW_LINE> <INDENT> s.removePlayer(name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> time.sleep(.1) <NEW_LINE> <DEDENT> except (SystemExit, KeyboardInterrupt): <NEW_LINE> <INDENT> break
Main event loop that launches a Clue-Less server and listens for incoming connections and messages from clients.
625941ba099cdd3c635f0ae7
def sumOfDistancesInTree(self, N, edges): <NEW_LINE> <INDENT> if N==1: <NEW_LINE> <INDENT> return [0] <NEW_LINE> <DEDENT> d={} <NEW_LINE> for u,v in edges: <NEW_LINE> <INDENT> if u in d: <NEW_LINE> <INDENT> d[u].append(v) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d[u]=[v] <NEW_LINE> <DEDENT> if v in d: <NEW_LINE> <INDENT> d[v].append(u) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d[v]=[u] <NEW_LINE> <DEDENT> <DEDENT> self.matrix=[[-1 for i in range(N)] for j in range(N)] <NEW_LINE> List=[] <NEW_LINE> for u in range(N): <NEW_LINE> <INDENT> self.matrix[u][u]=0 <NEW_LINE> self.dfs(u,u,d,0) <NEW_LINE> List.append(sum(self.matrix[u])) <NEW_LINE> <DEDENT> return List
:type N: int :type edges: List[List[int]] :rtype: List[int]
625941ba23e79379d52ee3f2
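The row above runs a full DFS from every node, which is quadratic in N. The standard linear-time alternative (a sketch of that technique, not the row's method) re-roots the tree in two passes: a post-order pass computes subtree sizes and the answer for node 0, then a pre-order pass shifts the answer to each child.

from collections import defaultdict

def sum_of_distances_in_tree(n, edges):
    # ans[child] = ans[parent] - count[child] + (n - count[child]):
    # moving the root to child gets 1 closer to count[child] nodes
    # and 1 farther from the other n - count[child] nodes.
    tree = defaultdict(list)
    for u, v in edges:
        tree[u].append(v)
        tree[v].append(u)
    count = [1] * n
    ans = [0] * n

    def post_order(node, parent):
        for child in tree[node]:
            if child != parent:
                post_order(child, node)
                count[node] += count[child]
                ans[node] += ans[child] + count[child]

    def pre_order(node, parent):
        for child in tree[node]:
            if child != parent:
                ans[child] = ans[node] - count[child] + (n - count[child])
                pre_order(child, node)

    post_order(0, None)
    pre_order(0, None)
    return ans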
def load_od_data(coco_dir, data_name, use_crowded): <NEW_LINE> <INDENT> import pycocotools.coco <NEW_LINE> coco_dir = pathlib.Path(coco_dir) <NEW_LINE> coco = pycocotools.coco.COCO( str(coco_dir / "annotations" / f"instances_{data_name}.json") ) <NEW_LINE> class_names = [c["name"] for c in coco.loadCats(coco.getCatIds())] <NEW_LINE> jsonclassid_to_index = { c["id"]: class_names.index(c["name"]) for c in coco.loadCats(coco.getCatIds()) } <NEW_LINE> labels = [] <NEW_LINE> for entry in coco.loadImgs(coco.getImgIds()): <NEW_LINE> <INDENT> dirname, filename = entry["coco_url"].split("/")[-2:] <NEW_LINE> objs = coco.loadAnns( coco.getAnnIds(imgIds=entry["id"], iscrowd=None if use_crowded else False) ) <NEW_LINE> bboxes, classes, areas, crowdeds = [], [], [], [] <NEW_LINE> width, height = entry["width"], entry["height"] <NEW_LINE> for obj in objs: <NEW_LINE> <INDENT> if obj.get("ignore", 0) == 1: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> x, y, w, h = obj["bbox"] <NEW_LINE> bbox = np.array([x, y, x + w, y + h]) / np.array( [width, height, width, height] ) <NEW_LINE> bbox = np.clip(bbox, 0, 1) <NEW_LINE> if (bbox[:2] < bbox[2:]).all(): <NEW_LINE> <INDENT> bboxes.append(bbox) <NEW_LINE> classes.append(jsonclassid_to_index[obj["category_id"]]) <NEW_LINE> areas.append(obj["area"]) <NEW_LINE> crowdeds.append(obj["iscrowd"]) <NEW_LINE> <DEDENT> <DEDENT> labels.append( tk.od.ObjectsAnnotation( path=coco_dir / dirname / filename, width=width, height=height, classes=classes, bboxes=bboxes, areas=areas, crowdeds=crowdeds, ) ) <NEW_LINE> <DEDENT> return tk.od.ObjectsAnnotation.create_dataset(labels, class_names=class_names)
Load object detection data.
625941baa4f1c619b28afecc
def test_list_null_cr(self): <NEW_LINE> <INDENT> self.data_in = [None] <NEW_LINE> self.expected = '[null]' <NEW_LINE> opts.compact = True <NEW_LINE> opts.raw = True <NEW_LINE> self.assertEqual(self.json_out.create_json(self.data_in), self.expected)
Test [None] -cr
625941ba91f36d47f21ac381
def _get_splice_site_coordinates(self, t, start, end, exon_i): <NEW_LINE> <INDENT> left_diff, right_diff = TranscriptProviderUtils.determine_closest_distance_from_exon(start, end, exon_i, t) <NEW_LINE> if abs(left_diff) < abs(right_diff): <NEW_LINE> <INDENT> dist_from_exon = left_diff * -1 <NEW_LINE> if dist_from_exon > -1: dist_from_exon = -1 <NEW_LINE> <DEDENT> elif abs(right_diff) < abs(left_diff): <NEW_LINE> <INDENT> dist_from_exon = right_diff * -1 <NEW_LINE> if dist_from_exon < 1: dist_from_exon = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dist_from_exon = 0 <NEW_LINE> <DEDENT> if t.get_strand() == "-": <NEW_LINE> <INDENT> dist_from_exon *= -1 <NEW_LINE> <DEDENT> return dist_from_exon
Returns distance from exon.
625941bafbf16365ca6f6048
def nb_mots(chaine): <NEW_LINE> <INDENT> if chaine == "": <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> res = 1 <NEW_LINE> for car in chaine: <NEW_LINE> <INDENT> if car == " ": <NEW_LINE> <INDENT> res += 1 <NEW_LINE> <DEDENT> <DEDENT> return res
Assumes that chaine contains words separated by a single space. In that case, counting the words amounts to counting the spaces and adding 1... Note: the count() and split() functions exist, but using them would be cheating
625941bae8904600ed9f1db4
def get_utc_timestamp() -> Text: <NEW_LINE> <INDENT> return str(datetime.datetime.utcnow().timestamp() * 1000)
Get utc timestamp. Returns: Text: utc timestamp
625941ba925a0f43d2549cff
def get_reference_node(self, name='reference'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ref_node = self.get_node('reference', name) <NEW_LINE> <DEDENT> except NodeNotFoundException: <NEW_LINE> <INDENT> ref_node = self.create_node('reference', name, {'reference': 'reference'}, reference=True) <NEW_LINE> self.get_reference_node().instance(ref_node, key='reference_%s' % name) <NEW_LINE> <DEDENT> return ref_node
Nodes returned here are very easily referenced by name and then function as an index for all attached nodes. The most typical use case is to index all of the nodes of a certain type, but the functionality is not limited to this.
625941baec188e330fd5a631
def get_year(soup): <NEW_LINE> <INDENT> nav_obj_lst = soup.findAll("nav", attrs={"id": ["breadcrumbs"]}) <NEW_LINE> if nav_obj_lst: <NEW_LINE> <INDENT> if nav_obj_lst[0].text: <NEW_LINE> <INDENT> year_with_colon = "".join( [ x for x in nav_obj_lst[0].text.split(" ") if any(c.isdigit() for c in x) ] ) <NEW_LINE> if year_with_colon: <NEW_LINE> <INDENT> year = "".join([x for x in year_with_colon if x.isdigit()]) <NEW_LINE> return year
function pulls the year out of a PCC Class Schedule Course Page. Year is in breadcrumbs at the top of the page input: bs4 Soup Object, from a PCC Class Schedule Course Page output: str, string that is the course year, Example: '2018'
625941ba3d592f4c4ed1cf0a
def countArrangement(self, n: int) -> int: <NEW_LINE> <INDENT> @lru_cache(None) <NEW_LINE> def dfs(i, remains: Set[int]): <NEW_LINE> <INDENT> if i == n+1: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> cnt = 0 <NEW_LINE> for j in remains: <NEW_LINE> <INDENT> if i%j == 0 or j%i == 0: <NEW_LINE> <INDENT> cnt += dfs(i+1, remains - {j}) <NEW_LINE> <DEDENT> <DEDENT> return cnt <NEW_LINE> <DEDENT> return dfs(1, frozenset(range(1, n+1)))
DFS using a set, specifically a frozenset so the cached states are hashable
625941bad99f1b3c44c67421
def compute_checksum(stream, algo, message_digest, chunk_size=None, progress_callback=None): <NEW_LINE> <INDENT> chunk_size = chunk_size_or_default(chunk_size) <NEW_LINE> bytes_read = 0 <NEW_LINE> while 1: <NEW_LINE> <INDENT> chunk = stream.read(chunk_size) <NEW_LINE> if not chunk: <NEW_LINE> <INDENT> if progress_callback: <NEW_LINE> <INDENT> progress_callback(bytes_read) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> message_digest.update(chunk) <NEW_LINE> bytes_read += len(chunk) <NEW_LINE> if progress_callback: <NEW_LINE> <INDENT> progress_callback(bytes_read) <NEW_LINE> <DEDENT> <DEDENT> return "{0}:{1}".format(algo, message_digest.hexdigest())
Helper method to compute a checksum from a stream. :param stream: File-like object. :param algo: Identifier for checksum algorithm. :param message_digest: A message digest instance. :param chunk_size: Read at most size bytes from the file at a time. :param progress_callback: Function accepting one argument with number of bytes read. (Default: ``None``) :returns: The checksum.
625941ba1d351010ab8559a9
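Assuming the helper above is importable, a typical call pairs it with a hashlib digest (hashlib.sha256 and the input filename are illustrative choices, not mandated by the row):

import hashlib

with open("data.bin", "rb") as stream:  # hypothetical input file
    checksum = compute_checksum(stream, "sha256", hashlib.sha256())
# checksum has the form 'sha256:<hexdigest>'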
def descend(): <NEW_LINE> <INDENT> global objects, depth, dstairs, astairs <NEW_LINE> LEVELS[depth] = objects <NEW_LINE> objects = [] <NEW_LINE> depth += 1 <NEW_LINE> try: <NEW_LINE> <INDENT> objects = LEVELS[depth] <NEW_LINE> for obj in objects: <NEW_LINE> <INDENT> if obj.name == dstairs.name: <NEW_LINE> <INDENT> dstairs = obj <NEW_LINE> <DEDENT> if obj.name == astairs.name: <NEW_LINE> <INDENT> astairs = obj <NEW_LINE> <DEDENT> <DEDENT> while True: <NEW_LINE> <INDENT> x = astairs.x + roguelib.random_get_int(0, -1, 1) <NEW_LINE> y = astairs.y + roguelib.random_get_int(0, -1, 1) <NEW_LINE> if not is_blocked(x, y) and x != astairs.x and y != astairs.y: <NEW_LINE> <INDENT> player.x = x <NEW_LINE> player.y = y <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> make_dungeon() <NEW_LINE> <DEDENT> compute_fov()
Descends the stairs, creating a new map and saving the old one.
625941ba8e7ae83300e4ae57
def histogram_eq(image): <NEW_LINE> <INDENT> image1 = np.copy(image) <NEW_LINE> image1[:,:,0] = cv2.equalizeHist(image1[:,:,0]) <NEW_LINE> image1[:,:,1] = cv2.equalizeHist(image1[:,:,1]) <NEW_LINE> image1[:,:,2] = cv2.equalizeHist(image1[:,:,2]) <NEW_LINE> return image1
Perform histogram equalization on the input image. See https://en.wikipedia.org/wiki/Histogram_equalization.
625941baa17c0f6771cbdedf
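Equalizing the three channels independently, as above, can shift the image's colours. A common variant (a sketch of an alternative, not the row's method) equalizes only the luma channel in YCrCb space:

import cv2

def histogram_eq_luma(image):
    # Equalize only the Y (luma) channel so chroma is left untouched.
    ycrcb = cv2.cvtColor(image, cv2.COLOR_BGR2YCrCb)
    ycrcb[:, :, 0] = cv2.equalizeHist(ycrcb[:, :, 0])
    return cv2.cvtColor(ycrcb, cv2.COLOR_YCrCb2BGR)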
def gather_property(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.calculate_all(*args, **kwargs)
The old alias for calculate_all, retained for compatibility
625941ba23849d37ff7b2f1d
def text_process(mess): <NEW_LINE> <INDENT> nopunc = [char for char in mess if char not in string.punctuation] <NEW_LINE> nopunc = ''.join(nopunc) <NEW_LINE> return [word for word in nopunc.split() if word.lower() not in stopwords.words('english')]
Removes punctuation, removes common (stop) words, and returns the cleaned words.
625941ba21bff66bcd6847e0
def test1(self): <NEW_LINE> <INDENT> y = T.tensor4('y') <NEW_LINE> self.mode = self.mode.excluding('fusion') <NEW_LINE> f = theano.function([y], y[::-1][::-1], mode=self.mode) <NEW_LINE> graph = f.maker.fgraph.toposort() <NEW_LINE> divs = [node for node in graph if isinstance(node.op, T.elemwise.Elemwise) and isinstance(node.op.scalar_op, theano.scalar.IntDiv)] <NEW_LINE> assert len(divs) == 0
Tests removing the extra floor_div by 1 introduced by local_subtensor_merge optimization
625941ba10dbd63aa1bd2a3a
def main(self, args=None, prog_name=None, complete_var=None, standalone_mode=True, **extra): <NEW_LINE> <INDENT> return click.Command.main(self, args=args, prog_name=self.name, complete_var=complete_var, standalone_mode=standalone_mode, **extra)
Workaround click 4.0 bug https://github.com/mitsuhiko/click/issues/365
625941baab23a570cc25000b
@staff_member_required <NEW_LINE> def import_ldap_user(request): <NEW_LINE> <INDENT> if request.method == 'POST': <NEW_LINE> <INDENT> form = ImportLDAPForm(request.POST) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> tutorial = form.cleaned_data['tutorial'] <NEW_LINE> uids = form.cleaned_data['uids'].split() <NEW_LINE> g = Group.objects.get(name='User') <NEW_LINE> udict = {} <NEW_LINE> unknown_uids = [] <NEW_LINE> for uid in uids: <NEW_LINE> <INDENT> udict[uid] = fetch_ldapuser_dict(uid=uid) <NEW_LINE> if udict[uid] is None: <NEW_LINE> <INDENT> unknown_uids.append(uid) <NEW_LINE> <DEDENT> <DEDENT> if unknown_uids: <NEW_LINE> <INDENT> messages.add_message(request, messages.ERROR, "ERROR! Import cancelled. Unknown UIDs: %s" % (", ".join(unknown_uids))) <NEW_LINE> return render(request,'admin/accounts/user/import_ldap.html', {'form':form, 'title':"Import LDAP Users" }) <NEW_LINE> <DEDENT> for uid in udict: <NEW_LINE> <INDENT> u = create_localuser_from_ldapuser(username=uid, ldapUser=udict[uid]) <NEW_LINE> u.groups.add(g) <NEW_LINE> u.tutorial = tutorial <NEW_LINE> u.save() <NEW_LINE> <DEDENT> <DEDENT> return HttpResponseRedirect(reverse('admin:accounts_user_changelist')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> form = ImportLDAPForm() <NEW_LINE> <DEDENT> return render(request,'admin/accounts/user/import_ldap.html', {'form':form, 'title':"Import LDAP Users" })
View in the admin
625941ba090684286d50eb6c
def get_data_filter(self): <NEW_LINE> <INDENT> return self._data_filter
Returns the station filter :rtype: Filter. :return: the station filter
625941ba3eb6a72ae02ec361
def nothing(self, x): <NEW_LINE> <INDENT> pass
Callback function for the trackbars used by the gui_choose_hsv() function.
625941baf548e778e58cd407
def _schurDecompBasicQrIteration(mat): <NEW_LINE> <INDENT> if(not ifSquareMat(mat)): <NEW_LINE> <INDENT> print('[schurDecompBasicQrIteration]: not a square matrix'); <NEW_LINE> return None; <NEW_LINE> <DEDENT> uMat = eye(sizeSquareMat(mat)); <NEW_LINE> tMat = cloneMat(mat); <NEW_LINE> oldEigDiagVec = zeroes(sizeSquareMat(mat)); <NEW_LINE> newEigDiagVec = zeroes(sizeSquareMat(mat)); <NEW_LINE> while(True): <NEW_LINE> <INDENT> (qMat, rMat) = qrDecomp(tMat); <NEW_LINE> uMat = mulMatMat(uMat, qMat); <NEW_LINE> tMat = mulMatMat(rMat, qMat); <NEW_LINE> newEigDiagVec = getMatDiag(tMat); <NEW_LINE> if(ifZeroVec(subVecVec(oldEigDiagVec, newEigDiagVec))): break; <NEW_LINE> oldEigDiagVec = newEigDiagVec; <NEW_LINE> <DEDENT> return (uMat, tMat);
[schurDecompBasicQrIteration]: compute an upper triangular matrix tMat and a unitary matrix uMat such that mat = uMat * tMat * uMat* is the Schur decomposition of mat. It requires that mat has distinct real eigenvalues. If mat is Hermitian, then tMat is a diagonal matrix: mat -> (uMat, tMat): mat = uMat * tMat * uMat* args: mat, a square matrix returns: uMat: unitary matrix tMat: upper-triangular matrix, or diagonal matrix if mat is Hermitian/Symmetric reference: Sol Large-scale Eigenvalue Problems, Chp 3, Algo 3.1, P 52
625941ba0383005118ecf470
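A numpy sketch of the same unshifted QR iteration (assuming a real matrix with distinct real eigenvalues, as the row's docstring requires, so the iteration converges; helper name is hypothetical):

import numpy as np

def schur_qr_iteration(mat, max_iter=10000, tol=1e-12):
    # Repeated A_{k+1} = R_k @ Q_k, accumulating U so that
    # mat ~ U @ T @ U.T with T converging to (upper-)triangular form.
    t = np.array(mat, dtype=float)
    u = np.eye(t.shape[0])
    for _ in range(max_iter):
        q, r = np.linalg.qr(t)
        u = u @ q
        new_t = r @ q
        if np.allclose(np.diag(new_t), np.diag(t), atol=tol):
            return u, new_t
        t = new_t
    return u, t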
def cmd_STORED(self): <NEW_LINE> <INDENT> self._current.popleft().success(True)
Manage a success response to a set operation.
625941bac4546d3d9de728bc
def DefaultPrivateKeySize(): <NEW_LINE> <INDENT> return 2048
User can choose size of his Private Key during install. Can be 1024, 2048 or 4096.
625941ba435de62698dfdadf
def testGetPreviewDimensions_shortCanvas(self): <NEW_LINE> <INDENT> mockModel = mock.Mock(name='mockModel') <NEW_LINE> mockModel.getWidth.return_value = 100 <NEW_LINE> mockModel.getHeight.return_value = 100 <NEW_LINE> c = controller.Controller(mockModel, None) <NEW_LINE> mockPreview = mock.Mock(name='mockPreview') <NEW_LINE> mockPreview.previewContainer.winfo_width.side_effect = [100] <NEW_LINE> mockPreview.previewContainer.winfo_height.side_effect = [50] <NEW_LINE> c.preview = mockPreview <NEW_LINE> dimensions = c._Controller__getPreviewDimensions() <NEW_LINE> self.assertEqual(dimensions, (50, 50))
Check that if the preview canvas is too short the region is scaled down.
625941bae1aae11d1e749b40
def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> result = 0 <NEW_LINE> i = 0 <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> start = timeit.default_timer() <NEW_LINE> func(*args, **kwargs) <NEW_LINE> final = timeit.default_timer() <NEW_LINE> result += final - start <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> print(result / i)
:param args: :param kwargs: :return: None; prints the average time over n cycles of func
625941ba31939e2706e4ccfb
def InstancesChanged(self): <NEW_LINE> <INDENT> rv = bool(_c.env_getInstancesChanged(self.__env)) <NEW_LINE> _c.env_setInstancesChanged(self.__env, False) <NEW_LINE> return rv
test if Instances have changed since last call
625941ba82261d6c526ab32e
def to_keypoint_image(self, size=1): <NEW_LINE> <INDENT> eu.do_assert(len(self.keypoints) > 0) <NEW_LINE> height, width = self.shape[:2] <NEW_LINE> image = np.zeros((height, width, len(self.keypoints)), dtype=np.uint8) <NEW_LINE> sizeh = max(0, (size - 1) // 2) <NEW_LINE> for i, kp in enumerate(self.keypoints): <NEW_LINE> <INDENT> y, x = kp.y_int, kp.x_int <NEW_LINE> x1 = np.clip(x - sizeh, 0, width - 1) <NEW_LINE> x2 = np.clip(x + sizeh + 1, 0, width - 1) <NEW_LINE> y1 = np.clip(y - sizeh, 0, height - 1) <NEW_LINE> y2 = np.clip(y + sizeh + 1, 0, height - 1) <NEW_LINE> if x1 < x2 and y1 < y2: <NEW_LINE> <INDENT> image[y1:y2, x1:x2] = 128 <NEW_LINE> <DEDENT> if 0 <= y < height and 0 <= x < width: <NEW_LINE> <INDENT> image[y, x, i] = 255 <NEW_LINE> <DEDENT> <DEDENT> return image
Draws a new black image of shape (H,W,N) in which all keypoint coordinates are set to 255. (H=shape height, W=shape width, N=number of keypoints) This function can be used as a helper when augmenting keypoints with a method that only supports the augmentation of images. Parameters ------- size : int Size of each (squared) point. Returns ------- image : (H,W,N) ndarray Image in which the keypoints are marked. H is the height, defined in KeypointsOnImage.shape[0] (analogous W). N is the number of keypoints.
625941ba5fdd1c0f98dc00bd
def demo(): <NEW_LINE> <INDENT> import taurus <NEW_LINE> db = taurus.Authority() <NEW_LINE> host = db.getNormalName() <NEW_LINE> w = main_TaurusDbTableWidget(host, TaurusElementType.Device) <NEW_LINE> return w
Table panels
625941ba63d6d428bbe4437b
def forward(self, x : UFloatTensor) -> UFloatTensor: <NEW_LINE> <INDENT> x = self.conv(x) <NEW_LINE> if self.activation: <NEW_LINE> <INDENT> x = self.activation(x) <NEW_LINE> <DEDENT> if self.bn: <NEW_LINE> <INDENT> x = self.bn(x) <NEW_LINE> <DEDENT> return x
:param x: Any input tensor that can be input into nn.Conv2d. :return: Tensor with convolutional layer and optional activation and batchnorm applied.
625941ba9b70327d1c4e0c5f
def set_keybindings(self, keybindings=[], debug_event=None): <NEW_LINE> <INDENT> tbl = [] <NEW_LINE> bindings = keybindings <NEW_LINE> if debug_event is not None: <NEW_LINE> <INDENT> mod = wx.ACCEL_CMD if util.platform() == "osx" else wx.ACCEL_CTRL <NEW_LINE> bindings.append((mod, ord('`'), debug_event)) <NEW_LINE> <DEDENT> for binding in keybindings: <NEW_LINE> <INDENT> keyid = wx.NewId() <NEW_LINE> self.Bind(wx.EVT_MENU, binding[2], id=keyid) <NEW_LINE> tbl.append((binding[0], binding[1], keyid)) <NEW_LINE> <DEDENT> if len(bindings): <NEW_LINE> <INDENT> self.SetAcceleratorTable(wx.AcceleratorTable(tbl))
Method to easily set key bindings. Also sets up debug keybindings and events.
625941ba85dfad0860c3ace5
def download_weekly_pointing_file(date): <NEW_LINE> <INDENT> date = parse_time(date) <NEW_LINE> tmp_dir = tempfile.mkdtemp() <NEW_LINE> base_url = 'ftp://legacy.gsfc.nasa.gov/FTP/glast/data/lat/weekly/spacecraft/' <NEW_LINE> fbasename = 'lat_spacecraft_weekly_w' <NEW_LINE> weekly_file_start = parse_time('2008-08-07') <NEW_LINE> base_week = 10 <NEW_LINE> time_diff = date - weekly_file_start <NEW_LINE> weekdiff = time_diff.days // 7 <NEW_LINE> week = weekdiff + base_week <NEW_LINE> weekstr = '{:03.0f}'.format(week) <NEW_LINE> full_fname = fbasename + weekstr + '_p202_v001.fits' <NEW_LINE> pointing_file_url = base_url + full_fname <NEW_LINE> try: <NEW_LINE> <INDENT> resp = urllib.request.urlopen(pointing_file_url) <NEW_LINE> exists = True <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> urllib.error.HTTPError <NEW_LINE> exists = False <NEW_LINE> <DEDENT> if not exists: <NEW_LINE> <INDENT> raise ValueError('No Fermi pointing files found for given date!') <NEW_LINE> <DEDENT> destination = os.path.join(tmp_dir, full_fname) <NEW_LINE> urllib.request.urlretrieve(pointing_file_url, destination) <NEW_LINE> return destination
Downloads the FERMI/LAT weekly pointing file corresponding to the specified date. This file contains 1 minute cadence data on the spacecraft pointing, useful for calculating detector angles. Parameters ---------- date : `datetime.datetime` A datetime object or other date format understood by the parse_time function. Returns ------- destination : str Path to the downloaded file in a temporary directory.
625941ba55399d3f0558853f
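A worked example of the week-number arithmetic above, using an illustrative date (pure datetime, no download):

from datetime import datetime

weekly_file_start = datetime(2008, 8, 7)
base_week = 10
date = datetime(2012, 2, 15)
week = (date - weekly_file_start).days // 7 + base_week   # 1287 // 7 + 10 = 193
fname = 'lat_spacecraft_weekly_w{:03.0f}_p202_v001.fits'.format(week)
# -> 'lat_spacecraft_weekly_w193_p202_v001.fits'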
def kmerFrequencies(self,samp=None): <NEW_LINE> <INDENT> res = self._kmers(samp,self.frequences,self.resultF) <NEW_LINE> normPolicy = self.normPolicy <NEW_LINE> assert normPolicy & NORM_POLICY.FREQ, normPolicy <NEW_LINE> if normPolicy & NORM_POLICY.EU_ROW: <NEW_LINE> <INDENT> res["val"] /= n.sqrt(n.dot(res["val"],res["val"])) <NEW_LINE> <DEDENT> elif normPolicy & NORM_POLICY.NONE_ROW: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError(normPolicy) <NEW_LINE> <DEDENT> return res
Return frequencies normalized according to the normalization policy supplied to the ctor. @param[in] samp If not None, process() will be called first for this data, and then the frequencies returned. Otherwise it will assume that process() has been called before (maybe multiple times), and simply finalize and return the counts.
625941ba4d74a7450ccd404f
def get_ResponseFile(self): <NEW_LINE> <INDENT> return super(IHttpResponse, self).get_ResponseFile()
Method IHttpResponse.get_ResponseFile OUTPUT responseFilePath : BSTR*
625941baf548e778e58cd408
def lastScenePos(self): <NEW_LINE> <INDENT> pass
QGraphicsSceneMouseEvent.lastScenePos() -> QPointF
625941ba8e05c05ec3eea1fd
def test_case(self): <NEW_LINE> <INDENT> self.assertEqual( Select( [Case((self.schema.FOO.BAR < 1), Constant(2), Constant(3))], From=self.schema.FOO, Limit=123 ).toSQL(), SQLFragment("select case when BAR < ? then ? else ? end from FOO limit ?", [1, 2, 3, 123]) ) <NEW_LINE> self.assertEqual(Case((self.schema.FOO.BAR < 1), Constant(2), Constant(3)).allColumns(), [self.schema.FOO.BAR, ]) <NEW_LINE> self.assertEqual( Select( [Case((self.schema.FOO.BAR < 1), Constant(2), None)], From=self.schema.FOO, Limit=123 ).toSQL(), SQLFragment("select case when BAR < ? then ? else null end from FOO limit ?", [1, 2, 123]) ) <NEW_LINE> self.assertEqual(Case((self.schema.FOO.BAR < 1), Constant(2), None).allColumns(), [self.schema.FOO.BAR, ]) <NEW_LINE> self.assertEqual( Select( [Case((self.schema.FOO.BAR < 1), None, Constant(3))], From=self.schema.FOO, Limit=123 ).toSQL(), SQLFragment("select case when BAR < ? then null else ? end from FOO limit ?", [1, 3, 123]) ) <NEW_LINE> self.assertEqual(Case((self.schema.FOO.BAR < 1), None, Constant(3)).allColumns(), [self.schema.FOO.BAR, ])
A L{Case} object will generate an appropriate SQL statement.
625941ba5166f23b2e1a4fe5
def get_exif_data(image): <NEW_LINE> <INDENT> exif_data = {} <NEW_LINE> try: <NEW_LINE> <INDENT> info = image._getexif() <NEW_LINE> if info: <NEW_LINE> <INDENT> for tag, value in info.items(): <NEW_LINE> <INDENT> decoded = TAGS.get(tag, tag) <NEW_LINE> if decoded == "GPSInfo": <NEW_LINE> <INDENT> gps_data = {} <NEW_LINE> for gps_tag in value: <NEW_LINE> <INDENT> sub_decoded = GPSTAGS.get(gps_tag, gps_tag) <NEW_LINE> gps_data[sub_decoded] = value[gps_tag] <NEW_LINE> <DEDENT> exif_data[decoded] = gps_data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> exif_data[decoded] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> raise KeyError('get_exif_data: ' + str(e)) <NEW_LINE> <DEDENT> except AttributeError as e: <NEW_LINE> <INDENT> raise AttributeError('No EXIF') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return exif_data
Returns a dictionary from the exif data of an PIL Image item. Also converts the GPS Tags
625941baa8370b771705272c
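A hedged usage sketch; 'photo.jpg' is a placeholder path, Pillow must be installed, and the image must actually carry EXIF/GPS tags for the prints to show anything:

from PIL import Image

img = Image.open('photo.jpg')
exif = get_exif_data(img)
gps = exif.get('GPSInfo', {})
print(gps.get('GPSLatitude'), gps.get('GPSLatitudeRef'))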
def _ok_to_deallocate_displist(self): <NEW_LINE> <INDENT> raise Exception("subclass must implement")
Say whether it's ok to deallocate self's OpenGL display list right now (assuming our OpenGL context is current). [subclasses must override this]
625941ba507cdc57c6306b60
def get_insert_columns(self, join=True): <NEW_LINE> <INDENT> columns = "" <NEW_LINE> for item in self.columns: <NEW_LINE> <INDENT> thistype = item[1][0] <NEW_LINE> if ((thistype != "skip") and (thistype != "combine") and (self.contains_pk or thistype[0:3] != "pk-")): <NEW_LINE> <INDENT> columns += item[0] + ", " <NEW_LINE> <DEDENT> <DEDENT> columns = columns.rstrip(', ') <NEW_LINE> if join: <NEW_LINE> <INDENT> return columns <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return columns.lstrip("(").rstrip(")").split(", ")
Gets a set of column names for insert statements.
625941ba8a43f66fc4b53ef5
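A standalone illustration of the filtering rule above, with made-up column definitions; "skip"/"combine" types and (when contains_pk is false) "pk-" types are dropped:

columns = [("record_id", ("pk-auto",)), ("genus", ("char", 20)),
           ("species", ("char", 20)), ("notes", ("skip",))]
contains_pk = False
kept = [name for name, (ctype, *_) in columns
        if ctype not in ("skip", "combine")
        and (contains_pk or not ctype.startswith("pk-"))]
print(", ".join(kept))   # -> genus, species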
def run_command_verbose(self, cmd): <NEW_LINE> <INDENT> LOG.info("Running command %s", cmd) <NEW_LINE> stdout, stderr, exit_code = self.run_remote_cmd(cmd) <NEW_LINE> LOG.info("The command returned the output %s", stdout) <NEW_LINE> LOG.info("The stderr of the command was %s", stderr) <NEW_LINE> LOG.info("The exit code of the command was %s", exit_code) <NEW_LINE> return stdout
Run the given command and log anything it returns.
625941ba236d856c2ad4466a
def user_order_heartbeat(self,mission_id): <NEW_LINE> <INDENT> helper=DBHelper(1) <NEW_LINE> statement='update mission set status=2 where status=0 and end_time<'+str(time.time()) <NEW_LINE> helper.commit(statement) <NEW_LINE> now=time.time() <NEW_LINE> statement='update user_order set status=4 where %f-order_begin_time>"%d" and status=-1'%(now,3600) <NEW_LINE> helper.commit(statement) <NEW_LINE> statement='update user_order set status=-1 where status=-2 and mission_id='+str(mission_id)+' and order_begin_time<'+str(time.time()) <NEW_LINE> return helper.commit(statement)
Update mission statuses and dispatch missions.
625941ba8a349b6b435e8000
def normalise(self, meta, val): <NEW_LINE> <INDENT> if hasattr(self, "normalise_either"): <NEW_LINE> <INDENT> result = self.normalise_either(meta, val) <NEW_LINE> if result is not NotSpecified: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> <DEDENT> if val is NotSpecified: <NEW_LINE> <INDENT> if hasattr(self, "normalise_empty"): <NEW_LINE> <INDENT> return self.normalise_empty(meta) <NEW_LINE> <DEDENT> elif hasattr(self, "default"): <NEW_LINE> <INDENT> return self.default(meta) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return val <NEW_LINE> <DEDENT> <DEDENT> elif hasattr(self, "normalise_filled"): <NEW_LINE> <INDENT> return self.normalise_filled(meta, val) <NEW_LINE> <DEDENT> raise BadSpec("Spec doesn't know how to deal with this value", spec=self, meta=meta, val=val)
Use this spec to normalise our value
625941ba63f4b57ef0000fad
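A minimal sketch of the dispatch contract above; NotSpecified and the spec class are stand-ins for the library's own definitions, and the dispatch is reduced to the two common hooks:

class NotSpecifiedKls:
    pass
NotSpecified = NotSpecifiedKls()

class integer_spec:
    def normalise(self, meta, val):
        # same dispatch order as above, without normalise_either/default
        if val is NotSpecified:
            return self.normalise_empty(meta)
        return self.normalise_filled(meta, val)

    def normalise_empty(self, meta):
        return 0

    def normalise_filled(self, meta, val):
        return int(val)

spec = integer_spec()
assert spec.normalise(None, NotSpecified) == 0
assert spec.normalise(None, "42") == 42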
def test_index(self): <NEW_LINE> <INDENT> resp = self.app.get("/") <NEW_LINE> self.assertEqual(resp.status_code, status.HTTP_200_OK) <NEW_LINE> self.assertIn(b"NYU DevOps eCommerce Promotions", resp.data)
Test index call
625941ba9b70327d1c4e0c60
def create_test_posts(instance): <NEW_LINE> <INDENT> instance.post = Post( title="My first title", description="", source="some text [...] end of abstract", author= instance.staffUser ) <NEW_LINE> instance.post.save() <NEW_LINE> instance.post2 = Post( title="My second title", description="", source="some text [...] end of abstract", author= instance.staffUser ) <NEW_LINE> instance.post2.save() <NEW_LINE> instance.post3 = Post( title="My third title", description="", draft=True, source="some text [...] end of abstract", author= instance.staffUser ) <NEW_LINE> instance.post3.save() <NEW_LINE> instance.post4 = Post( title="My fourth title", description="", source="some text [...] end of abstract", author= instance.staffUser ) <NEW_LINE> instance.post4.save() <NEW_LINE> instance.post5 = Post( title="My fifth title", description="", source="some text [...] end of abstract", author= instance.staffUser ) <NEW_LINE> instance.post5.save()
Create 5 test posts for tests. Run create_test_tags and create_test_users first.
625941ba99cbb53fe6792a73
def _loop_watching_for_changes(self): <NEW_LINE> <INDENT> while not self._quit_event.is_set(): <NEW_LINE> <INDENT> if self.ready: <NEW_LINE> <INDENT> if self._automatic_restarts: <NEW_LINE> <INDENT> self._handle_changes(_CHANGE_POLLING_MS) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(_CHANGE_POLLING_MS/1000.0)
Loops until the InstancePool is done watching for file changes.
625941ba66656f66f7cbc036
def update_docker_compose(self, default_rabbit_link="rabbitmq", toggle=False): <NEW_LINE> <INDENT> dc_filepath = os.path.join(self.project_dir, 'docker-compose.yml') <NEW_LINE> with open(dc_filepath, 'r') as _: <NEW_LINE> <INDENT> docker_compose = yaml.safe_load(_) <NEW_LINE> docker_compose.pop('version') <NEW_LINE> <DEDENT> if 'services' not in docker_compose: <NEW_LINE> <INDENT> docker_compose['services'] = dict() <NEW_LINE> <DEDENT> if 'rabbitmq' not in docker_compose['services']: <NEW_LINE> <INDENT> queue_stack = defaultdict(dict) <NEW_LINE> queue_stack['rabbitmq']['image'] = 'rabbitmq:management' <NEW_LINE> queue_stack['rabbitmq']['ports'] = ["5672:5672", "15672:15672"] <NEW_LINE> queue_stack['rabbitmq']['env_file'] = [".env"] <NEW_LINE> docker_compose['services'].update(queue_stack) <NEW_LINE> env_vars = [ "RABBITMQ_DEFAULT_USER", "RABBITMQ_DEFAULT_PASS" ] <NEW_LINE> self.add_lines_to_env(env_vars) <NEW_LINE> <DEDENT> if toggle and self.component_name in docker_compose['services']: <NEW_LINE> <INDENT> docker_compose['services'].pop(self.component_name) <NEW_LINE> status = 'disabled' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_service = defaultdict(dict) <NEW_LINE> _, parent_rel_dir = os.path.split(self.parent_dir) <NEW_LINE> new_service[self.component_name]['build'] = './{}/{}'.format( parent_rel_dir, self.component_name) <NEW_LINE> if re.match(r"^mock_for.*", self.component_name): <NEW_LINE> <INDENT> real_component_name = re.sub(r'^mock_for_(.*)', r'\g<1>', self.component_name) <NEW_LINE> links = [default_rabbit_link, real_component_name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> links = [default_rabbit_link,] <NEW_LINE> <DEDENT> new_service[self.component_name]['links'] = links <NEW_LINE> docker_compose['services'].update(new_service) <NEW_LINE> status = 'enabled' <NEW_LINE> <DEDENT> with open(os.path.join(self.project_dir, 'docker-compose.yml'), 'w') as _: <NEW_LINE> <INDENT> _.write("version: '2.0'\n") <NEW_LINE> _.write(yaml.dump(docker_compose, default_flow_style=False)) <NEW_LINE> <DEDENT> return status
Generate docker-compose.yml
625941ba26238365f5f0ecf6
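For reference, a small sketch reconstructing just the rabbitmq block built above and printing the YAML it serializes to (output shown in comments; PyYAML sorts keys alphabetically):

import yaml
from collections import defaultdict

queue_stack = defaultdict(dict)
queue_stack['rabbitmq']['image'] = 'rabbitmq:management'
queue_stack['rabbitmq']['ports'] = ["5672:5672", "15672:15672"]
queue_stack['rabbitmq']['env_file'] = [".env"]
print(yaml.dump({'services': dict(queue_stack)}, default_flow_style=False))
# services:
#   rabbitmq:
#     env_file:
#     - .env
#     image: rabbitmq:management
#     ports:
#     - 5672:5672
#     - 15672:15672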
def endGeom(self, nodeName): <NEW_LINE> <INDENT> if self.writer is not None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> geom = Geom(self.vdata) <NEW_LINE> geom.addPrimitive(self.prim) <NEW_LINE> node = GeomNode("gui_geom") <NEW_LINE> node.addGeom(geom) <NEW_LINE> nodepath = NodePath(nodeName) <NEW_LINE> nodepath.attachNewNode(node) <NEW_LINE> if self.texture is not None: <NEW_LINE> <INDENT> nodepath.setTexture(self.texture) <NEW_LINE> <DEDENT> nodepath.setTransparency(True) <NEW_LINE> nodepath.setDepthWrite(False) <NEW_LINE> nodepath.setDepthTest(False) <NEW_LINE> nodepath.setTwoSided(True) <NEW_LINE> nodepath.setAttrib(LightAttrib.makeAllOff()) <NEW_LINE> nodepath.setBin('fixed', 0) <NEW_LINE> self.uv = None <NEW_LINE> self.vertex = None <NEW_LINE> self.pigment = None <NEW_LINE> self.vdata = None <NEW_LINE> return nodepath
Finishes creating the geometry and returns a node.
625941ba6fece00bbac2d5c7
def reflection_normal(outgoing_ray, incoming_ray): <NEW_LINE> <INDENT> ray1 = normalize(-incoming_ray) <NEW_LINE> ray2 = normalize(outgoing_ray) <NEW_LINE> return normalize((ray1 + ray2)/2)
Returns the normal vector between incoming and outgoing reflection rays.
625941ba3c8af77a43ae3629
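A worked numeric example of the half-vector formula above; normalize() is a stand-in for the helper the function relies on:

import numpy as np

def normalize(v):
    # unit-length version of v
    return v / np.linalg.norm(v)

incoming = np.array([1.0, -1.0, 0.0])   # travelling down onto a mirror
outgoing = np.array([1.0, 1.0, 0.0])    # reflected back up
normal = normalize((normalize(-incoming) + normalize(outgoing)) / 2)
print(normal)                            # -> [0. 1. 0.], the mirror normal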
def _wx_add_view_with(self, dock_control, with_obj): <NEW_LINE> <INDENT> with_item = self._wx_view_dock_window.get_control(with_obj.id) <NEW_LINE> if with_item is None: <NEW_LINE> <INDENT> raise ValueError("Cannot find item %s" % with_obj) <NEW_LINE> <DEDENT> with_item.parent.add(dock_control)
Adds a view in the same region as another item.
625941bac4546d3d9de728bd
def update(force_save=False): <NEW_LINE> <INDENT> need_save = find_changes() <NEW_LINE> need_save = need_save or force_save <NEW_LINE> global persistent_mtime <NEW_LINE> pairs = renpy.loadsave.location.load_persistent() <NEW_LINE> pairs.sort() <NEW_LINE> mtime = persistent_mtime <NEW_LINE> for mtime, other in pairs: <NEW_LINE> <INDENT> if mtime <= persistent_mtime: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if other is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> merge(other) <NEW_LINE> <DEDENT> persistent_mtime = mtime <NEW_LINE> if need_save: <NEW_LINE> <INDENT> save()
Loads the persistent data from persistent files that are newer than persistent_mtime, and merges it into the persistent object.
625941ba16aa5153ce362304