code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
def read_domain_cfg(self):
    """Return the configuration parsed from ./cfg/domain.cfg.

    Exits the process with status 1 when the domain section is missing,
    when no marathon_url is defined for it, or when the file cannot be
    parsed.  Side effect: stores the URL on self.marathon_url.
    """
    try:
        cfg_dir = os.path.abspath(os.path.join('.', 'cfg'))
        config = configparser.ConfigParser({'domain_name': self.domain_name})
        config.read(os.path.join(cfg_dir, 'domain.cfg'))
        if not config.has_section(self.domain_name):
            sys.stderr.write(
                'ERROR: the domain "%s" does not exist in cfg/domain.cfg\n'
                % self.domain_name)
            sys.exit(1)
        self.marathon_url = config.get(self.domain_name, 'marathon_url')
        if self.marathon_url is None:
            sys.stderr.write(
                'ERROR: No marathon_url has been defined for domain "%s" in cfg/domain.cfg\n'
                % self.domain_name)
            sys.exit(1)
    except configparser.Error as exc:
        sys.stderr.write(
            'ERROR: failed to read domain configuration ./cfg/domain.cfg, %s\n'
            % exc)
        sys.exit(1)
    return config
Returns the configuration from ./cfg/domain.cfg
625941b8baa26c4b54cb0f8a
def delete_light(id, api_key=None):
    """Delete a light.

    :param id: Light id to delete
    :type id: int
    :param api_key: accepted for API-signature compatibility; not used here
    :type api_key: str
    :rtype: None
    """
    # BUG FIX: the original bound the query result to `user` and then
    # deleted an undefined name `light`, raising NameError on every call.
    light = db.query(Light).filter_by(id=id).first()
    db.delete(light)
    db.commit()
Delete a light :param id: Light id to delete :type id: int :param api_key: :type api_key: str :rtype: None
625941b8de87d2750b85fbf5
def test_some_rink_logged_in(client, some_rink, some_user):
    """Test rink homepage for a logged in user."""
    login_payload = json.dumps(some_user.json())
    client.post("/testing/api/login", data=login_payload,
                content_type='application/json')
    response = client.get(f"/rink/{some_rink.id}")
    assert b'Book an available time today:' in response.data
Test rink homepage for a logged in user.
625941b8379a373c97cfa9b1
def after_step(context, step):
    """Runs after every step; saves a screenshot when the step failed.

    Passed steps are simply logged.  For failed steps the failure is logged
    with the step's line number and a screenshot is written to the
    'screenshots' directory (created on demand); any screenshot error is
    logged but never propagated.

    :param context: Holds contextual information during the running of tests
    :param step: Holds contextual information about step during the running of tests
    :return: None
    """
    if step.status != "failed":
        context.logger.info(step.name + " : PASSED")
        return
    context.logger.info(step.name + " : FAILED, Line: " + str(step.line))
    try:
        if not os.path.exists('screenshots'):
            os.makedirs('screenshots')
        scenario = context.scenario.name.split("--")[0]
        shot_name = ("screenshots" + os.path.sep
                     + scenario.replace(" ", "_") + "_"
                     + strftime("%Y-%m-%d_%H-%M-%S") + '.png')
        context.driver.save_screenshot(shot_name)
        context.logger.info("Screenshot is captured in file '" + shot_name + "'")
    except Exception as e:
        context.logger.error("Unable to take screenshot! Error: %s" % e, exc_info=True)
Save a screenshot in case of test step failure. This function runs every time after a step is executed. If the step passed, just log it and return; if the step fails and the step is part of a portal scenario, take a screenshot of the failure. The screenshot file name is the scenario name plus a timestamp, with spaces replaced by '_', for example: book_a_roundtrip_ticket_2016-12-01_12-34-32.png :param context: Holds contextual information during the running of tests :param step: Holds contextual information about step during the running of tests :return: None
625941b815fb5d323cde0971
def __init__(self, waypoints, sample_count=10):
    """Defines a trajectory using a sequence of waypoints.

    Example::

        waypoints = [{'latitude': 45, 'longitude': -60},
                     {'latitude': 45, 'longitude': 0}]
        Trajectory(waypoints)

    .. note:: All the waypoint dictionaries must contain the same
       coordinate names.

    Args:
        waypoints: sequence of dicts mapping coordinate names to values.
    Kwargs:
        sample_count: number of sample positions along the trajectory
            (must be >= 2, since samples include both endpoints).
    """
    self.waypoints = waypoints
    self.sample_count = sample_count
    segments = [_Segment(self.waypoints[i], self.waypoints[i + 1])
                for i in range(len(self.waypoints) - 1)]
    self.length = sum(seg.length for seg in segments)
    self.sampled_points = []
    # Evenly spaced arc lengths covering [0, self.length].
    sample_step = self.length / (self.sample_count - 1)
    cur_seg_i = 0
    cur_seg = segments[cur_seg_i]
    len_accum = cur_seg.length
    for p in range(self.sample_count):
        sample_at_len = p * sample_step
        # Advance to the segment containing this arc length.
        # BUG FIX: bound by len(segments) - 1; the original allowed
        # cur_seg_i to reach len(segments) (IndexError) when float
        # round-off left len_accum a hair below the final sample length.
        while len_accum < sample_at_len and cur_seg_i < len(segments) - 1:
            cur_seg_i += 1
            cur_seg = segments[cur_seg_i]
            len_accum += cur_seg.length
        seg_start_len = len_accum - cur_seg.length
        seg_frac = (sample_at_len - seg_start_len) / cur_seg.length
        # Linearly interpolate every coordinate within the segment.
        new_sampled_point = {}
        for key in cur_seg.pts[0].keys():
            seg_coord_delta = cur_seg.pts[1][key] - cur_seg.pts[0][key]
            new_sampled_point[key] = cur_seg.pts[0][key] + seg_frac * seg_coord_delta
        self.sampled_points.append(new_sampled_point)
Defines a trajectory using a sequence of waypoints. For example:: waypoints = [{'latitude': 45, 'longitude': -60}, {'latitude': 45, 'longitude': 0}] Trajectory(waypoints) .. note:: All the waypoint dictionaries must contain the same coordinate names. Args: * waypoints A sequence of dictionaries, mapping coordinate names to values. Kwargs: * sample_count The number of sample positions to use along the trajectory.
625941b8442bda511e8be28d
def addDB(self, year, array):
    """Add extra per-TOD information obtained from the TOD database.

    Populates (and converts to numpy arrays) the columns obs_type,
    obs_detail, obs_drift, alt, az_min, az_max and scan_speed for every
    entry of self.data["todName"] found in the database.

    :param year: year used to select /data/manifest_conf/manifest_<year>.conf
    :param array: array name passed to TODDatabase.select_tods
    """
    from moby2.instruments import actpol
    new_keys = ["obs_type", "obs_detail", "obs_drift",
                "alt", "az_min", "az_max", "scan_speed"]
    for key in new_keys:
        self.data[key] = []
    # BUG FIX: the original extended self.keys with the single string
    # "alt, az_min" instead of the two separate keys "alt" and "az_min",
    # leaving self.keys misaligned with self.types and self.data.
    self.keys.extend(new_keys)
    self.types.extend(["str", "str", "str", "float", "float", "float", "float"])
    db = actpol.TODDatabase(
        config_file='/data/manifest_conf/manifest_%s.conf' % year)
    ids = db.select_tods(array=array, obs_type=["planet", "scan"])
    for id in self.data["todName"]:
        for r in ids:
            if r.basename == id:
                self.data["obs_type"].append(r.obs_type)
                self.data["obs_detail"].append(r.obs_detail)
                self.data["obs_drift"].append(r.obs_drift)
                # Altitude capped at 61 (degrees, presumably -- TODO confirm).
                self.data["alt"].append(min([r.mean_alt, 61]))
                self.data["az_min"].append(r.min_az)
                self.data["az_max"].append(r.max_az)
                # Fall back to 1.5 when the recorded speed is missing or
                # unphysical (negative).
                if r.scan_speed is None or r.scan_speed < 0:
                    self.data["scan_speed"].append(1.5)
                else:
                    self.data["scan_speed"].append(r.scan_speed)
    for key in new_keys:
        self.data[key] = np.array(self.data[key])
@brief Adds extra information about each TOD obtained from the database
625941b8d18da76e23532339
def write(self, cr, uid, ids, vals, context=None):
    """Update the related booking.resource record(s) after a successful write.

    :param cr: database cursor
    :param uid: current user id
    :param ids: ids of the records being written
    :param vals: dict of field values being written
    :param context: optional context dict
    :return: result of the super() write (propagated, possibly replaced by
        the booking.resource write result)
    """
    status = super(resource_mixin, self).write(cr, uid, ids, vals, context=context)
    if status:
        models = self.browse(cr, uid, ids, context=context)
        resource = self.pool.get('booking.resource')
        resource_ids = self._get_resources(cr, uid, ids, context)
        if len(resource_ids) > 0 and len(models) > 0:
            # Values are mapped from the FIRST record only, then pushed to
            # every linked booking resource.
            model = models[0]
            mapping = self._map_values(model, vals)
            status = resource.write(cr, uid, resource_ids, mapping, context=context)
    return status
Update related booking resource
625941b8099cdd3c635f0ac4
def __init__(self, pickle:MyPickle, client_libraries: {str, str}, job_id: str, running_jobs: [Job], **kwargs):
    """Create a new object.

    Args:
        pickle: Used to send and receive messages.
        client_libraries: mapping that must contain a "results" entry.
        job_id: The ID of the job whose results are required.
        running_jobs: A list of all the running jobs.
    """
    super().__init__(pickle)
    self.running_jobs = running_jobs
    self.job_id = job_id
    self.client_results_library = client_libraries["results"]
Create a new object. Args: pickle: Used to send and receive messages. running_jobs: A list of all the running jobs. job_id: The ID of the job whose results are required.
625941b856ac1b37e6264047
def test_index(self):
    """Test getting the list of courses and then pulling up their outlines."""
    index_url = reverse('contentstore.views.index')
    index_response = self.client.get(index_url, {}, HTTP_ACCEPT='text/html')
    parsed_html = lxml.html.fromstring(index_response.content)
    course_link_eles = parsed_html.find_class('course-link')
    for link in course_link_eles:
        # Each course link must be a locator-style URL.
        self.assertRegexpMatches(link.get("href"), r'course/\w+\.\w+\.\w+.*/branch/\w+/block/.*')
        # Following the link must give an outline page whose own course
        # link and courseware menu entry both point back at the same URL.
        outline_response = self.client.get(link.get("href"), {}, HTTP_ACCEPT='text/html')
        outline_parsed = lxml.html.fromstring(outline_response.content)
        outline_link = outline_parsed.find_class('course-link')[0]
        self.assertEqual(outline_link.get("href"), link.get("href"))
        course_menu_link = outline_parsed.find_class('nav-course-courseware-outline')[0]
        self.assertEqual(course_menu_link.find("a").get("href"), link.get("href"))
Test getting the list of courses and then pulling up their outlines
625941b82eb69b55b151c712
def create_record(sample_files, path, i_min, i_max, q_min, q_max):
    """Normalize I/Q samples from CSV files and save them as one tfrecord.

    :param sample_files: iterable of CSV file paths, each with I,Q columns
    :param path: output path, written to path + ".tfrecord"
    :param i_min: minimum used to min-max scale the I channel
    :param i_max: maximum used to min-max scale the I channel
    :param q_min: minimum used to min-max scale the Q channel
    :param q_max: maximum used to min-max scale the Q channel
    :return: None
    """
    # BUG FIX: the original opened path+".tfrecord" with open(..., "w") and
    # then handed fp.name to TFRecordWriter, double-opening (and
    # truncating) the same file; the writer alone is sufficient.
    with tf.python_io.TFRecordWriter(path + ".tfrecord") as writer:
        for sample_file in sample_files:
            sample = pd.read_csv(sample_file, sep=",", names=("I", "Q"))
            # Min-max scale each channel independently into [0, 1].
            sample = (sample - (i_min, q_min)) / (i_max - i_min, q_max - q_min)
            example = create_example(os.path.basename(sample_file), sample)
            writer.write(example.SerializeToString())
Takes a list of dataframes representing samples and saves a tfrecord to path :param sample_files: :param path: :return:
625941b85166f23b2e1a4fc0
def getInterfaceNames():
    """Return a list of RemoteInterface names to which this object knows
    how to respond.

    NOTE(review): abstract stub -- the visible body does nothing and
    returns None; implementers are presumably expected to override it.
    """
    pass
Return a list of RemoteInterface names to which this object knows how to respond.
625941b8097d151d1a222cc4
def parse_number(val, use_dateutilparser=False):
    """Try to auto-detect the numeric type of the value.

    A conversion to int is tried first; if it fails, float, then a list
    parse, optionally a dateutil date parse, and finally str.  A value
    wrapped in matching single or double quotes is returned unquoted.

    :param val: string to parse
    :param use_dateutilparser: also try dateutil.parser.parse
    :raises ValueError: when no conversion succeeds
    """
    if use_dateutilparser:
        funcs = [int, float, parse_list_from_string, dateutil.parser.parse, str]
    else:
        funcs = [int, float, parse_list_from_string, str]
    stripped = val.strip()
    if ((stripped.startswith("'") and stripped.endswith("'"))
            or (stripped.startswith('"') and stripped.endswith('"'))):
        # BUG FIX: the original tested the STRIPPED value but sliced the
        # raw one, so '  "abc"  ' returned ' "abc' instead of 'abc'.
        return stripped[1:-1]
    for f in funcs:
        try:
            return f(val)
        except (ValueError, UnicodeEncodeError, UnicodeDecodeError):
            pass
    raise ValueError('Cannot parse number:', val)
Try to auto-detect the numeric type of the value. First a conversion to int is tried. If this fails float is tried, and if that fails too, unicode() is executed. If this also fails, a ValueError is raised.
625941b8711fe17d825421da
def draw(self, screen, camera):
    """Draw this object with its parallax factor applied through the camera.

    Blits the sprite image when one is set; otherwise fills the polygon
    formed by the object's corners with its solid color.
    """
    if self.hasImage:
        topleft = camera.apply_single((self.rect.x, self.rect.y), self.parallax)
        screen.blit(self.image, topleft)
    else:
        corners = camera.apply(self.get_corners(), self.parallax)
        pygame.draw.polygon(screen, self.color, corners, 0)
draw function with parallax
625941b863f4b57ef0000f89
def on_sound_event(self, event_type, filter=None):
    """Listener for Sound related events.

    :param event_type: Name of the event to register for.
    :param filter: optional filter forwarded unchanged to on_object_event.
    :return: whatever on_object_event returns for the Sound object type.
    """
    return self.on_object_event(event_type, Sound, 'Sound', filter=filter)
Listener for Sound related events :param event_type: Name of the event to register for.
625941b8498bea3a759b9919
def write(self, timestamp):
    """Write the given timestamp to a pickled file and restrict its permissions.

    @type timestamp: datetime.datetime timestamp
    @raise: re-raises (after logging) any error from pickling or from
        setting owner-only permissions on self.filename.
    """
    try:
        # BUG FIX: the original passed an anonymous open(self.filename,
        # "wb") handle straight to pickle.dump and never closed it; the
        # context manager guarantees flush + close.
        with open(self.filename, "wb") as handle:
            pickle.dump(timestamp, handle)
    except Exception:
        logging.exception("Failed to dump timestamp %s to pickle in filename %s",
                          timestamp, self.filename)
        raise
    try:
        # Owner read/write/execute only.
        os.chmod(self.filename, stat.S_IRWXU)
    except Exception:
        logging.exception("Failed to set permissions on filename %s", self.filename)
        raise
Write the given timestamp to a pickled file. @type timestamp: datetime.datetime timestamp @type filename: string representing the filename of the file to write to. Defaults to None, and then it tries the location provided by the configuration, if any, i.e., self.vsc.cfg['ldap_timestamp_filename'] @raise: KeyError if the configuration data was used but no filename information was found
625941b871ff763f4b5494f6
def get_r2_data(self):
    """Fetch world epidemic data, keeping only name / region / confirmed columns.

    :return: DataFrame restricted to the columns ["name", "疫情地区", "确诊"]
    """
    world = self.query("world_epidemic")
    wanted_columns = ["name", "疫情地区", "确诊"]
    return world[wanted_columns]
获取世界各国的疫情数据 :return:
625941b81b99ca400220a918
def _wait_for_idle(self):
    """Wait for the cluster to become idle, according to everyone.

    Polls in 0.1s steps (up to 50 tries per phase): first until the hub's
    history contains every message the client sent, then until no engine
    has queued or assigned tasks; asserts both conditions at the end.
    """
    rc = self.client
    rc.wait(timeout=5)
    msg_ids = set(rc.history)
    hub_hist = rc.hub_history()
    # Phase 1: hub history catches up with the client's sent messages.
    for i in range(50):
        if msg_ids.difference(hub_hist):
            time.sleep(0.1)
            hub_hist = rc.hub_history()
        else:
            break
    self.assertEqual(len(msg_ids.difference(hub_hist)), 0)
    qs = rc.queue_status()
    # Phase 2: all engine queues drain and nothing is left unassigned.
    for i in range(50):
        if qs['unassigned'] or any(qs[eid]['tasks'] + qs[eid]['queue'] for eid in rc.ids):
            time.sleep(0.1)
            qs = rc.queue_status()
        else:
            break
    self.assertEqual(qs['unassigned'], 0)
    for eid in rc.ids:
        self.assertEqual(qs[eid]['tasks'], 0)
        self.assertEqual(qs[eid]['queue'], 0)
wait for the cluster to become idle, according to the everyone.
625941b821bff66bcd6847bd
def addTransition(self, transition):
    """Append a transition to this object's transition list.

    :param transition: transition object to record
    :return: None
    """
    self.transitions.append(transition)
:param transition: :return:
625941b80a50d4780f666cf7
def extract_info_from_source(file):
    """Scan a source file for a ' * #<num>, <url>' style comment line.

    Returns (p_num, p_url, file) for the first matching line, or None when
    no such line exists.
    """
    comment_re = re.compile(r'\s\*\s+(#.*)')
    with open(file, 'r') as handle:
        for line in handle:
            match = comment_re.match(line)
            if not match:
                continue
            parts = match.group(1).split(', ')
            if len(parts) == 2:
                problem_number = int(parts[0].strip()[1:])
                problem_url = parts[1].strip()
                return problem_number, problem_url, file
(p_id, p_url, p_file)
625941b896565a6dacc8f53d
def test_read_count(self):
    """Test a small byte offset raster read using vsicurl s3 url.

    Counts over the same geometry from the s3 URL and the local NLCD file
    must be identical.
    """
    _, s3_counts = geoprocessing.count(self.geom, self.url)
    _, disk_counts = geoprocessing.count(self.geom, NLCD_PATH)
    self.assertDictEqual(s3_counts, disk_counts,
                         "Reading the same offset from a local and s3 " +
                         "file did not produce equivalent results")
Test a small byte offset raster read using vsicurl s3 url
625941b83346ee7daa2b2bd1
def load_pokecrystal_macros():
    """Construct a list of macros that are needed for pokecrystal
    preprocessing, in the order the preprocessor expects them.
    """
    extra_macros = [
        Warp,
        XYTrigger,
        Signpost,
        PeopleEvent,
        DataByteWordMacro,
        ChannelCommand,
        OctaveCommand,
        Note,
    ]
    macros = list(command_classes)
    macros.extend(extra_macros)
    macros.extend(each[1] for each in text_command_classes)
    macros.extend(movement_command_classes)
    macros.extend(music_classes)
    macros.extend(effect_classes)
    macros.extend(battle_animation_classes)
    macros.append(BattleAnimWait)
    return macros
Construct a list of macros that are needed for pokecrystal preprocessing.
625941b891af0d3eaac9b87c
def parse_news(self, response):
    """Scrapy callback: store the raw response of a crawled news page.

    Yields a NewsDB_Item carrying the URL, fetch time, and the pickled
    response headers/body; _id combines the first allowed domain with the
    timestamp (spaces replaced by '@').
    """
    print('===SankeiComSpider parse_news:',response.url)
    now = datetime.now()
    yield NewsDB_Item(
        url=response.url,
        response_time=now,
        response_headers=pickle.dumps(response.headers),
        response_body=pickle.dumps(response.body),
        _id=(str(self.allowed_domains[0]) + '@' + str(now).replace(' ', '@',)),
    )
CallBack関数として使用。取得したレスポンスより、次のurlを生成し追加している。
625941b863f4b57ef0000f8a
def test_no_subclassing(self):
    """Document base class can be instantiated itself (no subclass needed)."""
    document = Document(foo=123)
document base class can be instantiated itself
625941b876e4537e8c3514df
def display_character(window, name, path_template):
    """Screen to select a character: draw the portrait inside a grey frame
    with a '<- name ->' caption underneath it.

    :param window: pygame surface to draw on
    :param name: character name shown in the caption
    :param path_template: image path without the ".200.png" suffix
    """
    border_size = 20
    path = ".".join((path_template, "200", "png"))
    pic = pygame.image.load(path)
    pic_w, pic_h = pic.get_size()
    text = ft_title.render(" ".join(("<-", name, "->")), 1, WHITE)
    text_w, text_h = text.get_size()
    # Grey border around the portrait, centered, shifted up by the caption
    # height so portrait + caption together sit mid-screen.
    pygame.draw.rect(window, GREY, (SCREEN_W/2 - pic_w/2 - border_size, SCREEN_H/2 - pic_h/2 - text_h - border_size, pic_w + border_size*2, pic_h + border_size*2), border_size)
    window.blit(pic, (SCREEN_W/2 - pic_w/2, SCREEN_H/2 - pic_h/2 - text_h))
    window.blit(text, (SCREEN_W/2 - text_w/2, SCREEN_H/2 + pic_h/2 - text_h/2))
Screen to select a character.
625941b8e5267d203edcdb09
@commanddeco
def get(argvs):
    """usage: get (i-th) (number of list page)

    Show the i-th page from the cached gallery page list; when no cached
    list exists, download list page `no` first (module global last_list).
    """
    global last_list
    argv = argvs.split(' ')
    # Default to index 0 when called without arguments.
    if(argv[0] == ''):
        argv[0] = 0
    try:
        page = int(argv[0])
    except ValueError:
        raise ValueError('Please enter valid number.')
    # Optional second argument: which list page to fetch (defaults to 1).
    try:
        no = int(argv[1])
    except IndexError:
        no = 1
    except ValueError:
        raise ValueError('Please enter valid number.')
    if(last_list is None):
        last_list = dl(gall, no, view_recommend, simple_image_name=shortname)
    page = last_list[page]
    if(page is not None):
        page.show()
        imagemode(page.get_image())
usage: get (i-th) (number of list page) get n-th page on the gallery page list
625941b84d74a7450ccd402b
def _prepare_headers(self, content_type=None, extra_headers=None):
    """Put together and return a complete set of headers.

    If ``content_type`` is provided, it will be added as the Content-type
    header.  Any provided ``extra_headers`` will be added last.  You
    probably never need to call this method directly.
    """
    kwargs = dict(content_type=content_type, extra_headers=extra_headers)
    return self.__deprecated_call('_prepare_headers', **kwargs)
Put together and return a complete set of headers. If ``content_type`` is provided, it will be added as the Content-type header. Any provided ``extra_headers`` will be added last. You probably never need to call this method directly.
625941b87047854f462a1275
def anagramMappings(self, A, B):
    """Return, for each element of A, an index at which it occurs in B.

    :type A: List[int]
    :type B: List[int]
    :rtype: List[int]

    Matches the original B.index(x) behavior (first occurrence) but runs
    in O(len(A) + len(B)) instead of O(len(A) * len(B)).
    """
    first_index = {}
    for i, b in enumerate(B):
        # setdefault keeps the FIRST index for duplicates, like B.index.
        first_index.setdefault(b, i)
    return [first_index[x] for x in A]
:type A: List[int] :type B: List[int] :rtype: List[int]
625941b850812a4eaa59c18d
def test_madevent_ufo_aloha(self):
    """Test MadEvent output with UFO/ALOHA.

    Generates e+ e- > e+ e- with the SM model, checks that the expected
    ALOHA/card files are produced, then builds the Source tree, gensym and
    the madevent binary, asserting each step succeeds.
    """
    # Start from a clean output directory.
    if os.path.isdir(self.out_dir):
        shutil.rmtree(self.out_dir)
    self.do('import model sm')
    self.do('set group_subprocesses False')
    self.do('generate e+ e- > e+ e-')
    self.do('output %s ' % self.out_dir)
    # Every ALOHA routine needed by this process must be exported.
    files = ['aloha_file.inc', 'FFV1_0.f', 'FFV1P0_3.f', 'FFV2_0.f', 'FFV2_3.f', 'FFV4_0.f', 'FFV4_3.f', 'makefile', 'aloha_functions.f']
    for f in files:
        self.assertTrue(os.path.isfile(os.path.join(self.out_dir, 'Source', 'DHELAS', f)), '%s file is not in aloha directory' % f)
    self.check_aloha_file()
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'Cards', 'ident_card.dat')))
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'Cards', 'run_card_default.dat')))
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'Cards', 'plot_card_default.dat')))
    devnull = open(os.devnull, 'w')
    # Build the Source tree; all component libraries must appear.
    status = subprocess.call(['make'], stdout=devnull, cwd=os.path.join(self.out_dir, 'Source'))
    self.assertEqual(status, 0)
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'lib', 'libdhelas.a')))
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'lib', 'libmodel.a')))
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'lib', 'libgeneric.a')))
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'lib', 'libcernlib.a')))
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'lib', 'libdsample.a')))
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'lib', 'libpdf.a')))
    # Build and run gensym for the single subprocess directory.
    status = subprocess.call(['make', 'gensym'], stdout=devnull, cwd=os.path.join(self.out_dir, 'SubProcesses', 'P0_epem_epem'))
    self.assertEqual(status, 0)
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'SubProcesses', 'P0_epem_epem', 'gensym')))
    proc = subprocess.Popen('./gensym', stdout=devnull, stdin=subprocess.PIPE, cwd=os.path.join(self.out_dir, 'SubProcesses', 'P0_epem_epem'), shell=True)
    # gensym reads its run parameters from stdin.
    proc.communicate('100 2 0.1 .false.\n')
    self.assertEqual(proc.returncode, 0)
    # Finally the madevent executable itself must build.
    status = subprocess.call(['make', 'madevent'], stdout=devnull, cwd=os.path.join(self.out_dir, 'SubProcesses', 'P0_epem_epem'))
    self.assertEqual(status, 0)
    self.assertTrue(os.path.exists(os.path.join(self.out_dir, 'SubProcesses', 'P0_epem_epem', 'madevent')))
Test MadEvent output with UFO/ALOHA
625941b8a8370b7717052708
def create_task(self, coro, description, handle_exceptions=True):
    """Wrapper around asyncio.create_task - takes care of canceling tasks on shutdown.

    :param coro: coroutine to schedule
    :param description: human-readable label used in log messages
    :param handle_exceptions: when True, CancelledError and other
        exceptions are logged and swallowed; when False they propagate
    :return: the created asyncio.Task
    """
    async def task_wrapper(task_id):
        # Awaits the caller's coroutine; the finally clause guarantees the
        # task is removed from the self._tasks registry in every outcome.
        try:
            result = await coro
            logger.debug("Task manager %s: finished task %d (%s)", self._name, task_id, description)
            return result
        except asyncio.CancelledError:
            if handle_exceptions:
                logger.debug("Task manager %s: canceled task %d (%s)", self._name, task_id, description)
            else:
                raise
        except Exception:
            if handle_exceptions:
                logger.exception("Task manager %s: exception raised in task %d (%s)", self._name, task_id, description)
            else:
                raise
        finally:
            del self._tasks[task_id]
    task_id = next(self._task_counter)
    logger.debug("Task manager %s: creating task %d (%s)", self._name, task_id, description)
    task = asyncio.create_task(task_wrapper(task_id))
    # Registered after creation; the wrapper body only runs once the event
    # loop schedules it, so the registry entry exists before the finally.
    self._tasks[task_id] = task
    return task
Wrapper around asyncio.create_task - takes care of canceling tasks on shutdown
625941b83539df3088e2e1b3
def extended_state_cb(data):
    """Callback for the FCU extended state topic.

    Maps data.landed_state onto the module-global drone_status["landed"]:
    index 0 -> None, 1 -> True, 2 -> False (presumably UNDEFINED /
    ON_GROUND / IN_AIR per mavros_msgs.msg.ExtendedState -- TODO confirm).

    @param data: extended state FCU
    @type data: mavros_msgs.msg.ExtendedState
    """
    global drone_status
    landed_states = [None, True, False]
    drone_status["landed"] = landed_states[data.landed_state]
Callback extended state. @param data: extended state FCU @type data: mavros_msgs.msg.ExtendedState
625941b855399d3f0558851b
def start_up_mgr(dbinfo, mgrinfo, bootstrap_tag):
    """Configure and start MySQL group replication on one database node.

    :param dbinfo: dict with "ipaddr" and "port" of the MySQL instance
    :param mgrinfo: dict with "mgr_user" (user@host form) and "mgr_password"
    :param bootstrap_tag: 1 to bootstrap a new group, anything else to join
    :return: None
    """
    mgr_user = mgrinfo['mgr_user']
    mgr_password = mgrinfo['mgr_password']
    # SECURITY NOTE(review): admin credentials are hard-coded; they should
    # come from configuration or a secrets store.
    USER = "root"
    PASS = "mysql"
    mysqlcon = pymysql.connect(user=USER, password=PASS, host=dbinfo["ipaddr"], port=int(dbinfo["port"]))
    try:
        cur = mysqlcon.cursor()
        CR_USER = "CREATE USER " + mgr_user + " IDENTIFIED BY " + "'" + mgr_password + "'"
        GR_USER = "GRANT REPLICATION SLAVE ON *.* TO " + mgr_user
        MASTER_USER = mgr_user.split("@")[0]
        CG_MASTER = "CHANGE MASTER TO MASTER_USER=" + MASTER_USER + ", MASTER_PASSWORD='" + mgr_password + "' FOR CHANNEL 'group_replication_recovery'"
        # Disable binary logging while creating the replication account so
        # these statements are not themselves replicated.
        cur.execute("SET SQL_LOG_BIN=0")
        cur.execute(CR_USER)
        cur.execute(GR_USER)
        cur.execute(CG_MASTER)
        cur.execute("FLUSH PRIVILEGES")
        cur.execute("SET SQL_LOG_BIN=1")
        cur.execute("INSTALL PLUGIN group_replication SONAME 'group_replication.so'")
        if bootstrap_tag == 1:
            cur.execute("SET GLOBAL group_replication_bootstrap_group=ON ")
        else:
            cur.execute("SET global group_replication_allow_local_disjoint_gtids_join=ON")
        cur.execute("START GROUP_REPLICATION")
        # BUG FIX: the original executed this statement twice in a row.
        cur.execute("SET GLOBAL group_replication_bootstrap_group=off")
        cur.close()
    finally:
        # ROBUSTNESS: release the connection even when a statement fails.
        mysqlcon.close()
    logger.info("The MySQL group replication on " + dbinfo["ipaddr"] + " database port " + dbinfo["port"] + " online.")
:param dbinfo: dbinfo is a dict. :param mgrinfo: mgrinfo is a dict. :param bootstrap_tag: bootstrap_tag is a number. :return:
625941b8c4546d3d9de72898
def GetUpper(self):
    """GetUpper(self) -> unsigned short

    SWIG-generated wrapper forwarding to the native ITK
    ConnectedThresholdImageFilter (IUS2 instantiation) binding.
    """
    return _itkConnectedThresholdImageFilterPython.itkConnectedThresholdImageFilterIUS2IUS2_GetUpper(self)
GetUpper(self) -> unsigned short
625941b899cbb53fe6792a4f
def stores(self, system=False, remote=False, parse=True):
    """Return all :class:`stores <Store>` on server node

    :param system: include system stores
    :param remote: include stores on other nodes (NOTE(review): not used
        in the visible body -- confirm intended behavior)
    :param parse: honor an explicit ``--stores`` command-line option
    """
    # Fast path: explicit store GUIDs supplied on the command line.
    if parse and getattr(self.options, 'stores', None):
        for guid in self.options.stores:
            if guid == 'public':
                if not self.public_store:
                    raise NotFoundError("no public store")
                yield self.public_store
            else:
                yield Store(guid, server=self)
        return
    # Otherwise enumerate every mailbox via the EMS mailbox table.
    table = self.ems.GetMailboxTable(None, 0)
    table.SetColumns([PR_DISPLAY_NAME_W, PR_ENTRYID], 0)
    for row in table.QueryRows(-1, 0):
        store = Store(mapiobj=self.mapisession.OpenMsgStore(0, row[1].Value, None, MDB_WRITE), server=self)
        # Skip the SYSTEM user's store unless system stores were requested.
        if system or store.public or (store.user and store.user.name != 'SYSTEM'):
            yield store
Return all :class:`stores <Store>` on server node :param system: include system stores :param remote: include stores on other nodes
625941b8236d856c2ad44646
def load_data(self):
    """Parse this object's PDB file and return its DSSP data.

    Builds the path from m_folder + m_id + ".pdb", parses the structure,
    runs DSSP on its first model, and returns the result in a one-element
    list.
    """
    pdb_path = self.m_folder + self.m_id + ".pdb"
    parser = PDBParser()
    first_model = parser.get_structure("prot", pdb_path)[0]
    return [DSSP(first_model, pdb_path)]
Load all the pdb files in the folder. Create BIO.PDB.DSSP.DSSP object to stock each data's file
625941b8656771135c3eb6da
def correspondencia_Cadenas(cadenaBase, cadenaComplem):
    """(str, str) -> bool

    Return True when cadenaComplem is exactly the DNA complement of
    cadenaBase, False otherwise.

    >>> correspondencia_Cadenas("AT", "TA")
    True
    >>> correspondencia_Cadenas("TA", "TG")
    False
    >>> correspondencia_Cadenas("GC", "CG")
    True

    :param cadenaBase: base DNA strand to compare
    :param cadenaComplem: secondary DNA strand to compare
    :return: True when the strands correspond, False when they do not
    """
    return retorna_complemento(cadenaBase) == cadenaComplem
(str,str)-> boolean #casos de prueba >>> correspondencia_Cadenas("AT","TA") True >>> correspondencia_Cadenas("TA","TG") False >>> correspondencia_Cadenas("GC","CG") True :param cadenaBase: cadena base DE ADN A COMPARAR :param cadenaComplem: CADENA SECUNDARIA DE ADN A COMPARAR :return: TRUE SI LA CADENA ES CORRESPONDIENTE Y FALSE SI NO LO ES
625941b80fa83653e4656e25
def test_009(self):
    """Words constructor: a non-bool `demonstrative` must raise TypeError."""
    with pytest.raises(TypeError):
        words = Words("one", demonstrative=12)
Words constructor = demonstrative is not a bool
625941b86fece00bbac2d5a3
def setCompPolicyForCell(*args):
    """setCompPolicyForCell(int val)

    SWIG-generated wrapper forwarding to the native MEDLoader binding.
    """
    return _ParaMEDMEM.MEDLoader_setCompPolicyForCell(*args)
setCompPolicyForCell(int val) 1
625941b8187af65679ca4f85
def exists(self, client=None):
    """Determine whether or not this blob exists.

    If :attr:`user_project` is set on the bucket, bills the API request
    to that project.

    :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType``
    :param client: Optional. The client to use.  If not passed, falls back
                   to the ``client`` stored on the blob's bucket.
    :rtype: bool
    :returns: True if the blob exists in Cloud Storage.
    """
    client = self._require_client(client)
    query_params = self._query_params
    # Request only the name field -- the cheapest possible existence probe.
    query_params["fields"] = "name"
    try:
        client._connection.api_request(
            method="GET",
            path=self.path,
            query_params=query_params,
            _target_object=None,
        )
    except NotFound:
        return False
    else:
        return True
Determines whether or not this blob exists. If :attr:`user_project` is set on the bucket, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Client` or ``NoneType`` :param client: Optional. The client to use. If not passed, falls back to the ``client`` stored on the blob's bucket. :rtype: bool :returns: True if the blob exists in Cloud Storage.
625941b83eb6a72ae02ec343
def closeConnection(self):
    """Close the underlying driver connection.

    :raises Exception: wrapping any error raised by self.driver.close().
    """
    try:
        self.driver.close()
    except Exception as e:
        # IMPROVEMENT: chain the original exception (`from e`) so the full
        # underlying traceback is preserved for debugging.
        raise Exception(f"(closeConnection) - Something went wrong on closing connection.\n" + str(e)) from e
This function closes the connection
625941b8aad79263cf3908a3
def convert_postgres_to_fb_schema(schema):
    """Convert a schema obtained from PostgreSQL into its Firebird representation.

    Firebird identifiers are limited to 31 characters, so every domain,
    table, field, index and constraint name is truncated; domain data
    types are mapped through fb_data_type().  The input schema is not
    modified.
    """
    fb_schema = copy.deepcopy(schema)
    for dom in fb_schema.domains:
        dom.name = dom.name[0:31] if dom.name else None
        dom.data_type = fb_data_type(dom.data_type)
    for table in fb_schema.tables.values():
        table.name = table.name[0:31]
        for field in table.fields.values():
            field.name = field.name[0:31]
        for index in table.indices.values():
            index.name = index.name[0:31]
        for constraint in table.constraints.values():
            constraint.name = constraint.name[0:31]
    return fb_schema
Метод преобразует схему, полученную из PostgreSQL, в представление схемы в FB.
625941b8d10714528d5ffb47
def wait(debug):
    """Debug tool: pause for operator input when debugging is enabled.

    :param debug: when falsy, return False immediately without prompting
    :return: True when the operator answers 'q'/'Q' (quit), else False
    """
    if not debug:
        return False
    answer = input("<CR>=Continue, q=Quit ")
    return answer.upper() == 'Q'
Debug tool
625941b88e71fb1e9831d615
@click.argument('slides_markdown_file', type=click.Path(exists=True))
@click.option('--html-template', type=click.Path(exists=True), default=DEFAULT_HTML_FILE, help='Jinja2 template file for the presentation.')
@click.option('--css-file', '-c', type=click.Path(exists=True), default=DEFAULT_CSS_FILE, help='Custom CSS to be included inline.')
@click.option('--output-file', '-o', type=click.File('w', encoding='utf8'), default=sys.stdout, help='Write the output to a file instead of STDOUT.')
@click.option('--title', '-t', default='Presentation', help='HTML title of the presentation.')
@click.option('--verbose', '-v', is_flag=True, help='Output debugging info.')
@click.version_option()
@click.command()
def remarker(slides_markdown_file, html_template, css_file, output_file, title, verbose):
    """Generate a Remark.js HTML presentation from input Markdown and
    optional custom CSS.
    """
    # Echo the resolved inputs to stderr when --verbose is set.
    if verbose:
        click.echo('Input:', err=True)
        click.echo('slides_markdown_file: {}'.format(slides_markdown_file), err=True)
        click.echo('html-template: {}'.format(html_template), err=True)
        click.echo('css-file: {}'.format(css_file), err=True)
        click.echo('Output file: {}'.format(output_file), err=True)
    template_html = loadfile(html_template)
    slide_markdown = loadfile(slides_markdown_file)
    stylesheet_html = loadfile(css_file)
    output_html = presentation.generate_html(template_html, slide_markdown, stylesheet_html, title=title)
    output_file.write(output_html)
Generate a Remark.js HTML presentation from input Markdown and optional custom CSS.
625941b86aa9bd52df036c0a
def validateSiteSchema(config):
    """Validate that our site XML matches our custom schema.

    Assumes site.xml has been preprocessed.
    """
    log.info('Validating site XML against schema...')
    schema_path = os.path.join(config.schemadir, 'site.xsd')
    xmlSchemaValidate(config, schema=schema_path, target=config.fullsitexml)
Validate that our site XML matches our custom schema. Assumes site.xml has been preprocessed.
625941b89f2886367277a6f9
def ExportCreateRebarMesh(self,pRebarMesh):
    """ExportCreateRebarMesh(self: DelegateFake, pRebarMesh: dotRebarMesh_t) -> (int, dotRebarMesh_t)

    NOTE(review): stub -- the visible body does nothing and returns None;
    presumably generated interop scaffolding to be overridden.
    """
    pass
ExportCreateRebarMesh(self: DelegateFake,pRebarMesh: dotRebarMesh_t) -> (int,dotRebarMesh_t)
625941b86fb2d068a760ef0a
def to_daccess(daccess): <NEW_LINE> <INDENT> if daccess is None: <NEW_LINE> <INDENT> daccess = DataAccess.ReadWrite <NEW_LINE> <DEDENT> elif isinstance(daccess, (str, unicode)): <NEW_LINE> <INDENT> daccess = DACCESS_MAP.get( from_access_str(daccess), DataAccess.ReadWrite) <NEW_LINE> <DEDENT> return daccess
Transforms the given access parameter (string or None) into a :obj:`DataAccess`. If None is given returns :obj:`DataAccess.ReadWrite` :param daccess: the access to be transformed :type daccess: str :return: a :obj:`DataAccess` for the given access :rtype: :obj:`DataAccess`
625941b84e4d5625662d4245
def set_outputs(self, analog_output): <NEW_LINE> <INDENT> with AutoUpdater._lock: <NEW_LINE> <INDENT> if isinstance(analog_output, Bus) and (analog_output.analog): <NEW_LINE> <INDENT> AutoUpdater.remove_link( self.outputs) <NEW_LINE> AutoUpdater.add_link( self.outputs, analog_output, bind_to=D2A.trigger, params=[self]) <NEW_LINE> <DEDENT> elif isinstance(analog_output, Connector) and (analog_output.analog): <NEW_LINE> <INDENT> AutoUpdater.remove_link( self.outputs) <NEW_LINE> AutoUpdater.add_link( self.outputs, [analog_output], bind_to=D2A.trigger, params=[self]) <NEW_LINE> raise Exception( "ERROR: Invalid output. Output must only be a analog Bus / Connector instance")
To link the output of the D2A instance with the external output
625941b867a9b606de4a7d25
def keys(self): <NEW_LINE> <INDENT> return GramGenerator(self.path, self.elem, keys=True, ignore_hash=self.ignore_hash)
Returns a :class:`GramGenerator` that produces only keys.
625941b8851cf427c661a381
def delete_current_notebook(self): <NEW_LINE> <INDENT> panel = self.GetParent() <NEW_LINE> nb = panel.GetParent() <NEW_LINE> current = nb.GetSelection() <NEW_LINE> count = nb.GetPageCount() <NEW_LINE> if count > 1: <NEW_LINE> <INDENT> for i in range(count-1, current-1, -1): <NEW_LINE> <INDENT> nb.DeletePage(i) <NEW_LINE> <DEDENT> nb.GetCurrentPage().grid.SetFocus() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> frame = nb.GetParent() <NEW_LINE> frame.SetStatusText("This is the last level!")
deletes the current notebook tab
625941b84f88993c3716bedc
def insert_table(self, table): <NEW_LINE> <INDENT> if table == "Shape": <NEW_LINE> <INDENT> return self.insert_shapes(table) <NEW_LINE> <DEDENT> df = self.dict_df[table] <NEW_LINE> try: <NEW_LINE> <INDENT> self.db.insert_dataframe(df, table) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.log_error(e, table) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.log(logging.INFO, "{} insert".format(table))
Insert df.
625941b99b70327d1c4e0c3c
def train(self, X, y, learning_rate=1e-3, reg=1e-5, num_iters=100, batch_size=200, verbose=False): <NEW_LINE> <INDENT> num_train, dim = X.shape <NEW_LINE> num_classes = np.max(y) + 1 <NEW_LINE> if self.W is None: <NEW_LINE> <INDENT> self.W = 0.001 * np.random.randn(dim, num_classes) <NEW_LINE> <DEDENT> loss_history = [] <NEW_LINE> for it in xrange(num_iters): <NEW_LINE> <INDENT> X_batch = None <NEW_LINE> y_batch = None <NEW_LINE> batch_ix = np.random.choice(np.arange(num_train), size = batch_size) <NEW_LINE> X_batch = X[batch_ix, :] <NEW_LINE> y_batch = y[batch_ix] <NEW_LINE> loss, grad = self.loss(X_batch, y_batch, reg) <NEW_LINE> loss_history.append(loss) <NEW_LINE> self.W -= learning_rate * grad <NEW_LINE> if verbose and it % 100 == 0: <NEW_LINE> <INDENT> print('iteration %d / %d: loss %f' % (it, num_iters, loss)) <NEW_LINE> <DEDENT> <DEDENT> return loss_history
Train this linear classifier using stochastic gradient descent. Inputs: - X: A numpy array of shape (N, D) containing training data; there are N training samples each of dimension D. - y: A numpy array of shape (N,) containing training labels; y[i] = c means that X[i] has label 0 <= c < C for C classes. - learning_rate: (float) learning rate for optimization. - reg: (float) regularization strength. - num_iters: (integer) number of steps to take when optimizing - batch_size: (integer) number of training examples to use at each step. - verbose: (boolean) If true, print progress during optimization. Outputs: A list containing the value of the loss function at each training iteration.
625941b8be8e80087fb20ab8
def Clear(self): <NEW_LINE> <INDENT> pass
Removes all records. :return: The result status.
625941b9cdde0d52a9e52e97
def Generate(self): <NEW_LINE> <INDENT> c = Code() <NEW_LINE> (c.Append(self._GetHeader(sys.argv[0], self._namespace.name)) .Append()) <NEW_LINE> c.Cblock(self._GenerateNamespaceObject()) <NEW_LINE> for js_type in self._namespace.types.values(): <NEW_LINE> <INDENT> c.Cblock(self._GenerateType(js_type)) <NEW_LINE> <DEDENT> for function in self._namespace.functions.values(): <NEW_LINE> <INDENT> c.Cblock(self._GenerateFunction(function)) <NEW_LINE> <DEDENT> for event in self._namespace.events.values(): <NEW_LINE> <INDENT> c.Cblock(self._GenerateEvent(event)) <NEW_LINE> <DEDENT> c.TrimTrailingNewlines() <NEW_LINE> return c
Generates a Code object with the schema for the entire namespace.
625941b98e71fb1e9831d616
def test_resource_owner_password_credentials(self): <NEW_LINE> <INDENT> from oauth2.tokenview import OAuth2TokenView <NEW_LINE> data = {'grant_type': 'password'} <NEW_LINE> request = Authentication.factory.post('/', data=data, HTTP_X_REAL_IP='127.0.0.1') <NEW_LINE> response = OAuth2TokenView.as_view()(request) <NEW_LINE> self.assertEqual(response.status_code, 400) <NEW_LINE> self.assertEqual(response.content, json.dumps({'error': 'invalid_request'})) <NEW_LINE> data.update({'username': 'admin_npg', 'password': 'foobar'}) <NEW_LINE> request = Authentication.factory.post('/', data=data, HTTP_X_REAL_IP='127.0.0.1') <NEW_LINE> response = OAuth2TokenView.as_view()(request) <NEW_LINE> self.assertEqual(response.status_code, 400) <NEW_LINE> self.assertEqual(response.content, json.dumps({'error': 'invalid_client'})) <NEW_LINE> data.update({'username': 'admin_na', 'password': 'admin_na'}) <NEW_LINE> request = Authentication.factory.post('/', data=data, HTTP_X_REAL_IP='127.0.0.1') <NEW_LINE> response = OAuth2TokenView.as_view()(request) <NEW_LINE> self.assertEqual(response.status_code, 400) <NEW_LINE> self.assertEqual(response.content, json.dumps({'error': 'inactive_user'})) <NEW_LINE> data.update({'username': 'admin_npg', 'password': 'admin_npg'}) <NEW_LINE> request = Authentication.factory.post('/', data=data, HTTP_X_REAL_IP='127.0.0.1') <NEW_LINE> response = OAuth2TokenView.as_view()(request) <NEW_LINE> self.assertEqual(response.status_code, 400) <NEW_LINE> self.assertEqual(response.content, json.dumps({'error': 'unauthorized_client'})) <NEW_LINE> data.update({'username': 'admin', 'password': 'admin'}) <NEW_LINE> request = Authentication.factory.post('/', data=data, HTTP_X_REAL_IP='127.0.0.1') <NEW_LINE> response = OAuth2TokenView.as_view()(request) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> response_content = json.loads(response.content) <NEW_LINE> self.assertIn('access_token', response_content) <NEW_LINE> result = {'access_token': 
response_content['access_token'], 'token_type': 'bearer', 'expires_in': 86400} <NEW_LINE> self.assertDictEqual(response_content, result)
Validates the Resource Owner Password Credentials
625941b91f037a2d8b946067
@pytest.mark.idempotent_id('eb1e03e4-e0b4-42b8-8a8f-36d500929639') <NEW_LINE> def test_delete_container(cli_swift_steps, container_steps): <NEW_LINE> <INDENT> container_name = next(utils.generate_ids()) <NEW_LINE> container_steps.create(name=container_name) <NEW_LINE> cli_swift_steps.delete(container_name)
**Scenario**: Check container have been deleted successfully. **Steps:** #. Create container via Swift API #. Remove this container using Swift CLI
625941b93c8af77a43ae3607
def reset_content_type_for_mobile(request, response): <NEW_LINE> <INDENT> ct, doctype = get_content_type_and_doctype(request) <NEW_LINE> response.setHeader("content-type", ct)
http://www.google.com/support/webmasters/bin/answer.py?answer=40348
625941b9ec188e330fd5a60f
def minDeletionSize(self, A: List[str]) -> int: <NEW_LINE> <INDENT> a_size = len(A) <NEW_LINE> if a_size == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> n = len(A[0]) <NEW_LINE> ret = 0 <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> flag = False <NEW_LINE> for j in range(a_size - 1): <NEW_LINE> <INDENT> if A[j][i] > A[j+1][i]: <NEW_LINE> <INDENT> flag = True <NEW_LINE> break <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> if flag: <NEW_LINE> <INDENT> ret += 1 <NEW_LINE> <DEDENT> <DEDENT> return ret
思路:暴力法,一列列遍历,只需要比较相邻两个即可(这点有些像贪心算法) :param A: :return:
625941b97cff6e4e811177ee
def get_fingerprint_detail(): <NEW_LINE> <INDENT> print("Getting image...", end="", flush=True) <NEW_LINE> i = finger.get_image() <NEW_LINE> if i == adafruit_fingerprint.OK: <NEW_LINE> <INDENT> print("Image taken") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if i == adafruit_fingerprint.NOFINGER: <NEW_LINE> <INDENT> print("No finger detected") <NEW_LINE> <DEDENT> elif i == adafruit_fingerprint.IMAGEFAIL: <NEW_LINE> <INDENT> print("Imaging error") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Other error") <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> print("Templating...", end="", flush=True) <NEW_LINE> i = finger.image_2_tz(1) <NEW_LINE> if i == adafruit_fingerprint.OK: <NEW_LINE> <INDENT> print("Templated") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if i == adafruit_fingerprint.IMAGEMESS: <NEW_LINE> <INDENT> print("Image too messy") <NEW_LINE> <DEDENT> elif i == adafruit_fingerprint.FEATUREFAIL: <NEW_LINE> <INDENT> print("Could not identify features") <NEW_LINE> <DEDENT> elif i == adafruit_fingerprint.INVALIDIMAGE: <NEW_LINE> <INDENT> print("Image invalid") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Other error") <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> print("Searching...", "end=", flush=True) <NEW_LINE> i = finger.finger_fast_search() <NEW_LINE> if i == adafruit_fingerprint.OK: <NEW_LINE> <INDENT> print("Found fingerprint!") <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if i == adafruit_fingerprint.NOTFOUND: <NEW_LINE> <INDENT> print("No match found") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Other error") <NEW_LINE> <DEDENT> return False
Get a fingerprint image, template it, and see if it matches! This time, print out each error instead of just returning on failure
625941b950485f2cf553cc01
def owes_money(self, acct_id): <NEW_LINE> <INDENT> acct = self.get_acct(acct_id) <NEW_LINE> if acct: <NEW_LINE> <INDENT> return float(acct["due"]) > float(acct["paid"]) <NEW_LINE> <DEDENT> return None
Return TRUE if account owes money
625941b945492302aab5e129
def test_loading_manual_tof_in_micros_units(self): <NEW_LINE> <INDENT> _tof_array = np.array([1.e6, 2.e6, 3.e6, 4.e6, 5.e6, 6.e6, 7.e6, 8.e6, 9.e6]) <NEW_LINE> _tof_units = 'micros' <NEW_LINE> _tof_handler = TOF(tof_array = _tof_array, units = _tof_units) <NEW_LINE> self.assertTrue(all(_tof_array*1.e-6 == _tof_handler.tof_array))
Assert in TOF - TOF(micros) array is correctly manually loaded and units are converted
625941b9d7e4931a7ee9dd84
def text_from_pdf(raw_text): <NEW_LINE> <INDENT> ignore_terms = ['EINECS No :', 'EINECS No '] <NEW_LINE> ingredient_pattern = re.compile('Natural\s+occurrence:(.+)') <NEW_LINE> compound_pattern = re.compile('(.*[A-Z]{6,20}.*)') <NEW_LINE> blobs = raw_text.split('\n\n') <NEW_LINE> flavor_dict = defaultdict(list) <NEW_LINE> comp_q = deque([], maxlen=3) <NEW_LINE> ing_q = deque([], maxlen=2) <NEW_LINE> for blob in blobs: <NEW_LINE> <INDENT> text = blob.replace('\n', '') <NEW_LINE> potential_ing = re.findall(ingredient_pattern, text) <NEW_LINE> potential_comp = re.findall(compound_pattern, text) <NEW_LINE> if potential_comp and potential_comp[0] not in ignore_terms: <NEW_LINE> <INDENT> comp_q.append(potential_comp[0].strip()) <NEW_LINE> <DEDENT> if potential_ing: <NEW_LINE> <INDENT> ing_q.append(potential_ing[0].strip()) <NEW_LINE> try: <NEW_LINE> <INDENT> c1 = comp_q[0] <NEW_LINE> c2 = comp_q[1] <NEW_LINE> c3 = comp_q[2] <NEW_LINE> if c2 == c1: <NEW_LINE> <INDENT> flavor_dict[c2] = ing_q[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flavor_dict[c3] = ing_q[-1] <NEW_LINE> <DEDENT> <DEDENT> except IndexError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> remove_terms = ['Not reported found in nature', 'n/a', 'Reported found in nature', 'Data not found', 'Reported not found in nature', 'No data found '] <NEW_LINE> flav_comp_dict = {k: v for k, v in flavor_dict.iteritems() if v not in remove_terms} <NEW_LINE> return flav_comp_dict
Creates a chemical compound to ingredient mapping from fenaroli's flavors. The ingredients are not individually extracted here, and instead are returned as a single combined string :param raw_text: single string containing entire doc :return: dictionary mapping compound:ingredient list
625941b9460517430c393ff7
def nested_flatten(t): <NEW_LINE> <INDENT> if isinstance(t, (list, tuple)): <NEW_LINE> <INDENT> for x in t: <NEW_LINE> <INDENT> yield from nested_flatten(x) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(t, dict): <NEW_LINE> <INDENT> for k, v in sorted(t.items()): <NEW_LINE> <INDENT> yield from nested_flatten(v) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> yield t
Turn nested list/tuple/dict into a flat iterator.
625941b9c432627299f04aad
def test_roc_points(self): <NEW_LINE> <INDENT> tn_obs = 0 <NEW_LINE> tn_exp = 0 <NEW_LINE> fp_obs = 1 <NEW_LINE> fp_exp = 0 <NEW_LINE> tp_obs = 1 <NEW_LINE> tp_exp = 1 <NEW_LINE> fn_obs = 0 <NEW_LINE> fn_exp = 1 <NEW_LINE> obs = [tp_obs] * 63 + [fp_obs] *28 + [fn_obs] * 37 + [tn_obs]*72 <NEW_LINE> exp = [tp_exp] * 63 + [fp_exp] *28 + [fn_exp] * 37 + [tn_exp]*72 <NEW_LINE> trial_a_results = confusion_matrix_from_data(obs,exp) <NEW_LINE> self.assertEqual(trial_a_results,(63,28,37,72)) <NEW_LINE> trial_a = (obs,exp) <NEW_LINE> obs = [tp_obs] * 77 + [fp_obs] *77 + [fn_obs] * 23 + [tn_obs]*23 <NEW_LINE> exp = [tp_exp] * 77 + [fp_exp] *77 + [fn_exp] * 23 + [tn_exp]*23 <NEW_LINE> trial_b_results = confusion_matrix_from_data(obs,exp) <NEW_LINE> self.assertEqual(trial_b_results,(77,77,23,23)) <NEW_LINE> trial_b = (obs,exp) <NEW_LINE> obs = [tp_obs] * 24 + [fp_obs] *88 + [fn_obs] * 76 + [tn_obs]*12 <NEW_LINE> exp = [tp_exp] * 24 + [fp_exp] *88 + [fn_exp] * 76 + [tn_exp]*12 <NEW_LINE> trial_c_results = confusion_matrix_from_data(obs,exp) <NEW_LINE> self.assertEqual(trial_c_results,(24,88,76,12)) <NEW_LINE> trial_c_results = calculate_accuracy_stats_from_observations(obs,exp) <NEW_LINE> self.assertFloatEqual(trial_c_results["false_positive_rate"],0.88) <NEW_LINE> trial_c = (obs,exp) <NEW_LINE> trials = [trial_a, trial_b,trial_c] <NEW_LINE> obs_points = roc_points(trials) <NEW_LINE> exp_points = [(0.28,0.63),(0.77,0.77),(0.88,0.24)] <NEW_LINE> self.assertFloatEqual(obs_points,exp_points)
roc_points should calculate the points for a Receiver Operating Characteristics curve
625941b9462c4b4f79d1d539
def tier(self, *tiers): <NEW_LINE> <INDENT> results = [] <NEW_LINE> for line in self.line_map: <NEW_LINE> <INDENT> if line.tier in tiers: <NEW_LINE> <INDENT> results.append(line) <NEW_LINE> <DEDENT> if line.multi_line_parent and line.multi_line_parent.tier in tiers: <NEW_LINE> <INDENT> results.append(line) <NEW_LINE> <DEDENT> <DEDENT> return LineRange(results)
Get all ClanLines with specified tiers Args: self: *tiers: a list of tiers, e.g. ["MAN", "FAN", "CHN"] Returns: LineRange with all the lines
625941b90a366e3fb873e680
def testscenario2(self): <NEW_LINE> <INDENT> user_input = ["105", "25"] <NEW_LINE> expected = "Please try again as this percentage is not valid.\nThe number of half-lives that have passed since the fossil was formed is 2.0\nThe number of years that have passed since the fossil was formed is 11460.0" <NEW_LINE> with patch("builtins.input", side_effect=user_input) as input_call: <NEW_LINE> <INDENT> with patch("sys.stdout", new=StringIO()) as output: <NEW_LINE> <INDENT> import attempt <NEW_LINE> self.assertEqual(output.getvalue().strip(), expected)
User enters [105, 25] and the program gives the correct output
625941b9627d3e7fe0d68cb7
@app.route('/consent/', methods=['GET']) <NEW_LINE> def consent(): <NEW_LINE> <INDENT> if session.get('JUDICIOUS_CONSENTED', False) is True: <NEW_LINE> <INDENT> return redirect( url_for( 'stage', _scheme='https', _external=True, **request.args)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return render_template('consent.html')
Render the consent form.
625941b994891a1f4081b911
@with_setup(setup_empty) <NEW_LINE> def test_cvei(): <NEW_LINE> <INDENT> count = {"#": 0} <NEW_LINE> class MyCollector(pyblish.api.ContextPlugin): <NEW_LINE> <INDENT> order = pyblish.api.CollectorOrder <NEW_LINE> def process(self, context): <NEW_LINE> <INDENT> context.create_instance("MyInstance") <NEW_LINE> count["#"] += 1 <NEW_LINE> <DEDENT> <DEDENT> class MyValidator(pyblish.api.InstancePlugin): <NEW_LINE> <INDENT> order = pyblish.api.ValidatorOrder <NEW_LINE> def process(self, instance): <NEW_LINE> <INDENT> count["#"] += 10 <NEW_LINE> <DEDENT> <DEDENT> class MyExtractor(pyblish.api.InstancePlugin): <NEW_LINE> <INDENT> order = pyblish.api.ExtractorOrder <NEW_LINE> def process(self, instance): <NEW_LINE> <INDENT> count["#"] += 100 <NEW_LINE> <DEDENT> <DEDENT> class MyIntegrator(pyblish.api.InstancePlugin): <NEW_LINE> <INDENT> order = pyblish.api.IntegratorOrder <NEW_LINE> def process(self, instance): <NEW_LINE> <INDENT> count["#"] += 1000 <NEW_LINE> <DEDENT> <DEDENT> for Plugin in (MyCollector, MyValidator, MyExtractor, MyIntegrator): <NEW_LINE> <INDENT> pyblish.api.register_plugin(Plugin) <NEW_LINE> <DEDENT> engine = pyblish.engine.create() <NEW_LINE> engine.reset() <NEW_LINE> assert count["#"] == 0 <NEW_LINE> engine.collect() <NEW_LINE> assert count["#"] == 1 <NEW_LINE> engine.validate() <NEW_LINE> assert count["#"] == 11 <NEW_LINE> engine.extract() <NEW_LINE> assert count["#"] == 111 <NEW_LINE> engine.integrate() <NEW_LINE> assert count["#"] == 1111 <NEW_LINE> engine.publish() <NEW_LINE> assert count["#"] == 1111 <NEW_LINE> assert not engine.is_running
CVEI stages trigger plug-in in the appropriate order
625941b9adb09d7d5db6c5fc
def set_options(self, overriding_options): <NEW_LINE> <INDENT> overriding_options = overriding_options or {} <NEW_LINE> options = { 'na_value': '-', 'list_delimiter': '/', 'name_key': 'name', 'freq_key': 'frequency' } <NEW_LINE> for k, v in overriding_options.items(): <NEW_LINE> <INDENT> if k in options: <NEW_LINE> <INDENT> options[k] = v <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise common.MyError('{} is not a recognised option'.format(k)) <NEW_LINE> <DEDENT> <DEDENT> return options
Set various options to default or override in initialisation. @param overriding_options: dict of options to use instead of default values.
625941b9627d3e7fe0d68cb8
def post(self, jobid): <NEW_LINE> <INDENT> print("tools/route_start: {jobid}".format(**locals())) <NEW_LINE> worker_processes[jobid] = subprocess.Popen(["python", "-m", "cea.worker", "{jobid}".format(jobid=jobid)]) <NEW_LINE> return jobid
Start a ``cea-worker`` subprocess for the script. (FUTURE: add support for cloud-based workers)
625941b94f6381625f1148af
def cat_summary(data, categorical_cols, target, number_of_classes=25): <NEW_LINE> <INDENT> var_count = 0 <NEW_LINE> vars_more_classes = [] <NEW_LINE> for var in categorical_cols: <NEW_LINE> <INDENT> if len(data[var].value_counts()) <= number_of_classes: <NEW_LINE> <INDENT> print(pd.DataFrame({var: data[var].value_counts(), "Ratio": 100 * data[var].value_counts() / len(data), "TARGET_MEAN": data.groupby(var)[target].mean()}), end="\n\n\n") <NEW_LINE> var_count += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vars_more_classes.append(data[var].name) <NEW_LINE> <DEDENT> <DEDENT> print('%d categorical variables have been described' % var_count, end="\n\n") <NEW_LINE> print('There are', len(vars_more_classes), "variables have more than", number_of_classes, "classes", end="\n\n") <NEW_LINE> print('Variable names have more than %d classes:' % number_of_classes, end="\n\n") <NEW_LINE> print(vars_more_classes)
Function to create a summary table for categorical variables. :param data: dataframe for summary :param categorical_cols: list of categorical columns :param target: target variable name like 'TARGET' :param number_of_classes: number of classes, which you want to limit for variable to be categorical. :return:
625941b9de87d2750b85fbf7
@pytest.mark.parametrize('data,expected', [ ({}, False), ({"scripts": None}, False), ({"scripts": {"docs": "jsdoc2md -t ..."}}, False), ({"scripts": {"test": "echo \"Error: no test specified\" && exit 1"}}, False), ({"scripts": {"test": "tape test/*.js", "docs": "jsdoc2md -t"}}, True) ]) <NEW_LINE> def test__are_tests_implemented(data, expected): <NEW_LINE> <INDENT> dn = NpmDataNormalizer(data) <NEW_LINE> assert dn._are_tests_implemented() == expected
Test NpmDataNormalizer._are_tests_implemented().
625941b999fddb7c1c9de1fc
def checkExceptions(version, group1, group2): <NEW_LINE> <INDENT> resType1 = group1.type <NEW_LINE> resType2 = group2.type <NEW_LINE> if (resType1 == "COO" and resType2 == "ARG"): <NEW_LINE> <INDENT> exception, value = checkCooArgException(group1, group2, version) <NEW_LINE> <DEDENT> elif (resType1 == "ARG" and resType2 == "COO"): <NEW_LINE> <INDENT> exception, value = checkCooArgException(group2, group1, version) <NEW_LINE> <DEDENT> elif (resType1 == "COO" and resType2 == "COO"): <NEW_LINE> <INDENT> exception, value = checkCooCooException(group1, group2, version) <NEW_LINE> <DEDENT> elif (resType1 == "CYS" and resType2 == "CYS"): <NEW_LINE> <INDENT> exception, value = checkCysCysException(group1, group2, version) <NEW_LINE> <DEDENT> elif (resType1 == "COO" and resType2 == "HIS") or (resType1 == "HIS" and resType2 == "COO"): <NEW_LINE> <INDENT> exception, value = checkCooHisException(group1, group2, version) <NEW_LINE> <DEDENT> elif (resType1 == "OCO" and resType2 == "HIS") or (resType1 == "HIS" and resType2 == "OCO"): <NEW_LINE> <INDENT> exception, value = checkOcoHisException(group1, group2, version) <NEW_LINE> <DEDENT> elif (resType1 == "CYS" and resType2 == "HIS") or (resType1 == "HIS" and resType2 == "CYS"): <NEW_LINE> <INDENT> exception, value = checkCysHisException(group1, group2, version) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> exception = False; value = None <NEW_LINE> <DEDENT> return exception, value
checks for exceptions for this version - using defaults
625941b9d4950a0f3b08c1c3
@pytest.fixture <NEW_LINE> def hand(request): <NEW_LINE> <INDENT> hh = FullTiltPokerHandHistory(request.instance.hand_text) <NEW_LINE> hh.parse() <NEW_LINE> return hh
Parse handhistory defined in hand_text class attribute and returns a FullTiltPokerHandHistory instance.
625941b9baa26c4b54cb0f8c
def working_ports(portsToTest): <NEW_LINE> <INDENT> for relayPort in portsToTest: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> req = requests.get("http://localhost:"+str(relayPort)) <NEW_LINE> if(int(req.content.decode())!=relayPort): <NEW_LINE> <INDENT> portsToTest.remove(relayPort) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> portsToTest.remove(relayPort) <NEW_LINE> print("Relay "+str(relayPort)+ " not working.") <NEW_LINE> <DEDENT> <DEDENT> return portsToTest
Checks that the port in the provided list responds correctly. Returns list of all working ports
625941b910dbd63aa1bd2a18
def addNodeAndRange(self, key, step_1, step_2): <NEW_LINE> <INDENT> assert isinstance(key, str) <NEW_LINE> assert isinstance(step_1, int) <NEW_LINE> assert isinstance(step_2, int) <NEW_LINE> node = self.addNode( key ) <NEW_LINE> node.addRange(step_1, step_2) <NEW_LINE> return node
@rtype: LocalObsdataNode
625941b9dc8b845886cb539e
def get_diff(source, dest): <NEW_LINE> <INDENT> source_dict = {record['id']: record for record in source} <NEW_LINE> dest_dict = {record['id']: record for record in dest} <NEW_LINE> source_keys = set(source_dict.keys()) <NEW_LINE> dest_keys = set(dest_dict.keys()) <NEW_LINE> to_create = source_keys - dest_keys <NEW_LINE> to_delete = dest_keys - source_keys <NEW_LINE> to_update = set() <NEW_LINE> to_check = source_keys - to_create - to_delete <NEW_LINE> for record_id in to_check: <NEW_LINE> <INDENT> new = canonical_json(source_dict[record_id]) <NEW_LINE> old = canonical_json(dest_dict[record_id]) <NEW_LINE> if new != old: <NEW_LINE> <INDENT> to_update.add(record_id) <NEW_LINE> <DEDENT> <DEDENT> return ([source_dict[k] for k in to_create], [source_dict[k] for k in to_update], [dest_dict[k] for k in to_delete])
Get the diff between two record lists, returned in this order: - to_create - to_update - to_delete
625941b9fb3f5b602dac34f8
def handle_state(self): <NEW_LINE> <INDENT> if self.state == c.REVEAL: <NEW_LINE> <INDENT> self.revealing() <NEW_LINE> <DEDENT> elif self.state == c.SLIDE: <NEW_LINE> <INDENT> self.sliding() <NEW_LINE> <DEDENT> elif self.state == c.FALL: <NEW_LINE> <INDENT> self.falling()
Manipula o comportamento com base no estado
625941b9a17c0f6771cbdebd
def click_on_close_alert_message(self): <NEW_LINE> <INDENT> self.click_element(sc.BUTTON['CLOSE_ALERT_MESSAGE']) <NEW_LINE> self.wait_for_invisibility_of_element_located(sc.BUTTON['CLOSE_ALERT_MESSAGE'])
Click on close alert message button :return:
625941b9091ae35668666dcf
def match(self, url, context): <NEW_LINE> <INDENT> for key in self.criteria.iterkeys(): <NEW_LINE> <INDENT> value = self.criteria.get(key) <NEW_LINE> if key == "permission": <NEW_LINE> <INDENT> event = context["event"] <NEW_LINE> result = self.plugin.commands.perm_handler.check( value, event.source, event.target, event.caller ) <NEW_LINE> if not result: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> if callable(value): <NEW_LINE> <INDENT> if value(getattr(url, key)): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(value, REGEX_TYPE): <NEW_LINE> <INDENT> if value.match(to_unicode(getattr(url, key))): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> elif value == getattr(url, key): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
Decide whether to handle this URL. This should return True if this handler should handle the URL, or False if not. Do not do any actual handling here. You should only override this if the built-in handling doesn't cover your needs for some reason. :param url: The URL object to match :param context: Dictionary with the current context :return: True if this handler should handle the URL, False otherwise
625941b924f1403a926009d3
def _post_init(self): <NEW_LINE> <INDENT> self.load_textile_image() <NEW_LINE> self.description.grab_focus()
Handle any initialization that needs to be done after the interface is brought up. Post initalization function. This is called by _EditPrimary's init routine, and overridden in the derived class (this class).
625941b99b70327d1c4e0c3d
def get_friends(html): <NEW_LINE> <INDENT> cont = get_left(html) <NEW_LINE> if cont == '': <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> soup = BeautifulSoup(cont, 'html.parser') <NEW_LINE> try: <NEW_LINE> <INDENT> return int(soup.find_all('strong')[0].get_text()) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return 0
:param html: :return: 返回关注数
625941b963b5f9789fde6f4f
def get_base_inode(volume_name, path): <NEW_LINE> <INDENT> st_type = { stat.S_IFSOCK: 's', stat.S_IFLNK: 'l', stat.S_IFREG: '-', stat.S_IFBLK: 'b', stat.S_IFDIR: 'd', stat.S_IFCHR: 'c', stat.S_IFIFO: 'p' } <NEW_LINE> st = os.lstat(path) <NEW_LINE> tpe = st_type[stat.S_IFMT(st.st_mode)] <NEW_LINE> return { '_id': '%s:%u' % (volume_name, st.st_ino), 'dev': volume_name, 'fmt': tpe, 'uid': st.st_uid, 'gid': st.st_gid, 'size': st.st_size, 'atime': datetime.utcfromtimestamp(st.st_atime), 'mtime': datetime.utcfromtimestamp(st.st_mtime), 'ctime': datetime.utcfromtimestamp(st.st_ctime), 'chmod': get_chmod(tpe, stat.S_IMODE(st.st_mode)), 'chflags': get_chflags(st.st_flags), 'acl': get_acl(path), 'details': {} }
Generate an inode object for a path
625941b9566aa707497f43e3
def numDecodings(self, s): <NEW_LINE> <INDENT> return self.helper(s, 1, {})
:type s: str :rtype: int
625941b9d18da76e2353233b
def set_MailMessageType(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'MailMessageType', value)
Set the value of the MailMessageType input for this Choreo. ((required, string) The type of message to retrieve. Valid values are: All and AskSellerQuestion. When set to AskSellerQuestion, ItemID or a date range filter must be specified.)
625941b976d4e153a657e99a
def is_zero(self): <NEW_LINE> <INDENT> return self._kernel.is_zero()
Returns if this kernel is equal to 0
625941b92eb69b55b151c714
def run_test(testnum): <NEW_LINE> <INDENT> with cons.log.section('basic'): <NEW_LINE> <INDENT> output = u_boot_console.run_command('log test %d' % testnum) <NEW_LINE> <DEDENT> split = output.replace('\r', '').splitlines() <NEW_LINE> lines = iter(split) <NEW_LINE> assert 'test %d' % testnum == next(lines) <NEW_LINE> return lines
Run a particular test number (the 'log test' command) Args: testnum: Test number to run Returns: iterator containing the lines output from the command
625941b95166f23b2e1a4fc2
@app.route('/sign-in/', methods=['POST', 'GET']) <NEW_LINE> def sign_in(): <NEW_LINE> <INDENT> if request.method == 'POST': <NEW_LINE> <INDENT> login_user = MONGO.db.users.find_one({'username': request.form.get('username')}) <NEW_LINE> if login_user: <NEW_LINE> <INDENT> if bcrypt.hashpw(request.form.get('password').encode('utf-8'), login_user['password']) == login_user['password']: <NEW_LINE> <INDENT> session['USERNAME'] = request.form.get('username') <NEW_LINE> return redirect(url_for('profile')) <NEW_LINE> <DEDENT> <DEDENT> return render_template('sign-in.html', failed=True) <NEW_LINE> <DEDENT> return render_template('sign-in.html', failf=False)
Checks the username and password against the database and returns the appropriate html template :param user name (str) and password (str - hashed) received from the form element :return sign-in.html if it is a GET request or if the user has wrong username or password or profile.html if the username and password is correct
625941b93617ad0b5ed67d69
def get(self, *args): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser(description=self.upload.__doc__, prog="conan upload") <NEW_LINE> parser.add_argument('reference', help='package recipe reference') <NEW_LINE> parser.add_argument('path', help='Path to the file or directory. If not specified will get the conafile' 'if only a reference is specified and a conaninfo.txt file contents if ' 'the package is also specified', default=None, nargs="?") <NEW_LINE> parser.add_argument("--package", "-p", default=None, help='package ID') <NEW_LINE> parser.add_argument("--remote", "-r", help='upload to this specific remote') <NEW_LINE> parser.add_argument("--raw", "-raw", help='Do not decorate the text', default=False, action='store_true') <NEW_LINE> args = parser.parse_args(*args) <NEW_LINE> ret, path = self._conan.get_path(args.reference, args.package, args.path, args.remote) <NEW_LINE> if isinstance(ret, list): <NEW_LINE> <INDENT> self._outputer.print_dir_list(ret, path, args.raw) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._outputer.print_file_contents(ret, path, args.raw) <NEW_LINE> <DEDENT> return
Gets a file or list a directory of a given reference or package
625941b9097d151d1a222cc6
def go_previous_thumbnail(self): <NEW_LINE> <INDENT> if self.current_thumbnail is not None: <NEW_LINE> <INDENT> index = self._thumbnails.index(self.current_thumbnail) - 1 <NEW_LINE> index = index if index >= 0 else len(self._thumbnails) - 1 <NEW_LINE> self.set_current_index(index)
Select the thumbnail previous to the currently selected one.
625941b973bcbd0ca4b2bee7
def __init__(self, index: Index, targets: tp.Optional[ArrayGO] = None, offset: int = 0
        ):
    """Initialize this level with its index, optional targets, and offset.

    Args:
        index: the Index held at this level.
        targets: optional ArrayGO of Indices; np.array supports fancy
            indexing for iloc-compatible usage.
        offset: integer offset for this level.
    """
    self.offset = offset
    self.targets = targets
    self.index = index
Args: offset: integer offset for this level. targets: np.ndarray of Indices; np.array supports fancy indexing for iloc compatible usage.
625941b921bff66bcd6847bf
def get_subscription_with_http_info(self, sub_id, **kwargs):
    """Get Subscription.

    Fetch a subscription by id. Synchronous by default; pass a
    ``callback`` function to perform the request asynchronously.

    :param callback function: callback for an asynchronous request (optional)
    :param str sub_id: Subscription ID. (required)
    :return: SubscriptionEnvelope, or the request thread when called
        asynchronously.
    """
    all_params = ['sub_id', 'callback', '_return_http_data_only',
                  '_preload_content', '_request_timeout']
    params = locals()
    # Reject any keyword argument this endpoint does not understand.
    for key, val in iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_subscription" % key)
        params[key] = val
    del params['kwargs']
    if params.get('sub_id') is None:
        raise ValueError("Missing the required parameter `sub_id` when calling `get_subscription`")
    collection_formats = {}
    resource_path = '/subscriptions/{subId}'.replace('{format}', 'json')
    path_params = {'subId': params['sub_id']}
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
    }
    # Delegate the HTTP round trip (and async dispatch) to the client.
    return self.api_client.call_api(
        resource_path, 'GET',
        path_params,
        {},     # query params
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SubscriptionEnvelope',
        auth_settings=['artikcloud_oauth'],
        callback=params.get('callback'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
Get Subscription Get Subscription This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_subscription_with_http_info(sub_id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str sub_id: Subscription ID. (required) :return: SubscriptionEnvelope If the method is called asynchronously, returns the request thread.
625941b9e64d504609d746aa
def get_table_list():
    """Build a mapping of worksheet name to its list of table names.

    Reads the 'TABLES' named range on the hidden '_tables' worksheet
    (populated from the launcher.json file), where each row holds a
    (worksheet_name, table_name) pair.

    Returns:
        dict: keys are worksheet names, values are lists of table names.
    """
    rows = ws_tables.range('TABLES').value
    table_list = {}
    for row in rows:
        sheet, table = row[0], row[1]
        table_list.setdefault(sheet, []).append(table)
    return table_list
Read the 'TABLES' table found under the hidden '_tables' worksheet. This table is populated from launcher.json file and is used to determine the table names defined under each worksheet Returns: table(dict): k,v, k=worksheet_name and v=list of tables
625941b9cc0a2c11143dcd02
def _delete_sg(self):
    """Delete the storage group named ``self._sg_name``.

    Fails the Ansible module when the storage group does not exist or is
    still part of a masking view; otherwise deletes it, marks the module
    as changed, and appends a status message.

    :return: None
    """
    if self._sg_name not in self._conn.provisioning.get_storage_group_list():
        # BUG FIX: error-message grammar ("doesn't exists" -> "doesn't exist").
        self._module.fail_json(msg="SG {} doesn't exist".format(self._sg_name))
    masking_view = self._conn.provisioning.get_masking_views_from_storage_group(
        storagegroup=self._sg_name)
    if masking_view:
        # Refuse to delete a storage group still referenced by a masking view.
        self._message.append("Storage Group {} is Part of a Masking View".format(self._sg_name))
        self._module.fail_json(msg=self._message)
    self._conn.provisioning.delete_storagegroup(storagegroup_id=self._sg_name)
    self._changed = True
    self._message.append("SG {} has been deleted".format(self._sg_name))
Delete Storage Group :return: None
625941b9b5575c28eb68de67
def close (self):
    """Close the wrapped file object.

    All settings are lost on close.
    """
    self.fd.close()
All settings lost on close.
625941b991af0d3eaac9b87e
def test_api(self) -> None:
    """testAPI

    Issue the request under the runtime retry/backoff policy: retry while
    TeaCore.allow_retry permits, sleeping the computed backoff between
    attempts, and re-raising non-retryable errors immediately.

    Raises:
        UnretryableException: when the retry budget is exhausted without
            a successful response.
    """
    _runtime = {}
    _last_request = None
    _last_exception = None
    _now = time.time()
    _retry_times = 0
    # Keep attempting while the runtime's retry policy allows it.
    while TeaCore.allow_retry(_runtime.get('retry'), _retry_times, _now):
        if _retry_times > 0:
            # Back off before every attempt after the first.
            _backoff_time = TeaCore.get_backoff_time(_runtime.get('backoff'), _retry_times)
            if _backoff_time > 0:
                TeaCore.sleep(_backoff_time)
        _retry_times = _retry_times + 1
        try:
            _request = TeaRequest()
            _last_request = _request
            _response = TeaCore.do_action(_request, _runtime)
            return
        except Exception as e:
            # Retryable errors loop back; anything else propagates now.
            if TeaCore.is_retryable(e):
                _last_exception = e
                continue
            raise e
    # All allowed attempts failed; surface the last request/exception pair.
    raise UnretryableException(_last_request, _last_exception)
testAPI
625941b950812a4eaa59c18f
def select_tab(self, window, group_id, last_index):
    """Select the desired fallback tab within a sheet group.

    :param window: window whose sheet group is inspected
    :param group_id: index of the sheet group to select within
    :param last_index: index of the previously active sheet; a negative
        value means there is no previous sheet, so nothing is selected
    :return: True if a tab was selected, False otherwise
    """
    sheets = window.sheets_in_group(group_id)
    if not sheets:
        # BUG FIX: this path previously returned None while every other
        # path returned a bool; return False for a consistent type.
        return False
    selected = False
    if last_index >= 0:
        # Only consult the configured fallback direction when there is a
        # previous index to fall back from (presumably a pure settings
        # read -- confirm get_fallback_direction has no side effects).
        fallback_mode = get_fallback_direction()
        if fallback_mode == LAST:
            selected = self.select_last(sheets, window, last_index)
        elif fallback_mode == RIGHT:
            selected = self.select_right(sheets, window, last_index)
        else:
            selected = self.select_left(sheets, window, last_index)
    return selected
Select the desired fallback tab
625941b9a8370b771705270b
def build_url(self) -> str:
    """Assemble a redis connection URL for creating a pool.

    Chooses a ``unix://`` socket URL or a ``redis://`` TCP URL based on
    the configured transport, embedding the password when auth is on.

    :return: A connection URL
    :rtype: str
    """
    # Password segment appears only when auth is enabled.
    credentials = ':{}'.format(self.__auth) if self.__use_auth else ''
    if self.__use_socket:
        return 'unix://{}@{}?db={}'.format(credentials, self.__socket, self.__db)
    return 'redis://{}@{}:{}/{}'.format(credentials, self.__host, self.__port, self.__db)
Build a redis connection URL for creating a pool :return: A connection URL :rtype: str
625941b9a79ad161976cbfaf
def __init__(self, id: str=None, title: str=None, quantity: int=None, ship_date: datetime=None, status: str=None, complete: bool=False):
    """Project - a model defined in Swagger.

    :param id: The id of this Project. # noqa: E501
    :type id: str
    :param title: The title of this Project. # noqa: E501
    :type title: str
    :param quantity: The quantity of this Project. # noqa: E501
    :type quantity: int
    :param ship_date: The ship_date of this Project. # noqa: E501
    :type ship_date: datetime
    :param status: The status of this Project. # noqa: E501
    :type status: str
    :param complete: The complete of this Project. # noqa: E501
    :type complete: bool
    """
    self.swagger_types = {
        'id': str,
        'title': str,
        'quantity': int,
        'ship_date': datetime,
        'status': str,
        'complete': bool,
    }
    self.attribute_map = {
        'id': 'id',
        'title': 'title',
        'quantity': 'quantity',
        'ship_date': 'shipDate',
        'status': 'status',
        'complete': 'complete',
    }
    # Store each value on its private backing attribute (_id, _title, ...).
    for name, value in (('id', id), ('title', title), ('quantity', quantity),
                        ('ship_date', ship_date), ('status', status),
                        ('complete', complete)):
        setattr(self, '_' + name, value)
Project - a model defined in Swagger :param id: The id of this Project. # noqa: E501 :type id: str :param title: The title of this Project. # noqa: E501 :type title: str :param quantity: The quantity of this Project. # noqa: E501 :type quantity: int :param ship_date: The ship_date of this Project. # noqa: E501 :type ship_date: datetime :param status: The status of this Project. # noqa: E501 :type status: str :param complete: The complete of this Project. # noqa: E501 :type complete: bool
625941b9046cf37aa974cbb5
def register_controller(self, controller):
    """Execute the controller's add_routes hook to register its routes.

    Passes ``self`` so the controller can attach routes to this object.
    """
    controller.add_routes(self)
Execute add_routes function within controllers to register routes.
625941b9d10714528d5ffb49
def get_data_dir(data_dir):
    """Return the path of the ``true_labels`` source directory under *data_dir*.

    :param data_dir: base data directory
    :return: path string ``<data_dir>/true_labels``
    """
    return "{}/true_labels".format(data_dir)
Get source data's directory :return:
625941b9283ffb24f3c55776
def main():
    """Pass the current date parts and home directory to webchart.py.

    Creates the year/month chart folders, renders the chart, and copies
    it into place.
    """
    # BUG FIX: take a single time snapshot so year/month/today cannot
    # disagree when the three calls straddle midnight (or a year/month
    # boundary).
    now = time.localtime()
    year = time.strftime('%Y', now)
    month = time.strftime('%m', now)
    today = time.strftime('%Y%m%d', now)
    homedir = '/home/pi/raspi-sump/'
    webchart.create_folders(year, month, homedir)
    webchart.create_chart(homedir)
    webchart.copy_chart(year, month, today, homedir)
Pass variables to webchart.py
625941b9dd821e528d63b015