Columns: code (string, lengths 4–4.48k) · docstring (string, lengths 1–6.45k) · _id (string, lengths 24–24)
def GetArtifactKnowledgeBase(client_obj, allow_uninitialized=False):
    client_schema = client_obj.Schema
    kb = client_obj.Get(client_schema.KNOWLEDGE_BASE)
    if not allow_uninitialized:
        if not kb:
            raise artifact_lib.KnowledgeBaseUninitializedError(
                "KnowledgeBase empty for %s." % client_obj.urn)
        if not kb.os:
            raise artifact_lib.KnowledgeBaseAttributesMissingError(
                "KnowledgeBase missing OS for %s. Knowledgebase content: %s" %
                (client_obj.urn, kb))
    if not kb:
        kb = client_schema.KNOWLEDGE_BASE()
        SetCoreGRRKnowledgeBaseValues(kb, client_obj)
    if kb.os == "Windows":
        if not kb.environ_allusersappdata and kb.environ_allusersprofile:
            if kb.os_major_version >= 6:
                kb.environ_allusersappdata = u"c:\\programdata"
                kb.environ_allusersprofile = u"c:\\programdata"
            else:
                kb.environ_allusersappdata = (u"c:\\documents and settings\\All Users\\"
                                              "Application Data")
                kb.environ_allusersprofile = u"c:\\documents and settings\\All Users"
    return kb
This generates an artifact knowledge base from a GRR client.

Args:
    client_obj: A GRRClient object which is opened for reading.
    allow_uninitialized: If True we accept an uninitialized knowledge_base.

Returns:
    A KnowledgeBase semantic value.

Raises:
    ArtifactProcessingError: If called when the knowledge base has not been
        initialized.
    KnowledgeBaseUninitializedError: If we failed to initialize the knowledge
        base.

This is needed so that the artifact library has a standardized interface to
the data that is actually stored in the GRRClient object in the GRR
datastore. We expect that the client KNOWLEDGE_BASE is already filled out
through the KnowledgeBaseInitialization flow, but attempt to make some
intelligent guesses if things failed.
625941b945492302aab5e141
@dbus.service.method(SETTINGS_IFACE, in_signature='a{sa{sv}}', out_signature='o')
def SettingsAddConnection(self, connection_settings):
    NM = dbusmock.get_object(MAIN_OBJ)
    devices = NM.GetDevices()
    dev = None
    auto_connect = False
    if 'autoconnect' in connection_settings['connection']:
        auto_connect = connection_settings['connection']['autoconnect']
    if auto_connect and len(devices) > 0:
        dev = devices[0]
    connection_path = self.AddDeviceConnection(dev, connection_settings)
    if auto_connect and dev:
        activate_connection(NM, connection_path, dev, connection_path)
    return connection_path
Add a connection.

connection_settings is a String String Variant Map Map. See
https://developer.gnome.org/NetworkManager/0.9/spec.html#type-String_String_Variant_Map_Map

If you omit connection uuid or timestamp, this method adds one for you.

Note that this automatically associates the connection settings object
with the first device that was created.
625941b9711fe17d825421f3
def connect(self, nodelist, threshold=15.0):
    for i, end in enumerate(self.ends):
        neighbours = []
        for node in nodelist:
            distlist = [sp.dist(end, point) for point in node.pointlist]
            mindist = min(distlist)
            if mindist < threshold:
                neighbours.append([node, mindist])
        try:
            mindist_idx = np.argmin(neighbours, axis=0)[1]
            connectnode = neighbours[mindist_idx][0]
            angle = self.calculateAngle(end, connectnode.centre)
            self.connections.append([connectnode, angle, i, 0])
            yield [connectnode, angle, i, 0]
        except ValueError:
            # np.argmin raises ValueError when the neighbour list is empty,
            # i.e. this end does not connect with any node.
            pass
Connect the wire with nodes.

For each end of the wire, check the distance between the end position and
every node's vertex position. If the distance is under some threshold,
consider this node a neighbour and store it in a list. After checking all
the nodes in the nodelist, choose the node with the least distance as the
connection of this end. Finally, in the connections attribute, each element
will contain the connected node, the angle between the node and the end,
and which end of the wire this connection belongs to.

Args:
    nodelist (list): A list of node objects including dots and morphisms.
    threshold (float): The threshold value of distance.

Raises:
    Exception: The neighbour list could be blank if the wire doesn't
        connect with any nodes.
625941b96fece00bbac2d5bc
def main():
    list1 = [20, 6, 12, -3, 14]
    result_list = greater_than_10(list1)
    print(result_list)
    list2 = [16]
    result_list = greater_than_10(list2)
    print(result_list)
    list3 = [1, 2, 3, 4]
    result_list = greater_than_10(list3)
    print(result_list)
    list4 = []
    result_list = greater_than_10(list4)
    print(result_list)
This program prints a list containing only those numbers from a given list that have a value greater than 10.
625941b94e696a04525c92d3
def visit_next_url(self):
    url = self.url_follow_queue.popleft()
    r = self.session.get(url)
    for m in re.finditer(
            r"""(https?://[^\s<>]+)|href=['"]([^"']+)|src=['"]([^"']+)""",
            r.text):
        for g in m.groups():
            if g:
                logging.debug("raw link %s", g)
                new_url = urljoin(url, g)
                logging.debug("corrected link %s", new_url)
                if urlparse(new_url).netloc != urlparse(url).netloc:
                    logging.debug("netloc change")
                    if not self.config["allow_netloc_change"]:
                        logging.debug("not following to different netloc")
                        continue
                self.process_found_url(new_url)
Pop the next url, retrieve it and scan the content for further links.
625941b9cad5886f8bd26e63
def file_contains_header(line):
    header = get_file_header(line)
    if header is None:
        return False
    function_gen = parse_functions_with_bodies(line)
    first_function = None
    try:
        first_function = next(function_gen)
    except StopIteration:
        return True
    if first_function.span()[0] <= header.span()[0]:
        return False
    return True
Return True if the file contains a comment at the start of the file, where the comment is not associated with a function.
625941b97cff6e4e81117806
def average_daily_use(self, station_name):
    if self.target_data is None:
        self.get_targets()
    self.current_station = station_name
    station = self.target_data.query(f'station == "{station_name}"')
    self.daily_use = (station
                      .groupby([station.index.weekday, station.index.time])
                      .mean())
Determine the average turnstile use per day for a station.

:param str station_name: station name
625941b926068e7796caeb59
def log_out(self):
    try:
        os.remove('token')
    except FileNotFoundError:
        pass
    self.login_frame_init()
Log-out operation.
625941b99c8ee82313fbb5f5
def save(self, *args, **kwargs):
    super(Img, self).save(*args, **kwargs)
    url = self.imgfile.url
    image = Image.open(url[1:])
    width, height = image.size
    if width / height >= 1:
        size = (1280, 1280)
        size_m = (700, 700)
        size_s = (300, 300)
    else:
        size = (1000, 1000)
        size_m = (500, 500)
        size_s = (200, 200)
    image.thumbnail(size, Image.ANTIALIAS)
    image.save(url[1:])
    url_m = self.imgfile_m.url
    image_m = Image.open(url_m[1:])  # open the medium image file, not the original
    image_m.thumbnail(size_m, Image.ANTIALIAS)
    image_m.save(url_m[1:])
    url_s = self.imgfile_s.url
    image_s = Image.open(url_s[1:])
    image_s.thumbnail(size_s, Image.ANTIALIAS)
    image_s.save(url_s[1:])
Save Photo after ensuring it is not blank. Resize as needed.
625941b91f037a2d8b94607f
def unpatch():
    if not getattr(pymemcache.client, '_datadog_patch', False):
        return
    setattr(pymemcache.client, '_datadog_patch', False)
    setattr(pymemcache.client.base, 'Client', _Client)
    setattr(pymemcache, _DD_PIN_NAME, None)
    setattr(pymemcache, _DD_PIN_PROXY_NAME, None)
Remove pymemcache tracing
625941b97c178a314d6ef2da
def test_10_invalid_http_timeout(self):
    for invalid_time in ["cats", "0", "-1"]:
        ret, output, err = self.sysrepo("-T {0}".format(invalid_time),
                                        out=True, stderr=True, exit=1)
        self.assert_("http_timeout" in err, "error message "
                     "did not contain http_timeout: {0}".format(err))
We return an error given an invalid http_timeout
625941b9f9cc0f698b140486
def _validate_timing_resolution(self, timing_resolution):
    if timing_resolution is None:
        timing_resolution = defaults.timing_resolution
    if not isinstance(timing_resolution, datetime.timedelta):
        raise TypeError(
            '%s.timing_resolution should be an instance of '
            'datetime.timedelta not, %s' %
            (self.__class__.__name__,
             timing_resolution.__class__.__name__)
        )
    return timing_resolution
validates the given timing_resolution value
625941b9cdde0d52a9e52eb0
def __str__(self):
    return '\nsrc: {0} dst: {1} payload: {2}'.format(self.src, self.dst, self.payload)
Job: Returns a string with the source address, destination address, and payload of a packet.
625941b9ac7a0e7691ed3f5a
def canCompleteCircuit(self, gas, cost):
    if len(gas) == 1:
        if gas[0] - cost[0] >= 0:
            return 0
        else:
            return -1
    tank = gas[0] - cost[0]
    begin = 0
    p = 1
    while True:
        if p != begin:
            if tank >= 0:
                tank += gas[p] - cost[p]
            else:
                begin = p
                if begin == 0:
                    break
                tank = gas[begin] - cost[begin]
            p = (p + 1) % len(gas)
        else:
            if tank >= 0:
                return begin
            else:
                return -1
    return -1
:type gas: List[int]
:type cost: List[int]
:rtype: int
625941b9851cf427c661a393
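A quick usage sketch for the gas-station routine above; the enclosing class name Solution is an assumption (the entry only shows the method), and the expected results are traced from the algorithm:

# Hypothetical harness: assumes canCompleteCircuit is defined on a class named Solution.
s = Solution()
print(s.canCompleteCircuit([1, 2, 3, 4, 5], [3, 4, 5, 1, 2]))  # -> 3 (starting at index 3 the tank never goes negative)
print(s.canCompleteCircuit([2, 3, 4], [3, 4, 3]))              # -> -1 (total gas 9 < total cost 10)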
def make_image(screen, bgcolor, filename):
    # note: the filename argument is unused; the output paths are hardcoded
    img = Image.new('RGB', screen, bgcolor)
    draw = ImageDraw.Draw(img)
    draw.ellipse((20, 20, 204, 204), outline=(225, 225, 225), fill=(255, 255, 255))
    del draw
    img.save("./result/mask_cir01.jpg")
    img.save("./result/mask_cir.png")
    return
Create the image.
625941b9d164cc6175782bcf
def recv(timeout=False):
    global rdx
    if timeout is False:
        outmsg = rdx.rpop("jabber_in")
    else:
        outmsg = rdx.brpop("jabber_in", timeout)
    return outmsg
Check for incoming messages, blocking for a specified amount of time.

timeout=False -- No waiting/blocking
timeout=0     -- Block forever until a message is received
timeout=X     -- Where X is a positive integer, block for that number of seconds
625941b9fff4ab517eb2f2ba
def test_simple_case():
    b = [2, 3, 4]
    assert foo() in b
Straightforward membership (`in`) assertion in pytest.
625941b926068e7796caeb5a
def itertuples(self, index=True):
    arrays = []
    if index:
        arrays.append(self.index)
    arrays.extend(self.iloc[:, k] for k in range(len(self.columns)))
    return zip(*arrays)
Iterate over rows of DataFrame as tuples, with index value as first element of the tuple
625941b9d268445f265b4cf4
def __init__(self, *args, **kwds):
    if args or kwds:
        super(FileRemoveRequest, self).__init__(*args, **kwds)
        if self.file_path is None:
            self.file_path = ''
    else:
        self.file_path = ''
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommended use
is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.

The available fields are:
    file_path

:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
625941b90383005118ecf465
def poll_lock_server(self):
    is_locked = True
    while is_locked:
        request_arg = {'file_path': self.file_path,
                       'client_id': self.client_id}
        response = requests.get(LOCK_SERVER_ADDR, request_arg)
        data = response.json()
        if data['is_locked']:
            print("File {0} is locked. Polling again".format(self.file_path))
            is_locked = True
            time.sleep(2)
        else:
            is_locked = False
    return
Poll the lock server until the file becomes unlocked.
625941b94428ac0f6e5ba673
def test_forward_backward(self):
    y = [4., 5., 6., 7.]
    solver = solvers.forward_backward(accel=acceleration.dummy())
    param = {'solver': solver, 'rtol': 1e-6, 'verbosity': 'NONE'}
    f1 = functions.norm_l2(y=y)
    f2 = functions.dummy()
    ret = solvers.solve([f1, f2], np.zeros(len(y)), **param)
    nptest.assert_allclose(ret['sol'], y)
    self.assertEqual(ret['crit'], 'RTOL')
    self.assertEqual(ret['niter'], 35)
    f1 = functions.norm_l1(y=y, lambda_=1.0)
    f2 = functions.norm_l2(y=y, lambda_=0.8)
    ret = solvers.solve([f1, f2], np.zeros(len(y)), **param)
    nptest.assert_allclose(ret['sol'], y)
    self.assertEqual(ret['crit'], 'RTOL')
    self.assertEqual(ret['niter'], 4)
    f3 = functions.dummy()
    x0 = np.zeros((4,))
    self.assertRaises(ValueError, solver.pre, [f1, f2, f3], x0)
Test forward-backward splitting algorithm without acceleration, and with L1-norm, L2-norm, and dummy functions.
625941b9de87d2750b85fc0f
def check2():
    g_algo = GraphAlgo()
    file = '../data/A5'
    g_algo.load_from_json(file)
    g_algo.get_graph().remove_edge(13, 14)
    g_algo.save_to_json(file + "_edited")
    dist, path = g_algo.shortest_path(1, 7)
    print(dist, path)
    dist, path = g_algo.shortest_path(47, 19)
    print(dist, path)
    dist, path = g_algo.shortest_path(20, 2)
    print(dist, path)
    dist, path = g_algo.shortest_path(2, 20)
    print(dist, path)
    print(g_algo.connected_component(0))
    print(g_algo.connected_components())
    g_algo.plot_graph()
This function does basic testing over the A5 json file: shortest paths, connected components, and plotting.
:return:
625941b93317a56b86939aea
def open_restaurant(self):
    print(self.restaurant_name.title() + " is opening")
Welcome.
625941b999fddb7c1c9de214
def _get_drag_factor(self, brake_angle):
    return DRAG_FACTOR * (1 + DRAG_GAIN * np.sin(brake_angle)**2)
Map from drag brake angle to drag factor.

brake_angle: (rad)
returns: drag factor (dimensionless)
625941b915fb5d323cde098b
def save_data(users, posts):
    info_dict = {
        'users': [user.convert_to_dict() for user in users],
        'posts': [post.convert_to_dict() for post in posts]
    }
    with open("data.json", 'w') as file_:
        # dump the dict directly; json.dump(json.dumps(...)) would
        # double-encode the data as a quoted JSON string
        json.dump(info_dict, file_)
Saves all information, including users and posts, to the data.json file.
The passwords are kept "safe"!

users => The list of users you want to save. Type: List[User]
posts => The list of posts you want to save. Type: List[Post]

Returns: None
625941b9462c4b4f79d1d551
@named('show-backup')
@arg('server_name', completer=server_completer,
     help='specifies the server name for the command')
@arg('backup_id', completer=backup_completer, help='specifies the backup ID')
@expects_obj
def show_backup(args):
    server = get_server(args)
    backup_info = parse_backup_id(server, args)
    server.show_backup(backup_info)
    output.close_and_exit()
This method shows information for a single backup.
625941b921a7993f00bc7b6b
def load_forms(
        ciks: tp.Union[None, tp.List[str]] = None,
        types: tp.Union[None, tp.List[str]] = None,
        facts: tp.Union[None, tp.List[str]] = None,
        skip_segment: bool = False,
        min_date: tp.Union[str, datetime.date] = None,
        max_date: tp.Union[str, datetime.date, None] = None,
        tail: tp.Union[datetime.timedelta, float, int] = None
) -> tp.Generator[dict, None, None]:
    track_event("DATA_SECGOV_FORMS")
    max_date = parse_date(max_date)
    if min_date is not None:
        min_date = parse_date(min_date)
    else:
        min_date = max_date - tail
    params = {
        'ciks': list(set(ciks)) if ciks is not None else None,
        'types': list(set(types)) if types is not None else None,
        'facts': list(set(facts)) if facts is not None else None,
        'skip_segment': skip_segment,
        'min_date': min_date.isoformat(),
        'max_date': max_date.isoformat()
    }
    go = True
    while go:
        params_js = json.dumps(params)
        raw = request_with_retry("sec.gov/forms", params_js.encode())
        js = raw.decode()
        forms = json.loads(js)
        for f in forms:
            yield f
        go = len(forms) > 0
        params['offset'] = params.get('offset', 0) + len(forms)
Load SEC Forms (Fundamental data).

:param ciks: list of cik (you can get cik from asset id)
:param types: list of form types: ['10-K', '10-Q', '10-K/A', '10-Q/A']
:param facts: list of facts for extraction, for example: ['us-gaap:Goodwill']
:param skip_segment: skip facts with segment
:param min_date: min form date
:param max_date: max form date
:param tail: datetime.timedelta, tail size of data. min_date = max_date - tail
:return: generator
625941b9b830903b967e9797
@task(help={
    "stack-name": "The name to prefix before the stack.",
    "subdomain": "The subdomain to configure. (e.g. www)",
    "cert-arn": "A valid certificate ARN.",
    "profile": "A valid AWS profile."
})
def update_monitoring(c, stack_name, subdomain, profile, cert_arn=None, create=False):
    action = 'create' if create else 'update'
    with chdir(WORKING_DIR):
        aws('cloudformation', f'{action}-stack',
            '--stack-name', f'{stack_name}-monitoring',
            '--template-body', 'file://monitoring.yaml',
            '--capabilities', 'CAPABILITY_NAMED_IAM',
            '--parameters',
            f'ParameterKey=Subdomain,ParameterValue={subdomain}',
            f'ParameterKey=CertificateArn,ParameterValue={cert_arn if cert_arn else ""}',
            '--profile', f'{profile}')
Create or update monitoring CloudFormation stack
625941b929b78933be1e553a
def find_sub_sum(X, t):
    if len(X) == 0:
        if t == 0:
            return []
        else:
            return None
    Xp = find_sub_sum(X[1:], t)
    if Xp is not None:
        return Xp
    Xp = find_sub_sum(X[1:], t - X[0])
    if Xp is not None:
        return [X[0]] + Xp
    return None
Find a subset of X that sums up to t, or return None if no such subset exists.
625941b9be7bc26dc91cd486
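A short usage sketch of the subset-sum routine above, assuming find_sub_sum is in scope; the expected outputs were traced by hand through the recursion:

print(find_sub_sum([3, 34, 4, 12, 5, 2], 9))   # -> [4, 5]
print(find_sub_sum([3, 34, 4, 12, 5, 2], 30))  # -> None (26 is the largest reachable sum without 34)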
def calculate_bld_up_area(bldg_occsolid_list, flr2flr_height):
    flr_area_list = []
    for bldg_occsolid in bldg_occsolid_list:
        flr_area = calculate_bldg_flr_area(bldg_occsolid, flr2flr_height)
        flr_area_list.append(flr_area)
    return sum(flr_area_list)
This function calculates the total floor area of all the buildings.

Parameters
----------
bldg_occsolid_list : list of OCCsolids
    The list of OCCsolids that are buildings to be calculated.

flr2flr_height : float
    The floor to floor height of the building.

Returns
-------
total_bldg_flr_area : float
    The total floor area of all the buildings.
625941b9e1aae11d1e749b35
def _read_ifd(self, target, valid_tags, offset=-1):
    self._reads.seek(self._base + offset, os.SEEK_SET)
    if not self.big:
        data = self._reads.read(2)
        if len(data) == 0:
            raise StopIteration
        count = struct.unpack('%sH' % self.endian, data)[0]
    else:
        data = self._reads.read(8)
        if len(data) == 0:
            raise StopIteration
        count = struct.unpack('%sQ' % self.endian, data)[0]
    for _x in range(count):
        self._read_ifd_entry(target, valid_tags)
    return self._read_offset()
Read an IFD and any sub-IFDs
625941b976d4e153a657e9b1
def crop_first_frame(self):
    frame = self.webcam.single_pic_array()
    crop_inst = images.CropShape(frame, self.no_of_sides)
    mask, crop, points, _ = crop_inst.begin_crop()
    return mask, crop, points
Finds the mask, crop and selection points for first frame
625941b9d99f1b3c44c67417
def setSpacing(self, *args):
    return _osgManipulator.GridConstraint_setSpacing(self, *args)
setSpacing(GridConstraint self, Vec3d spacing)
625941b92c8b7c6e89b35644
def create_menu(self, state):
    def file_save():
        file = tkinter.filedialog.asksaveasfile(mode='w',
                                                defaultextension='.txt')
        if file:
            file.write(parse.save(self.colorer.countries))
            file.close()

    def file_load():
        file = tkinter.filedialog.askopenfile()
        if file:
            try:
                self.colorer.countries = parse.load(file)
            except (FileNotFoundError, ValueError, IndexError):
                self.colorer.countries = parse.load(COUNTRIES.splitlines())
            file.close()
            self.colorer.set_colors()
            self.draw_countries()

    def set_algorithm(number):
        if not self.locked:
            self.colorer.algorithm = number
            for country in self.colorer.countries.items:
                country[1] = None
            threading.Thread(target=self.set_colors_async, daemon=True).start()

    def about():
        tkinter.messagebox.showinfo('Info', 'Map coloring')

    file_menu = tkinter.Menu(self.menu, tearoff=0)
    file_menu.add_command(label="Load", command=file_load, state=state)
    file_menu.add_command(label="Save", command=file_save, state=state)
    file_menu.add_separator()
    file_menu.add_command(label="Exit", command=self.root.quit)
    self.menu.add_cascade(label="File", menu=file_menu)
    algorithm_menu = tkinter.Menu(self.menu, tearoff=0)
    algorithm_menu.add_radiobutton(label="algo0",
                                   command=lambda: set_algorithm(0),
                                   state=state)
    algorithm_menu.add_radiobutton(label="algo1",
                                   command=lambda: set_algorithm(1),
                                   state=state)
    self.menu.add_cascade(label="Algorithms", menu=algorithm_menu)
    help_menu = tkinter.Menu(self.menu, tearoff=0)
    help_menu.add_command(label="About", command=about)
    self.menu.add_cascade(label="Help", menu=help_menu)
Creates menu.

:param state: tkinter.DISABLED or tkinter.ACTIVE
:return: None
625941b90c0af96317bb806a
def docify(input_folder, output_path, count=-1, content_column="content",
           source_column="publication", keywords=[], ignore_source=[],
           overwrite=False):
    logging.info("document data requested for '%s' dataset at '%s'...",
                 input_folder, output_path)
    if not utility.check_output_necessary(output_path, overwrite):
        return
    logging.info("Loading article data...")
    article_table = None
    for filename in tqdm(os.listdir(input_folder)):
        if filename.endswith(".csv"):
            logging.debug("Loading '%s'...", filename)
            article_table_in = pd.read_csv(input_folder + "/" + filename)
            if article_table is None:
                article_table = article_table_in
            else:
                article_table = pd.concat([article_table, article_table_in])
    for source in ignore_source:
        logging.info("Ignoring source %s...", source)
        article_table = article_table[article_table[source_column] != source]
    if len(keywords) > 0 and keywords[0] != "":
        tmp_table = pd.DataFrame(columns=article_table.columns)
        logging.info("Targeting document set to keywords %s...", str(keywords))
        for word in keywords:
            tmp_table = pd.concat(
                [tmp_table,
                 article_table[article_table[content_column].str.contains(word)]])
        tmp_table = tmp_table.drop_duplicates()
        article_table = tmp_table.copy()
    if count != -1:
        logging.info("Shuffling %i subset of documents for output...", count)
        article_table = article_table.sample(count, random_state=42)
    logging.info("Grabbing articles...")
    documents = []
    for (idx, row) in tqdm(article_table.iterrows()):
        if count != -1 and len(documents) > count:
            break
        documents.append({"text": row.loc[content_column],
                          "source": row.loc[source_column]})
    logging.info("Saving document data to '%s'", output_path)
    with open(output_path, 'w') as outfile:
        json.dump(documents, outfile)
    logging.info("document data saved to '%s'", output_path)
Create a file of documents from all csv files in a folder.

A count of -1 means output _all_ documents. input_folder assumes no
trailing /.
625941b971ff763f4b549510
def match(obj, eng):
    model = eng.workflow_definition.model(obj)
    record = get_record_from_model(model)
    response = list(
        set(match_by_arxiv_id(record)) | set(match_by_doi(record))
    )
    if response:
        obj.extra_data['recid'] = response[0]
        obj.extra_data['url'] = os.path.join(
            cfg["CFG_ROBOTUPLOAD_SUBMISSION_BASEURL"],
            'record',
            str(response[0])
        )
        return True
    return False
Return True if the record already exists in INSPIRE.

Searches by arXiv identifier and DOI, updates extra_data with the first
id returned by the search.
625941b9baa26c4b54cb0fa4
def get(self, who_am_i):
    self.__check('who_am_i', who_am_i)
    if who_am_i == 'parent':
        if not self.__in_queue.empty():
            return self.__in_queue.get()
        else:
            return None
    if who_am_i == 'child':
        if not self.__out_queue.empty():
            return self.__out_queue.get()
        else:
            return None
Get value from queue.

:type who_am_i: str
:return:
625941b910dbd63aa1bd2a30
def testBasicAsciiDiff(self):
    try:
        dcA = DataCategory("A", self.__attributeList, self.__rowListAsciiA)
        dcB = DataCategory("A", self.__attributeList, self.__rowListAsciiB)
        self.assertEqual(dcA, dcA)
        self.assertIs(dcA, dcA)
        self.assertEqual(dcB, dcB)
        self.assertNotEqual(dcA, dcB)
        self.assertIsNot(dcA, dcB)
    except Exception as e:
        logger.exception("Failing with %s", str(e))
        self.fail()
Test case - __eq__ and __ne__ methods
625941b9462c4b4f79d1d552
def updatePlots(self):
    t = self.pip.TPData["time"]
    rss = self.pip.TPData["Rss"]
    peak = self.pip.TPData["Rpeak"]
    self.leftPlot.plot(t, rss, clear=True)
    self.rightPlot.plot(t, peak, clear=True)
Update the pipette data plots.
625941b9d18da76e23532354
def get_captcha_client_cache_key(client_id):
    return "captcha:client_id:{0}".format(client_id)
Convert the client id into the key used for cache storage.

:param client_id:
:return:
625941b93617ad0b5ed67d81
def get_status_message():
    global _status_message
    return _status_message
Get current status message.

:returns: Current status message.
:rtype: string

>>> sublime.status_message("SomePlugin: working hard")
>>> sublime.get_status_message()
'SomePlugin: working hard'
625941b976d4e153a657e9b2
def fill_out_mailing_detail(self, mailing_detail=None):
    if mailing_detail is not None:
        return self.mailing_detail_field.set_text(mailing_detail)
Fill out mailing detail.

:param mailing_detail:
:return:
625941b9d6c5a10208143ec9
def filter_and_sort(items, filterfn, keyfn=None, reverse=False):
    temp = [i for i in items if filterfn(i)]
    if keyfn:
        temp.sort(key=keyfn, reverse=reverse)
    else:
        random.shuffle(temp)
    return temp
Filter a list with filterfn and sort the kept items by keyfn (or shuffle them when no keyfn is given).
625941b9099cdd3c635f0ade
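A usage sketch for the routine above, assuming filter_and_sort and the random module are in scope:

evens = filter_and_sort(range(10), lambda x: x % 2 == 0, keyfn=lambda x: x, reverse=True)
print(evens)  # -> [8, 6, 4, 2, 0]: even numbers kept, sorted descending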
def test_create_state(self):
    for state in ('INCOMING', 'FAILED', 'RUNNING'):
        _ = _JobState(state)
    self.assertRaises(InvalidStateError, _JobState, 'garbage')
Check making new _JobState objects
625941b931939e2706e4ccf1
def geometry_types(self, entity, col):
    col_obj = entity.columns[col]
    if isinstance(col_obj, GeometryColumn):
        geometry_type = col_obj.geometry_type()
        return geometry_type
Check the column geometry type.

:param entity:
:type entity: Entity
:param col:
:type col: string
:return: geom
:rtype: string
625941b90383005118ecf466
def register_specific(self,
                      name: str,
                      cmdfunc: SpecificCommandFunction,
                      args: bool = True
                      ) -> None:
    command = SpecificCommand(name, cmdfunc, args)
    self.register(command)
Register a function as specific bot command (i. e. @mention of the bot
nick after the !command is required).

This function will be called by process_commands() when the bot
encounters a matching command.

name    - the name of the command (see register_general() for an explanation)
cmdfunc - the function that is called with the Room, LiveMessage and
          SpecificArgumentData when the bot encounters a matching command
args    - whether the command may have arguments (see register_general()
          for an explanation)
625941b926238365f5f0eceb
def process(img):
    backproject = cv.CreateImage(cv.GetSize(img), 8, 1)
    cv.Add(img, backproject, backproject)
    return backproject
gray = cv.CreateImage(cv.GetSize(img), 8, 1)
cv.CvtColor(img, gray, cv.CV_RGB2GRAY)
#cv.EqualizeHist(gray, gray)
canny = cv.CreateImage(cv.GetSize(img), 8, 1)
cv.Canny(gray, canny, THRESHOLD, THRESHOLD*3)
combined = cv.CreateImage(cv.GetSize(img), 8, 1)
cv.Add(gray, canny, combined)
625941b9796e427e537b0444
def response_load_gateway_by_type_and_id(gateway_type, gateway_id):
    if gateway_id is None:
        return (None, 403, None)
    gateway_read_start = storagetypes.get_time()
    gateway = storage.read_gateway(gateway_id)
    if gateway is None:
        return (None, 404, None)
    if GATEWAY_TYPE_TO_STR.get(gateway.gateway_type) is None:
        return (None, 400, None)
    if GATEWAY_TYPE_TO_STR[gateway.gateway_type] != gateway_type:
        return (None, 401, None)
    gateway_read_time = storagetypes.get_time() - gateway_read_start
    return (gateway, 200, gateway_read_time)
Given a gateway's numeric type and ID, load it from the datastore.
625941b9090684286d50eb62
def lengthOfLongestSubstring(self, s):
    char_idx = {}
    max_len = 0
    start = 0
    for idx, ch in enumerate(s):
        if ch in char_idx and char_idx[ch] >= start:
            start = char_idx[ch] + 1
        char_idx[ch] = idx
        max_len = max(max_len, idx - start + 1)
    return max_len
:type s: str
:rtype: int
625941b985dfad0860c3acda
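A usage sketch for the sliding-window routine above; the enclosing class name Solution is an assumption:

# Hypothetical harness: assumes lengthOfLongestSubstring is defined on a class named Solution.
s = Solution()
print(s.lengthOfLongestSubstring("abcabcbb"))  # -> 3 ("abc")
print(s.lengthOfLongestSubstring("bbbbb"))     # -> 1 ("b")
print(s.lengthOfLongestSubstring("pwwkew"))    # -> 3 ("wke")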
def main(args: Optional[List[str]] = None) -> int:
    parser = get_parser()
    opts = parser.parse_args(args=args)
    print(opts)
    return 0
Run the main program.

This function is executed when you type `neo4j-api` or `python -m neo4j_api`.

Arguments:
    args: Arguments passed from the command line.

Returns:
    An exit code.
625941b9b545ff76a8913ca0
def get_count(query, registry_path, verbatim=True, finetuning=True):
    book = load_workbook(registry_path, read_only=True)
    table = row_generator(book['Experiments'])
    count = 0
    for row in table:
        if verbatim:
            if query in row:
                count += 1
        else:
            row_strings = list()
            for el in row:
                try:
                    row_strings.append(el.encode('utf-8'))
                except AttributeError:
                    # skip cells that are not strings
                    continue
            for el in row_strings:
                if not finetuning:
                    if query in el and '_ft' not in el:
                        count += 1
                else:
                    if query in el:
                        count += 1
    return count
Returns the number of rows in which this query appears.

If the verbatim option is True, then query 'abc' would yield 0 even if
'abcd' is in the log. If verbatim is False, then if any element contains
the entry it will be counted.

Used primarily to determine the seed number on repeated experiments.
625941b93eb6a72ae02ec35b
def SaveOverlappingPairsToFile(OverlappingPairs, JsonFileName):
    OverlappingGenes = {}
    for pair in OverlappingPairs:
        if pair[0] in OverlappingGenes:
            OverlappingGenes[pair[0]].append(pair[1])
        else:
            OverlappingGenes[pair[0]] = [pair[1]]
    newfile = open(JsonFileName + '.json', 'w')
    json.dump(OverlappingGenes, newfile, sort_keys=True, indent=4)
    newfile.close()
(list, str) -> file

Take a list of overlapping gene pairs and save the overlapping
relationships to a json file.
625941b963d6d428bbe44371
def set_training_callback(self):
    get_config().tk_vars["refreshgraph"].trace("w", self.update_current_session)
    get_config().tk_vars["istraining"].trace("w", self.remove_current_session)
Add a callback to update analysis when the training graph is updated
625941b9e1aae11d1e749b36
def get_test_augmentation(grayscale=False, height=320, width=640, crop_mode=0):
    mea = mean
    st = std
    if grayscale:
        mea = (mean[0] + mean[1] + mean[2]) / 3
        st = (std[0] + std[1] + std[2]) / 3
    test_transform = [
        albu.Resize(height, width)
    ]
    test_transform.extend(
        [
            albu.Normalize(mean=mea, std=st, p=1),
            ToTensor(),
        ]
    )
    return albu.Compose(test_transform)
Build the test-time transform: resize to (height, width), normalize, and convert to tensor.
625941b95166f23b2e1a4fdb
def find_shortest_path(node_parent_map, root, destination, splist):
    if destination == root:
        splist.append(root)
        return splist
    else:
        splist.append(destination)
        return find_shortest_path(node_parent_map, root,
                                  node_parent_map[destination], splist)
After Dijkstra's algorithm creates the graph that contains the shortest
path from the root to all nodes, this algorithm will find the shortest
path from the destination node back to the root node.

:param node_parent_map: the hashmap that maps nodes to their parent node
    according to the shortest paths from the root
:param root: the root node
:param destination: the destination node
:param splist: the list that will contain the shortest path
:return:
625941b94d74a7450ccd4044
def poolNames(self):
    # list(...) is needed in Python 3, where dict.keys() is a view
    # that has no remove() method
    pools = list(self.pools.keys())
    pools.remove('__Corpus__')
    pools.sort()
    return pools
Return a sorted list of Pool names.

Does not include the system pool '__Corpus__'.
625941b94d74a7450ccd4045
def _c_optimizations_available():
    catch = () if _c_optimizations_required() else (ImportError,)
    try:
        from zope.container import _zope_container_contained as c_opt
        return c_opt
    except catch:
        return False
Return the C optimization module, if available, otherwise a false value.

If the optimizations are required but not available, this raises the
ImportError. This does not say whether they should be used or not.
625941b9566aa707497f43fb
def _add_modify(self, entry):
    self._add_history(entry_type='MODIFICATION', entry=entry)
Add the passed string as MODIFICATION to the history
625941b9091ae35668666de7
def test_executetimeout(host):
    rv = requests.get(host + ("?SERVICE=WPS&Request=Execute&Identifier=lzmtest:testlongprocess&Version=1.0.0"
                              "&MAP=france_parts&DATAINPUTS=PARAM1=1&TIMEOUT=3"))
    assert rv.status_code == 424
Test execute timeout
625941b966673b3332b91f19
def numRollsToTarget(self, d, f, target):
    history = dict()

    def recursion(d, f, target, history):
        ways = 0
        if target > d * f or target < d:
            return ways
        if d == 1:
            ways = 1
        else:
            for i in range(1, f + 1):
                if (d - 1, target - i) not in history.keys():
                    temp = recursion(d - 1, f, target - i, history)
                    history[(d - 1, target - i)] = temp
                else:
                    temp = history[(d - 1, target - i)]
                ways += temp
        return ways % (10**9 + 7)

    return recursion(d, f, target, history)
:type d: int
:type f: int
:type target: int
:rtype: int
625941b98e05c05ec3eea1f3
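A usage sketch for the memoized dice-roll counter above; the enclosing class name Solution is an assumption:

# Hypothetical harness: assumes numRollsToTarget is defined on a class named Solution.
s = Solution()
print(s.numRollsToTarget(1, 6, 3))  # -> 1: a single die shows 3 in exactly one way
print(s.numRollsToTarget(2, 6, 7))  # -> 6: (1,6), (2,5), (3,4), (4,3), (5,2), (6,1)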
def leibniz(n=10000):
    try:
        assert isinstance(n, int)
    except AssertionError:
        print("n must be an integer")
        return False
    try:
        assert n >= 0
    except AssertionError:
        print("n must be >= 0")
        return False

    def lbzPicalc(n=10000):
        yield 2 / ((4*n + 1) * (4*n + 3))

    p = 0
    for i in range(n):
        p += lbzPicalc(i).__next__()
    return 4 * p
>>> leibniz()
3.1415426535898203
>>> leibniz('hi')
n must be an integer
False
>>> leibniz(-12)
n must be >= 0
False
625941b930c21e258bdfa31e
def insert_device_tree(self, abs_dtbo):
    self._client.insert_device_tree(abs_dtbo)
Insert device tree segment.

For device tree segments associated with full / partial bitstreams,
users can provide the relative or absolute paths of the dtbo files.

Parameters
----------
abs_dtbo : str
    The absolute path to the device tree segment.
625941b901c39578d7e74cc5
def calculate_pricing_for(self, analyse):
    institute = self.admission.institution
    analyse_price = institute.analysepricing_set.filter(analyse_type=analyse.type)
    if analyse_price:
        # note: this branch returns a 2-tuple (price, discount_rate),
        # unlike the 3-tuple returned below
        return analyse_price[0].price, analyse_price[0].discount_rate
    use_alt_pricing = False
    institute_discount_rate = None
    if hasattr(institute, 'institutepricing'):
        use_alt_pricing = institute.institutepricing.use_alt_pricing
        institute_discount_rate = institute.institutepricing.discount_rate
    list_price = analyse.type.alternative_price if use_alt_pricing else analyse.type.price
    discounted_price = list_price * (institute_discount_rate or 1)
    return discounted_price, list_price, institute_discount_rate
Get analyse price and discount rate for this admission's institution.

Args:
    analyse: Analyse object

Returns:
    tuple, (discounted price, list price, discount rate)
625941b966656f66f7cbc02c
def test_get_all_sales(self):
    access_token = self.user_token_get()
    sales = {"sales": "products"}
    response = self.client().get('/api/v1/sales', data=sales,
                                 content_type='application/json',
                                 headers=dict(Authorization="Bearer " + access_token))
    self.assertEqual(response.status_code, 200)
Tests API can get all sales.
625941b9656771135c3eb6f4
def input_signature(self):
    return _ccsds_swig.trunk_tail_sptr_input_signature(self)
input_signature(trunk_tail_sptr self) -> io_signature_sptr
625941b98e05c05ec3eea1f4
def pop(self):
    if self.r:
        return self.r.pop()
:rtype: void
625941b97d43ff24873a2b26
def test_di(self):
    exec_instruction("DI",
                     [(ExprId("PSW", 32), ExprInt(1, 32))],
                     [(ExprId("PSW", 32), ExprInt(0, 32))])
Test DI execution
625941b9293b9510aa2c311b
def _get_param(self, param_name):
    return self.param_dicts[param_name]['get']()
Get the current value of the specified parameter via the value getter
function 'get' stored in self.param_dicts.

Parameters
----------
param_name: str
    Name of the parameter whose current value is retrieved.

Returns
-------
self.param_dicts[param_name]['get'](camera): int, float, str, ...etc
    Current value of param_name. The type depends on the kind of parameter.
625941b9e8904600ed9f1dab
def floor(self):
    year, month, day, hour, minute, second, microsecond = \
        self.year, self.month, self.day, self.hour, self.minute, \
        self.second, self.microsecond
    if year is None:
        raise ValueError("Date has no year")
    if month is None:
        month = 1
    if day is None:
        day = 1
    if hour is None:
        hour = 0
    if minute is None:
        minute = 0
    if second is None:
        second = 0
    if microsecond is None:
        microsecond = 0
    return datetime(year, month, day, hour, minute, second, microsecond)
Returns a ``datetime`` version of this object with all unspecified
(None) attributes replaced by their lowest values.

This method raises an error if the ``adatetime`` object has no year.

>>> adt = adatetime(year=2009, month=5)
>>> adt.floor()
datetime.datetime(2009, 5, 1, 0, 0, 0, 0)
625941b915fb5d323cde098c
def runPickPath(pickpath):
    initDisplays(pickpath.for_init_displays)
    pickpath_completed = False
    remaining_source_bin_tags_and_counts = copy.deepcopy(pickpath.source_bins_in_dict)
    receive_bin_tag = pickpath.receive_bin.tag
    correctly_pressed_source_bins = list()
    while not pickpath_completed:
        pressed_bin_tag = press()
        if pressed_bin_tag is None:
            continue
        logger.debug("Subject pressed %s." % pressed_bin_tag)
        if pressed_bin_tag in remaining_source_bin_tags_and_counts:
            ChangeDisplay(display=pressed_bin_tag, number=0)
            correctly_pressed_source_bins.append(pressed_bin_tag)
            pressed_source_bin_count = remaining_source_bin_tags_and_counts.pop(pressed_bin_tag)
            new_receive_bin_total = sum(remaining_source_bin_tags_and_counts.values())
            logger.debug("%s with %d items was pressed. Decrementing total to %d"
                         % (pressed_bin_tag, pressed_source_bin_count, new_receive_bin_total))
            ChangeDisplay(display=receive_bin_tag, number=new_receive_bin_total)
        elif not remaining_source_bin_tags_and_counts and pressed_bin_tag == receive_bin_tag:
            for tag in correctly_pressed_source_bins + [receive_bin_tag]:
                logger.debug("Clearing display %s" % tag)
                ChangeDisplay(display=tag, layout=EMPTY_LIGHT_LAYOUT)
            pickpath_completed = True
        else:
            logger.warning("Unexpected button pressed: %s" % pressed_bin_tag)
Function that runs a full pick path.

Args:
    pickpath (dict): keys are displays and values are quantities
625941b9cc40096d615957d5
def get_current_year():
    return _get_cal()[0]
Returns the current year.
625941b98e71fb1e9831d62f
def cache_file_write(data):
    f = None
    try:
        with atomicwrites.atomic_write(CACHE_FILE, overwrite=True) as f:
            json.dump(data, f, indent=2)
            f.write("\n")
    except OSError as e:
        util.warn("Failed to write cache file: {}".format(e))
    finally:
        if f:
            try:
                pathlib.Path(f.name).unlink()
            except OSError:
                pass
Write provided `data` to cache file, atomically.

If this fails, log the error.
625941b931939e2706e4ccf2
def __init__(self, message, cause=None):
    super(Exception, self).__init__(message)
    self.cause = cause
construct the exception, providing a message

:argument str message: an explanation of the error condition.
:argument Exception cause: the exception representing the underlying
    cause of the exception. If None, there was no such underlying cause.
625941b932920d7e50b2804f
def planar_flow(z, params):
    D = z.shape[0]
    num_params = params.shape[0]
    assert num_params == get_num_flow_params(PlanarFlow, D)
    _u = params[:D]
    w = params[D:(2 * D)]
    b = params[2 * D]
    wdotu = np.dot(w, _u)
    m_wdotu = -1.0 + np.log(1.0 + np.exp(wdotu))
    u = _u + (m_wdotu - wdotu) * w / np.dot(w, w)
    out = z + u * np.tanh(np.dot(w, z) + b)
    phi = (1.0 - np.square(np.tanh(np.dot(w, z) + b))) * w
    log_det_jac = np.log(np.abs(1.0 + np.dot(u, phi)))
    return out, log_det_jac
Planar flow operation and log abs det jac.

    f(z) = z + u * tanh(w^T z + b)
    log|det J| = log|1 + u^T phi(z)|,  phi(z) = (1 - tanh^2(w^T z + b)) * w

# Arguments
    z (np.array): [D,] Input vector.
    params (np.array): [num_param,] Total parameter vector

# Returns
    out (np.array): [D,] Output of affine flow operation.
    log_det_jacobian (np.float): Log abs det jac.
625941b9b7558d58953c4d9d
def create(self, project=None):
    if project is None:
        project = get_default_project()
    if project is None:
        raise EnvironmentError('Project could not be inferred '
                               'from environment.')
    query_params = {'project': project}
    self._properties = self.connection.api_request(
        method='POST', path='/b', query_params=query_params,
        data={'name': self.name})
Creates current bucket.

If the bucket already exists, will raise
:class:`gcloud.exceptions.Conflict`.

This implements "storage.buckets.insert".

:type project: string
:param project: Optional. The project to use when creating bucket.
                If not provided, falls back to default.

:rtype: :class:`gcloud.storage.bucket.Bucket`
:returns: The newly created bucket.
:raises: :class:`EnvironmentError` if the project is not given and
         can't be inferred.
625941b94527f215b584c2dd
def get_queryset(self):
    return Question.objects.filter(
        pub_date__lte=timezone.now()
    ).order_by('-pub_date')[:5]
Return the last five published questions.
625941b9b5575c28eb68de80
def _convert_estimator_io_to_keras(keras_model, features, labels):
    def _to_ordered_tensor_list(obj, key_order, obj_name, order_name):
        if obj is None:
            return None
        elif isinstance(obj, (list, tuple)):
            return [_convert_tensor(x) for x in obj]
        elif isinstance(obj, dict):
            different_keys = set(obj.keys()) ^ set(key_order)
            if different_keys:
                raise KeyError(
                    'The dictionary passed into {obj_name} does not have the expected '
                    '{order_name} keys defined in the keras model.'
                    '\n\tExpected keys: {order_keys}'
                    '\n\t{obj_name} keys: {obj_keys}'
                    '\n\tDifference: {different_keys}'.format(
                        order_name=order_name, order_keys=set(key_order),
                        obj_name=obj_name, obj_keys=set(obj.keys()),
                        different_keys=different_keys))
            return [_convert_tensor(obj[key]) for key in key_order]
        else:
            return [_convert_tensor(obj)]

    input_names = None
    output_names = None
    if isinstance(features, dict):
        input_names = (
            keras_model.input_names if keras_model._is_graph_network else
            ['input_%d' % i for i in range(1, len(features) + 1)])
    if isinstance(labels, dict):
        output_names = (
            keras_model.output_names if keras_model._is_graph_network else
            ['output_%d' % i for i in range(1, len(labels) + 1)])
    input_tensors = _to_ordered_tensor_list(
        features, input_names, 'features', 'inputs')
    target_tensors = _to_ordered_tensor_list(
        labels, output_names, 'labels', 'outputs')
    return input_tensors, target_tensors
Converts estimator features and labels to keras input and target tensors.

Args:
    keras_model: a compiled `tf.keras.Model` instance, used to determine
        the order of the returned lists.
    features: Dict of tensors or `None`.
    labels: Dict of tensors, a single tensor, or `None`.

Returns:
    Tuple of (
        list of input tensors or `None`,
        list of target tensors or `None`)
    The order of tensors is determined by the order set in the keras model.
625941b91f5feb6acb0c49d7
def setup_reloader(self):
    if self.reload:
        reloader = hupper.start_reloader("wsgiserve.main")
        reloader.watch_files([self.config])
Set up the reloader if the reload option is specified.
625941b9498bea3a759b9933
def get_vm_list(self):
    text = self.run('xe vm-list')
    self.vm_names = self._parse_vm_uuid(text)
    return self.vm_names
Return a dict of vm-name to uuid
625941b98a349b6b435e7ff7
def get_rsp(self, service_name, func_name, msg_id):
    response = self.stub.get_rsp(
        GetRequest(
            service_name=service_name,
            func_name=func_name,
            msg_id=msg_id,
        ))
    return response.payload
Get a response. This is a non-blocking function.

:param service_name: The remote service to call.
:param func_name: The remote function to call.
:param msg_id: The message ID, may be None.
:return: The payload of the response.
625941b9cb5e8a47e48b7931
def get_attr(self, key):
    return self.__dict__[key]
This method gets attribute value of the GtFilter object.
625941b9ec188e330fd5a628
def find_adjacent(overlapping_information: list, existing_nodes: list):
    result_connections = []
    for node in existing_nodes:
        already_checked = False
        for c in result_connections:
            if node in c:
                already_checked = True
                break
        if already_checked:
            continue
        connection_list = [node]
        has_changed = True
        while has_changed:
            has_changed = False
            for direct_connection in overlapping_information:
                will_be_checked = False
                for n in connection_list:
                    if n in direct_connection:
                        will_be_checked = True
                        break
                if will_be_checked:
                    for new_node in direct_connection:
                        if new_node not in connection_list:
                            connection_list.append(new_node)
                            has_changed = True
        result_connections.append(connection_list)
    return result_connections
Gets a list of directly connected subgraphs and creates the indirect
connections.

:param overlapping_information: a list of lists each containing direct
    connections between some subgraphs.
:param existing_nodes: a list containing each existing node once.
:return: a list of lists each containing all reachable subgraphs with
    other connected subgraphs in between.
625941b97d847024c06be143
def context_properties():
    from . import _data
    return _data.contextproperties
Returns the list of context properties.
625941b94e696a04525c92d5
def get_hosted_zones_by_domain(Name, region=None, key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    zones = [z for z in _collect_results(conn.list_hosted_zones, 'HostedZones', {})
             if z['Name'] == Name]
    ret = []
    for z in zones:
        ret += get_hosted_zone(Id=z['Id'], region=region, key=key, keyid=keyid,
                               profile=profile)
    return ret
Find any zones with the given domain name and return detailed info about
them. Note that this can return multiple Route53 zones, since a domain
name can be used in both public and private zones.

Name
    The domain name associated with the Hosted Zone(s).
region
    Region to connect to.
key
    Secret key to be used.
keyid
    Access key to be used.
profile
    Dict, or pillar key pointing to a dict, containing AWS
    region/key/keyid.

CLI Example:

.. code-block:: bash

    salt myminion boto3_route53.get_hosted_zones_by_domain salt.org. profile='{"region": "us-east-1", "keyid": "A12345678AB", "key": "xblahblahblah"}'
625941b950485f2cf553cc1b
def third_order_LeviCevita_Tensor(self):
    e = np.array(
        [[[int((i - j) * (j - k) * (k - i) / 2) for k in range(3)]
          for j in range(3)]
         for i in range(3)],
        dtype=FLOAT)
    return e
Return a third order Levi-Civita tensor e_ijk.

Here,
    eijk[0, 1, 2] = eijk[1, 2, 0] = eijk[2, 0, 1] = 1
    eijk[0, 2, 1] = eijk[2, 1, 0] = eijk[1, 0, 2] = -1
625941b90a50d4780f666d12
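A standalone check of the Levi-Civita construction used above, independent of the enclosing class (FLOAT dtype omitted here, since the identities hold for integers):

import numpy as np
# (i-j)*(j-k)*(k-i)/2 yields +1 on even permutations, -1 on odd ones, 0 otherwise
e = np.array([[[int((i - j) * (j - k) * (k - i) / 2) for k in range(3)]
               for j in range(3)] for i in range(3)])
assert e[0, 1, 2] == e[1, 2, 0] == e[2, 0, 1] == 1
assert e[0, 2, 1] == e[2, 1, 0] == e[1, 0, 2] == -1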
def start_cluster(seed_data, cluster):
    print("=> Starting cassandra cluster SEED nodes")
    started_nodes = []
    for node in seed_data:
        node_start_cassandra(node['zone'], node['name'])
        started_nodes.append(node['name'])
    print("=> Starting cassandra cluster non-SEED nodes")
    for z in cluster.keys():
        for node in cluster[z]:
            if node['name'] not in started_nodes:
                node_start_cassandra(z, node['name'])
Bring up cassandra on cluster nodes, SEEDs first
625941b99c8ee82313fbb5f7
def hold(name=None, pkgs=None, sources=None, **kwargs):
    on_redhat_5 = __grains__.get('osmajorrelease', None) == '5'
    lock_pkg = 'yum-versionlock' if on_redhat_5 else 'yum-plugin-versionlock'
    if lock_pkg not in list_pkgs():
        raise SaltInvocationError(
            'Packages cannot be held, {0} is not installed.'.format(lock_pkg)
        )
    if not name and not pkgs and not sources:
        raise SaltInvocationError(
            'One of name, pkgs, or sources must be specified.'
        )
    if pkgs and sources:
        raise SaltInvocationError(
            'Only one of pkgs or sources can be specified.'
        )
    targets = []
    if pkgs:
        for pkg in salt.utils.repack_dictlist(pkgs):
            ret = check_db(pkg)
            if not ret[pkg]['found']:
                # report the package being checked, not the unrelated name arg
                raise SaltInvocationError(
                    'Package {0} not available in repository.'.format(pkg)
                )
        targets.extend(pkgs)
    elif sources:
        for source in sources:
            targets.append(next(six.iterkeys(source)))
    else:
        ret = check_db(name)
        if not ret[name]['found']:
            raise SaltInvocationError(
                'Package {0} not available in repository.'.format(name)
            )
        targets.append(name)
    current_locks = get_locked_packages(full=False)
    ret = {}
    for target in targets:
        if isinstance(target, dict):
            target = next(six.iterkeys(target))
        ret[target] = {'name': target,
                       'changes': {},
                       'result': False,
                       'comment': ''}
        if target not in current_locks:
            if 'test' in __opts__ and __opts__['test']:
                ret[target].update(result=None)
                ret[target]['comment'] = ('Package {0} is set to be held.'
                                          .format(target))
            else:
                cmd = '{yum_command} -q versionlock {0}'.format(
                    target, yum_command=_yum())
                out = __salt__['cmd.run_all'](cmd)
                if out['retcode'] == 0:
                    ret[target].update(result=True)
                    ret[target]['comment'] = ('Package {0} is now being held.'
                                              .format(target))
                    ret[target]['changes']['new'] = 'hold'
                    ret[target]['changes']['old'] = ''
                else:
                    ret[target]['comment'] = ('Package {0} was unable to be held.'
                                              .format(target))
        else:
            ret[target].update(result=True)
            ret[target]['comment'] = ('Package {0} is already set to be held.'
                                      .format(target))
    return ret
.. versionadded:: 2014.7.0 Hold packages with ``yum -q versionlock``. name The name of the package to be held. Multiple Package Options: pkgs A list of packages to hold. Must be passed as a python list. The ``name`` parameter will be ignored if this option is passed. sources A list of single-key dicts mapping package names to sources; only the package names (the dict keys) are used as hold targets. The ``name`` parameter will be ignored if this option is passed. Returns a dict containing the changes. CLI Example: .. code-block:: bash salt '*' pkg.hold <package name> salt '*' pkg.hold pkgs='["foo", "bar"]'
625941b9851cf427c661a395
def addBinary(self, a, b): <NEW_LINE> <INDENT> return bin(int(a, 2) + int(b, 2))[2:]
:type a: str :type b: str :rtype: str
625941b9925a0f43d2549cf6
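A quick usage check for the rewritten method (the Solution wrapper class is assumed from the LeetCode-style signature):

    class Solution:
        def addBinary(self, a, b):
            # Parse the base-2 strings, add, and render without the '0b' prefix.
            return bin(int(a, 2) + int(b, 2))[2:]

    print(Solution().addBinary('11', '1'))       # '100'   (3 + 1 = 4)
    print(Solution().addBinary('1010', '1011'))  # '10101' (10 + 11 = 21)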
def recvRtspReply(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> reply = self.rtsp_socket.recv(1024) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if not reply: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> data = reply.decode('utf-8') <NEW_LINE> if DISPLAY_MODE: <NEW_LINE> <INDENT> print('\n@ Data recv:\n' + data) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if not self.parseRtspReply(data): <NEW_LINE> <INDENT> raise KeyError <NEW_LINE> <DEDENT> if self.rtsp_reply['Code'] != '200': <NEW_LINE> <INDENT> raise RuntimeError <NEW_LINE> <DEDENT> if (not self.rtsp_request_code == self.DESCRIBE) and (not self.rtsp_request_code == self.SETUP) and (not self.rtsp_request_code == self.TEARDOWN) and (not self.session_id == self.rtsp_reply['Session']): <NEW_LINE> <INDENT> raise KeyError <NEW_LINE> <DEDENT> if self.rtsp_request_code == self.DESCRIBE: <NEW_LINE> <INDENT> if not self.__describe(): <NEW_LINE> <INDENT> raise RuntimeError <NEW_LINE> <DEDENT> <DEDENT> elif self.rtsp_request_code == self.SETUP: <NEW_LINE> <INDENT> if not self.__setup(): <NEW_LINE> <INDENT> raise RuntimeError <NEW_LINE> <DEDENT> <DEDENT> elif self.rtsp_request_code == self.PLAY: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif self.rtsp_request_code == self.PAUSE: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif self.rtsp_request_code == self.TEARDOWN: <NEW_LINE> <INDENT> if not self.__teardown(): <NEW_LINE> <INDENT> raise RuntimeError <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> self.updateWindow() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> print('@ Server error: server (IP = %s, port = %s) sent a malformed reply!' % (self.server_addr, self.server_port)) <NEW_LINE> <DEDENT> except NotImplementedError: <NEW_LINE> <INDENT> print('@ Server error: server (IP = %s, port = %s) does not implement this command!' % (self.server_addr, self.server_port)) <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> print('@ Server error: server (IP = %s, port = %s) hit a runtime error!' % (self.server_addr, self.server_port)) <NEW_LINE> <DEDENT> <DEDENT> self.__teardown() <NEW_LINE> try: <NEW_LINE> <INDENT> self.rtsp_socket.shutdown(socket.SHUT_RDWR) <NEW_LINE> self.rtsp_socket.close() <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass
Receive RTSP/TCP replies (blocking loop)
625941b923e79379d52ee3ea
@APP.post('/_<action>projet/<nom>') <NEW_LINE> @b.auth_basic(a.editeur, _('Réservé aux éditeurs')) <NEW_LINE> @page <NEW_LINE> def projet_action(action, nom): <NEW_LINE> <INDENT> forms = rq.forms.decode() <NEW_LINE> if action == 'recevoir' and forms.action == 'recevoir': <NEW_LINE> <INDENT> return Projet(nom).recevoir(forms.origine) <NEW_LINE> <DEDENT> elif action == 'emettre' and forms.action == 'emettre': <NEW_LINE> <INDENT> return Projet(nom).envoyer( forms.origine, forms.utilisateur, forms.mdp ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> b.redirect(Projet(nom).url)
Send to / receive from a remote repository
625941b94428ac0f6e5ba675
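A hedged sketch of calling this Bottle route from a client; the host, credentials, and project name are hypothetical, and the form fields mirror the ones the handler reads:

    import requests

    # POST /_emettreprojet/<nom>: the handler requires forms.action to match
    # the <action> URL segment before pushing to the remote repository.
    resp = requests.post(
        'http://localhost:8080/_emettreprojet/monprojet',
        auth=('editor', 'secret'),   # HTTP basic auth, editors only
        data={'action': 'emettre', 'origine': 'https://example.org/depot.git',
              'utilisateur': 'editor', 'mdp': 'secret'},
    )
    print(resp.status_code)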
def _clean_bug_tracker_info(self): <NEW_LINE> <INDENT> use_hosting = self.cleaned_data['bug_tracker_use_hosting'] <NEW_LINE> plan = self.cleaned_data['bug_tracker_plan'] or self.DEFAULT_PLAN_ID <NEW_LINE> bug_tracker_type = self.cleaned_data['bug_tracker_type'] <NEW_LINE> bug_tracker_url = '' <NEW_LINE> if use_hosting: <NEW_LINE> <INDENT> hosting_type = self.cleaned_data['hosting_type'] <NEW_LINE> if hosting_type == self.NO_HOSTING_SERVICE_ID: <NEW_LINE> <INDENT> self.errors['bug_tracker_use_hosting'] = self.error_class([ _('A hosting service must be chosen in order to use this') ]) <NEW_LINE> return <NEW_LINE> <DEDENT> plan = self.cleaned_data['repository_plan'] or self.DEFAULT_PLAN_ID <NEW_LINE> hosting_service_cls = get_hosting_service(hosting_type) <NEW_LINE> assert hosting_service_cls <NEW_LINE> if (hosting_service_cls.supports_bug_trackers and self.cleaned_data.get('hosting_account')): <NEW_LINE> <INDENT> form = self.hosting_repository_forms[hosting_type][plan] <NEW_LINE> if not form.is_valid(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> hosting_account = self.cleaned_data['hosting_account'] <NEW_LINE> new_data = self.cleaned_data.copy() <NEW_LINE> new_data.update(form.cleaned_data) <NEW_LINE> new_data.update(hosting_account.data) <NEW_LINE> new_data['hosting_account_username'] = hosting_account.username <NEW_LINE> new_data['hosting_url'] = hosting_account.hosting_url <NEW_LINE> try: <NEW_LINE> <INDENT> bug_tracker_url = hosting_service_cls.get_bug_tracker_field(plan, new_data) <NEW_LINE> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> raise ValidationError([str(e)]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif bug_tracker_type == self.CUSTOM_BUG_TRACKER_ID: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> elif bug_tracker_type != self.NO_BUG_TRACKER_ID: <NEW_LINE> <INDENT> hosting_service_cls = get_hosting_service(bug_tracker_type) <NEW_LINE> if not hosting_service_cls: <NEW_LINE> <INDENT> self.errors['bug_tracker_type'] = self.error_class([ _('This bug tracker type is not supported') ]) <NEW_LINE> return <NEW_LINE> <DEDENT> form = self.hosting_bug_tracker_forms[bug_tracker_type][plan] <NEW_LINE> if not form.is_valid(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> new_data = dict({ key: self.cleaned_data['bug_tracker_%s' % key] for key in ('hosting_account_username', 'hosting_url') }, **{ key.replace(form.prefix, ''): value for key, value in form.cleaned_data.items() }) <NEW_LINE> try: <NEW_LINE> <INDENT> bug_tracker_url = hosting_service_cls.get_bug_tracker_field( plan, new_data) <NEW_LINE> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> raise ValidationError([str(e)]) <NEW_LINE> <DEDENT> <DEDENT> self.cleaned_data['bug_tracker'] = bug_tracker_url <NEW_LINE> self.data['bug_tracker'] = bug_tracker_url
Clean the bug tracker information. This will figure out the defaults for all the bug tracker fields, based on the stored bug tracker settings.
625941b9099cdd3c635f0adf
def get_queryset(self): <NEW_LINE> <INDENT> return Pregunta.objects.order_by('-pub_date')[:5]
Return the last 5 questions.
625941b94c3428357757c1ae
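For context, a minimal sketch of the generic view this method typically belongs to, following the standard Django tutorial pattern; the view name and template path are assumptions:

    from django.views import generic
    from .models import Pregunta   # assumed app-local model

    class IndexView(generic.ListView):
        template_name = 'polls/index.html'            # hypothetical template
        context_object_name = 'latest_question_list'

        def get_queryset(self):
            # Newest first, capped at five questions.
            return Pregunta.objects.order_by('-pub_date')[:5]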
def load_entity_type(self): <NEW_LINE> <INDENT> fqn = self.fqn.split(".") <NEW_LINE> if len(fqn) < 2: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.entity_type = self.edm_api.get_entity_type( self.edm_api.get_entity_type_id(namespace=fqn[0], name=fqn[1])) <NEW_LINE> <DEDENT> except openlattice.rest.ApiException as exc: <NEW_LINE> <INDENT> self.entity_type = openlattice.EntityType( type=openlattice.FullQualifiedName(), key=[], properties=[] )
Calls the API and loads the entity type information into an instance variable.
625941b9eab8aa0e5d26d9e1
def _impl(inputs, in_state_c, in_state_h, attr, params): <NEW_LINE> <INDENT> in_data = inputs[0] <NEW_LINE> in_weight = inputs[3] <NEW_LINE> in_bias = inputs[7] <NEW_LINE> forget_bias = attr.pop('forget_bias') <NEW_LINE> input_shape = attr['_input_shapes'][inputs[0]] <NEW_LINE> weight_shape = attr['_input_shapes'][inputs[3]] <NEW_LINE> batch_size, input_size = input_shape[0][0], input_shape[0][1] <NEW_LINE> num_hidden_layers = weight_shape[0][1] <NEW_LINE> num_hidden = num_hidden_layers // 4 <NEW_LINE> in_data = _sym.reshape(in_data, shape=(batch_size, input_size)) <NEW_LINE> ixh = _sym.concatenate(*[in_data, in_state_h], axis=1) <NEW_LINE> in_weight = _sym.transpose(in_weight) <NEW_LINE> gates = _sym.dense(ixh, in_weight, in_bias, use_bias=True, units=num_hidden_layers, name="dense") <NEW_LINE> gate_list = _sym.split(gates, indices_or_sections=4, axis=1) <NEW_LINE> in_gate = _sym.sigmoid(gate_list[0]) <NEW_LINE> in_transform = _sym.tanh(gate_list[1]) <NEW_LINE> forget_gate = _sym.sigmoid(gate_list[2] + forget_bias) <NEW_LINE> out_gate = _sym.sigmoid(gate_list[3]) <NEW_LINE> next_c = _sym.broadcast_add(_sym.broadcast_mul(forget_gate, in_state_c), _sym.broadcast_mul(in_gate, in_transform)) <NEW_LINE> next_h = out_gate * _sym.tanh(next_c) <NEW_LINE> out_state = _sym.concatenate(*[next_c, next_h]) <NEW_LINE> out_state = _sym.reshape(out_state, shape=(2, batch_size, num_hidden)) <NEW_LINE> return next_h, out_state
LSTM Block cell. Calculations are described in: https://github.com/tensorflow/tensorflow/blob/ r1.8/tensorflow/contrib/rnn/python/ops/lstm_ops.py#L41-L114 Parameters ---------- inputs : nnvm.Symbol Input data in_state_c: list of nnvm.Symbol Cell state input values for all the layers in_state_h: list of nnvm.Symbol Hidden state input values for all the layers attrs : dict Dict of operator attributes params : dict List of pretrained weights and bias Returns ------- sym : nnvm.Symbol Converted nnvm Symbol output: nnvm.Symbol Output state value.
625941b9be7bc26dc91cd488
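For reference, the gate arithmetic this converter emits corresponds to the standard LSTM block equations, with the forget bias applied inside the sigmoid as in the referenced TensorFlow op (and as in the fix above); W and b denote the packed weight and bias the dense call applies to [x_t, h_{t-1}]:

    i_t = \sigma(W_i [x_t, h_{t-1}] + b_i)
    g_t = \tanh(W_g [x_t, h_{t-1}] + b_g)
    f_t = \sigma(W_f [x_t, h_{t-1}] + b_f + b_{\text{forget}})
    o_t = \sigma(W_o [x_t, h_{t-1}] + b_o)
    c_t = f_t \odot c_{t-1} + i_t \odot g_t, \qquad h_t = o_t \odot \tanh(c_t)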
def test_get_branch(self): <NEW_LINE> <INDENT> pass
Test case for get_branch: Get branch.
625941b9627d3e7fe0d68cd2
def hit_point(self, r): <NEW_LINE> <INDENT> theta = self.slope2rad(self.state[-1][2]) <NEW_LINE> y = self.state[-1][1] <NEW_LINE> m = y * np.cos(math.pi/2 -theta) + np.sqrt((y ** 2) * (np.cos(math.pi/2 - theta)) ** 2 - (y ** 2 - r ** 2)) <NEW_LINE> x = m * np.sin(math.pi / 2 - theta) <NEW_LINE> y = m * np.sin(theta) <NEW_LINE> self.state[-1][0] -= x <NEW_LINE> self.state[-1][1] -= y
Calculate the position where the light ray is incident on the lens. :param r: float the radius of the lens
625941b9e1aae11d1e749b37
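The expression the code evaluates is the ray-circle intersection written out; in the code's symbols (a hedged reading, with y the ray origin's height, θ the slope angle, and r the lens radius):

    m = y\cos\left(\tfrac{\pi}{2}-\theta\right) + \sqrt{y^{2}\cos^{2}\left(\tfrac{\pi}{2}-\theta\right) - \left(y^{2} - r^{2}\right)}
    \Delta x = m\sin\left(\tfrac{\pi}{2}-\theta\right), \qquad \Delta y = m\sin\theta

where (Δx, Δy) is what the method subtracts from the last state entry to land on the lens surface.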
def message(words,color,x,y): <NEW_LINE> <INDENT> turtle.color(color) <NEW_LINE> turtle.up() <NEW_LINE> turtle.goto(x,y) <NEW_LINE> turtle.down() <NEW_LINE> turtle.hideturtle() <NEW_LINE> turtle.write(words,move=False,align="left",font=("Arial",30,"normal")) <NEW_LINE> turtle.up()
signature: str, str, int, int -> NoneType message takes a string of words and the color the user wants them printed in, then writes them on the turtle canvas at the given coordinates. The message is always printed in Arial font at size 30.
625941b9d7e4931a7ee9dd9f
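A minimal usage sketch, assuming the message function above has been defined in the current session; the text and coordinates are arbitrary:

    import turtle

    message('Hello, turtle!', 'blue', -100, 50)   # writes in blue at (-100, 50)
    turtle.done()                                 # keep the window open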
def _identity_lookup( self, mapper, primary_key_identity, identity_token=None, passive=attributes.PASSIVE_OFF, lazy_loaded_from=None, ): <NEW_LINE> <INDENT> key = mapper.identity_key_from_primary_key( primary_key_identity, identity_token=identity_token ) <NEW_LINE> return loading.get_from_identity(self, mapper, key, passive)
Locate an object in the identity map. Given a primary key identity, constructs an identity key and then looks in the session's identity map. If present, the object may be run through unexpiration rules (e.g. load unloaded attributes, check if was deleted). e.g.:: obj = session._identity_lookup(inspect(SomeClass), (1, )) :param mapper: mapper in use :param primary_key_identity: the primary key we are searching for, as a tuple. :param identity_token: identity token that should be used to create the identity key. Used as is, however overriding subclasses can repurpose this in order to interpret the value in a special way, such as if None then look among multiple target tokens. :param passive: passive load flag passed to :func:`.loading.get_from_identity`, which impacts the behavior if the object is found; the object may be validated and/or unexpired if the flag allows for SQL to be emitted. :param lazy_loaded_from: an :class:`.InstanceState` that is specifically asking for this identity as a related identity. Used for sharding schemes where there is a correspondence between an object and a related object being lazy-loaded (or otherwise relationship-loaded). :return: None if the object is not found in the identity map, *or* if the object was unexpired and found to have been deleted. if passive flags disallow SQL and the object is expired, returns PASSIVE_NO_RESULT. In all other cases the instance is returned. .. versionchanged:: 1.4.0 - the :meth:`.Session._identity_lookup` method was moved from :class:`.Query` to :class:`.Session`, to avoid having to instantiate the :class:`.Query` object.
625941b9507cdc57c6306b57
def getHostIpVMK0(netcfg): <NEW_LINE> <INDENT> ipaddr = None <NEW_LINE> for vnic in netcfg: <NEW_LINE> <INDENT> if vnic.device == 'vmk0': <NEW_LINE> <INDENT> ipaddr = vnic.spec.ip.ipAddress <NEW_LINE> <DEDENT> <DEDENT> return ipaddr
Get the host's IP address from its vmk0 interface.
625941b92c8b7c6e89b35646
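A hedged sketch of feeding this helper from pyVmomi; host is assumed to be a vim.HostSystem managed object already retrieved from the service instance:

    # Each entry of host.config.network.vnic carries .device ('vmk0', 'vmk1', ...)
    # and .spec.ip.ipAddress, which is exactly what getHostIpVMK0 scans.
    netcfg = host.config.network.vnic
    print(getHostIpVMK0(netcfg))   # e.g. '192.168.1.10', or None if no vmk0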
def set_activate_port(self, board, port): <NEW_LINE> <INDENT> if (board not in self.boards) or (port not in range(0, self.ports)): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.write_read_data('config', short=True) <NEW_LINE> self.write_read_data('interface adsl 0/{}'.format(board), short=True) <NEW_LINE> self.write_read_data('activate {}'.format(port)) <NEW_LINE> self.write_read_data('quit', short=True) <NEW_LINE> self.write_read_data('quit', short=True)
Activate the port.
625941b90c0af96317bb806c
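A usage sketch to close out the entry; dsl is assumed to be an instance of the DSLAM wrapper class this method belongs to, and the board/port numbers are hypothetical:

    # Activate port 3 on ADSL board 0. The method returns False up front if the
    # board or port is out of range; otherwise it walks the CLI config dialogue.
    dsl.set_activate_port(board=0, port=3)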