Dataset columns:
  code       string (4 to 4.48k chars)
  docstring  string (1 to 6.45k chars)
  _id        string (24 chars)
def _construct_angles(self):
    if not hasattr(self, "_angles"):
        self._construct_bonded_atoms_list()
        self._angles = set()
        for atom1 in self._atoms:
            for atom2 in self._bondedAtoms[atom1]:
                for atom3 in self._bondedAtoms[atom2]:
                    if atom1 == atom3:
                        continue
                    if atom1.molecule_atom_index < atom3.molecule_atom_index:
                        self._angles.add((atom1, atom2, atom3))
                    else:
                        self._angles.add((atom3, atom2, atom1))
Get an iterator over all i-j-k angles.
625941b794891a1f4081b8d8
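A minimal standalone sketch of the same bond-walk and canonical-ordering trick (plain integer indices stand in for atoms and their molecule_atom_index; names here are hypothetical, not the library's API):

# `bonded` maps each atom index to its bonded neighbors.
def construct_angles(bonded):
    angles = set()
    for a1 in bonded:
        for a2 in bonded[a1]:
            for a3 in bonded[a2]:
                if a1 == a3:
                    continue
                # Canonical ordering so (i, j, k) and (k, j, i) dedupe.
                angles.add((a1, a2, a3) if a1 < a3 else (a3, a2, a1))
    return angles

# Water-like topology: O(0) bonded to H(1) and H(2) yields one angle.
bonded = {0: [1, 2], 1: [0], 2: [0]}
assert construct_angles(bonded) == {(1, 0, 2)}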
def player_title(player_name: str) -> None:
    player_name += ' cards'
    len_title_line: int = (
        (int(term_width / 2) - int((len(player_name) / 2) + 1))
        + (int(term_width / 2) - int((len(player_name) / 2) + 1))
        + len(player_name)
    ) + 2
    end_sep_count: int = 2 if (len_title_line > term_width) else 1 if (len_title_line == term_width) else 0
    print(
        '-' * (int(term_width / 2) - int((len(player_name) / 2) + 1)),
        player_name.title(),
        '-' * (int(term_width / 2) - int((len(player_name) / 2) + end_sep_count)) + '\n'
    )
Print a separator line containing the player's name.
625941b730bbd722463cbbf2
@login_required
def admin(request):
    all_tags = Tag.objects.all().values_list("name", flat=True)
    if request.method == "POST" and request.user.is_superuser:
        image_list = request.FILES.getlist("imagedata")
        tags = filter(lambda s: s != "", request.POST["tags"].split(","))
        new_tags = list(set(tags) - (set(tags) & set(all_tags)))
        for tag_name in new_tags:
            Tag.objects.create(name=tag_name)
        tags = Tag.objects.filter(name__in=tags)
        description = request.POST["description"]
        for image in image_list:
            image_type = image.name.split(".")[-1]
            image_name = "%s.%s" % (gen_next_name(), image_type)
            image_data = image.read()
            save_image(image_name, image_data, tags, description or image.name)
        return HttpResponseRedirect(reverse("admin"))
    else:
        keyword = request.GET.get("keyword", "")
        if request.GET.get("tags"):
            tags = Tag.objects.filter(name__in=filter(lambda s: s != "", request.GET["tags"].split(",")))
            query = Image.objects.filter(description__contains=keyword)
            for tag in tags:
                query = query.filter(tag=tag)
            image_list = query.order_by("-created").distinct()
        else:
            image_list = Image.objects.filter(description__contains=keyword).order_by("-created").distinct()
        paginator = Paginator(image_list, 50)
        page = request.GET.get('page', '1')
        try:
            contacts = paginator.page(page)
        except (EmptyPage, PageNotAnInteger):
            contacts = []
        get_q = dict(request.GET.items())
        if "page" in get_q:
            del get_q["page"]
        for key, val in get_q.items():
            if isinstance(val, unicode):
                get_q[key] = val.encode("utf-8")
        nextparams = urllib.urlencode(get_q)
        number = u"%d件" % len(image_list)
        return direct_to_template(request, "admin.html",
                                  {"contacts": contacts, "number": number,
                                   "nextparams": nextparams, "tags": all_tags})
Admin screen.
625941b7fb3f5b602dac34be
def remainder_(self, divisor):
    return self.arithmetic_operation(divisor, "remainder", 'FloatTensor')
Computes the element-wise remainder of division, in place.

Returns
-------
FloatTensor
    Caller with values modified in place
625941b7b57a9660fec336b0
def show_virtual_network(self, uuid):
    url = '/virtual-network/%s' % uuid
    return self.get(url)
:param uuid: UUID of the virtual network to fetch.
:return: The GET response for that virtual network.
625941b74e4d5625662d420d
def load_number_around_mines(self):
    for row in range(self.HEIGHT):
        for col in range(self.WIDTH):
            self.set_number(row, col)
Load the number of surrounding mines for each cell on the board.
625941b78e05c05ec3eea1a1
def getChunk(aList, chunkSize):
    for i in range(0, len(aList), chunkSize):
        yield aList[i:i + chunkSize]
Generator yielding chunks of size chunkSize from a list.

:param list aList: list to be split
:param integer chunkSize: length of one chunk
:raise: StopIteration

Usage:

>>> for chunk in getChunk(aList, chunkSize=10):
...     process(chunk)
625941b7f548e778e58cd3ac
@nox.session(python=DEFAULT_PYTHON_VERSION)
def mypy_samples(session):
    session.install("-e", ".[all]")
    session.install("ipython", "pytest")
    session.install(MYPY_VERSION)
    session.install("types-mock", "types-pytz")
    session.install("typing-extensions")
    session.run(
        "mypy",
        "--config-file",
        str(CURRENT_DIRECTORY / "samples" / "mypy.ini"),
        "--no-incremental",
        "samples/",
    )
Run type checks with mypy.
625941b7091ae35668666d96
def isValidMask(mask):
    p = re.compile(IRC_HOSTMASK_REGEX)
    return p.match(mask) is not None
Returns True if the "mask" supplied to "/xaop add" is a valid hostmask
625941b750485f2cf553cbc9
def __init__(self, source=None, preserve=1):
    self.data = {}
    self.preserve = preserve
    if source:
        self.update(source)
Create an empty dictionary, or update from 'source'.
625941b797e22403b379cdc9
def updateUserProfile(self, authzToken, userProfile):
    self.send_updateUserProfile(authzToken, userProfile)
    return self.recv_updateUserProfile()
Parameters:
 - authzToken
 - userProfile
625941b7a79ad161976cbf76
def reload_allocations(self):
    if not self._enable_dhcp():
        self.disable()
        LOG.debug('Killing dnsmasq for network since all subnets have '
                  'turned off DHCP: %s', self.network.id)
        return
    self._release_unused_leases()
    self._spawn_or_reload_process(reload_with_HUP=True)
    LOG.debug('Reloading allocations for network: %s interface_name:%s',
              self.network.id, self.interface_name)
    self.device_manager.update(self.network, self.interface_name)
Rebuild the dnsmasq config and signal dnsmasq to reload.
625941b7de87d2750b85fbbe
def wait_for_job(cook_url, job_id, status, max_wait_ms=DEFAULT_TIMEOUT_MS):
    return wait_for_jobs(cook_url, [job_id], status, max_wait_ms)[0]
Wait for the given job's status to change to the specified value.
625941b745492302aab5e0f0
def validate_response_contains_expected_response(self, json_actual_response, expected_response_dict,
                                                 ignored_keys=None, full_list_validation=False,
                                                 identity_key="id", sort_lists=False, **kwargs):
    if not json_actual_response:
        zoomba.fail("The Actual Response is Empty.")
        return
    actual_response_dict = json.loads(json_actual_response)
    unmatched_keys_list = []
    if not isinstance(actual_response_dict, list) and actual_response_dict:
        if actual_response_dict == expected_response_dict:
            return
        self.key_by_key_validator(actual_response_dict, expected_response_dict,
                                  ignored_keys, unmatched_keys_list,
                                  full_list_validation=full_list_validation,
                                  sort_lists=sort_lists, **kwargs)
        self.generate_unmatched_keys_error_message(unmatched_keys_list)
        return
    if isinstance(actual_response_dict, list) and actual_response_dict:
        if full_list_validation:
            return self.full_list_validation(actual_response_dict, expected_response_dict,
                                             unmatched_keys_list, ignored_keys,
                                             sort_lists=sort_lists, **kwargs)
        for exp_item in expected_response_dict:
            for actual_item in actual_response_dict:
                try:
                    if exp_item[identity_key] == actual_item[identity_key]:
                        self.key_by_key_validator(actual_item, exp_item,
                                                  ignored_keys, unmatched_keys_list,
                                                  full_list_validation=full_list_validation,
                                                  sort_lists=sort_lists, **kwargs)
                        self.generate_unmatched_keys_error_message(unmatched_keys_list)
                        break
                    elif actual_response_dict[-1] == actual_item:
                        zoomba.fail('Item was not within the response:\n' + str(exp_item))
                        return
                    else:
                        continue
                except KeyError:
                    zoomba.fail('KeyError: "' + identity_key + '" Key was not in the response')
                    break
    else:
        zoomba.fail("The Actual Response is Empty.")
This is the most used method for validating request responses from an API against a supplied
expected response. It performs an object-to-object comparison between two JSON objects, and if
that fails, a more in-depth method is called to find the exact discrepancies between the values
of the provided objects. Additionally, a list of keys to ignore on the comparison may be
supplied, for those keys' values to be ignored.

json_actual_response: (request response object) The response from an API.
expected_response_dict: (json) The expected response, in JSON format.
ignored_keys: (strings list) A list of strings of the keys to be ignored on the validation.
full_list_validation: (bool) Check that the entire list matches the expected response, defaults to False.
identity_key: (string) Key to match items to, defaults to 'id'.
sort_lists: (bool) Sort lists before doing key-by-key validation, defaults to False.
**kwargs: (dict) Currently supported kwargs are margin_type and margin_amt:
    margin_type: (string) The type of unit of time to be used to generate a delta for the date comparisons.
    margin_amt: (string/#) The amount of units specified in margin_type to allot for difference between dates.
return: There is no actual returned output, other than error messages when comparisons fail.
625941b766656f66f7cbbfda
def isWordGuessed(secretWord, lettersGuessed):
    # Every letter of the secret word must appear among the guesses.
    for letter in secretWord:
        if letter not in lettersGuessed:
            return False
    return True
secretWord: string, the word the user is guessing
lettersGuessed: list, what letters have been guessed so far
returns: boolean, True if all the letters of secretWord are in lettersGuessed; False otherwise
625941b76aa9bd52df036bd2
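A quick sanity check of the contract (illustrative values):

assert isWordGuessed('apple', ['a', 'p', 'l', 'e']) is True
assert isWordGuessed('apple', ['a', 'p']) is False
assert isWordGuessed('word', list('drowssap')) is True  # guess order is irrelevant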
def __len__(self):
    return self.npoints
Return length of result set
625941b7236d856c2ad4460f
def __trunc__(a):
    if a._numerator < 0:
        return -(-a._numerator // a._denominator)
    else:
        return a._numerator // a._denominator
math.trunc(a)
625941b732920d7e50b27ffc
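The two branches implement round-toward-zero via floor division; a standalone illustration:

# Floor division alone rounds toward negative infinity, so negative
# fractions get the sign flipped twice to truncate toward zero.
def trunc(numerator, denominator):
    if numerator < 0:
        return -(-numerator // denominator)
    return numerator // denominator

assert trunc(7, 2) == 3    # 3.5 -> 3
assert trunc(-7, 2) == -3  # -3.5 -> -3 (plain // would give -4)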
def _json(self):
    d = {}
    for k, v in self.items():
        if isinstance(v, InsensitiveDict):
            d[k] = v._json()
        elif type(v) in (list, tuple):
            l = []
            for i in v:
                if isinstance(i, InsensitiveDict):
                    l.append(i._json())
                else:
                    l.append(i)
            if type(v) is tuple:
                v = tuple(l)
            else:
                v = l
            d[k] = v
        else:
            d[k] = v
    return d
converts an InsensitiveDict to a dictionary
625941b7090684286d50eb10
def _exclude_items(self, section, items):
    save_groups = []
    for item in items:
        group = self._get_group(item)
        if group is not None:
            add_group = False
            perms = getattr(group, self.group_perms_key)
            for i in range(len(perms) - 1, -1, -1):
                if getattr(perms[i], self.perm_name_key) == section:
                    del perms[i]
                    add_group = True
            if add_group:
                save_groups.append(group)
    self.Group.bulk_save(save_groups)
Remove groups from a permission.

:param section: The name of the permission to remove groups from.
:param items: A list containing names of groups to remove from the permission.
625941b767a9b606de4a7ced
@contextmanager
def chmod(path, mode):
    orig_mode = stat.S_IMODE(os.stat(path).st_mode)
    os.chmod(path, mode)
    try:
        yield
    finally:
        try:
            os.chmod(path, orig_mode)
        except Exception as e:
            logging.error("Failed to restore %r mode: %s", path, e)
Changes path permissions.

Change the permissions of path to the numeric mode before entering the
context, and restore the original value when exiting from the context.

Arguments:
    path (str): file/directory path
    mode (int): new mode
625941b7f8510a7c17cf9535
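A hedged usage sketch for the chmod context manager above (the temp file is illustrative; the snippet itself relies on os, stat, logging and contextlib imports from its module):

import os
import stat
import tempfile

fd, path = tempfile.mkstemp()
os.close(fd)
os.chmod(path, 0o600)
with chmod(path, 0o444):
    # Inside the block the file is read-only.
    assert stat.S_IMODE(os.stat(path).st_mode) == 0o444
# On exit the original mode is restored, even if the body raised.
assert stat.S_IMODE(os.stat(path).st_mode) == 0o600
os.remove(path)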
def __init__(self):
    self.host = '10.0.0.247'
    self.port = 3306
    self.user = 'pamodata'
    self.passwd = 'pamodata'
    self.db = 'db_pamodata'
    self.charset = 'utf8mb4'
Initialization.
625941b773bcbd0ca4b2beae
def add_basic_contact_brick(self, varname_u, multname_n, multname_t=None, *args):
    return self.set("add_basic_contact_brick", varname_u, multname_n, multname_t, *args)
Synopsis: ind = Model.add_basic_contact_brick(self, string varname_u, string multname_n[, string multname_t], string dataname_r, Spmat BN[, Spmat BT, string dataname_friction_coeff][, string dataname_gap[, string dataname_alpha[, int augmented_version[, string dataname_gamma, string dataname_wt]]]])

Add a contact with or without friction brick to the model. If U is the vector
of degrees of freedom on which the unilateral constraint is applied, the matrix
`BN` has to be such that this constraint is defined by :math:`B_N U \le 0`. A
friction condition can be considered by adding the three parameters
`multname_t`, `BT` and `dataname_friction_coeff`. In this case, the tangential
displacement is :math:`B_T U` and the matrix `BT` should have as many rows as
`BN` multiplied by :math:`d-1`, where :math:`d` is the domain dimension. In
this case also, `dataname_friction_coeff` is a data which represents the
coefficient of friction. It can be a scalar or a vector representing a value on
each contact condition. The unilateral constraint is prescribed thanks to a
multiplier `multname_n` whose dimension should be equal to the number of rows
of `BN`. If a friction condition is added, it is prescribed with a multiplier
`multname_t` whose dimension should be equal to the number of rows of `BT`. The
augmentation parameter `r` should be chosen in a range of acceptable values
(see the Getfem user documentation). `dataname_gap` is an optional parameter
representing the initial gap. It can be a single value or a vector of values.
`dataname_alpha` is an optional homogenization parameter for the augmentation
parameter (see the Getfem user documentation). The parameter
`augmented_version` indicates the augmentation strategy: 1 for the
non-symmetric Alart-Curnier augmented Lagrangian, 2 for the symmetric one
(except for the coupling between contact and Coulomb friction), 3 for the
unsymmetric method with augmented multipliers, 4 for the unsymmetric method
with augmented multipliers and De Saxce projection.
625941b7d486a94d0b98df7f
def on_motion(event):
    global _clock_window
    delta_x = event.x - _clock_window.x
    delta_y = event.y - _clock_window.y
    res_x = _clock_window.winfo_x() + delta_x
    res_y = _clock_window.winfo_y() + delta_y
    _clock_window.geometry("+" + str(res_x) + "+" + str(res_y))
Move the clock window to follow the mouse during a drag.
625941b7d4950a0f3b08c18b
def create_from_uri(uri):
    result = None
    root, ext = os.path.splitext(uri)
    if ext.lower() == '.shelve' or ext.lower() == '.shelf':
        result = ShelveReader(uri)
    return result
Create a ShelveReader if the URI points to a shelve file (.shelve or .shelf).
625941b73539df3088e2e17c
def __setup__(self, experiment):
    mid = experiment.mid
    name = experiment.name
    composition = info_table.get_composition(conn, mid)
    notes = info_table.get_notes(conn, mid)
    units = info_table.get_units(conn, mid)
    type = info_table.get_type(conn, mid)
    total_peaks = peaks_table.get_peak_count(conn, mid)
    assigned = experiment.get_assigned_peaks_count()
    invalid_mol, invalid_peaks = experiment.get_invalidated_peaks_count()
    valid_mol, valid_peaks = experiment.get_validated_count()
    pending_peaks = assigned - valid_peaks - invalid_peaks
    pending_mol = len(experiment.molecule_matches) - invalid_mol - valid_mol
    unnassigned = total_peaks - valid_peaks
    self.ui.experiment_name_lbl.setText(name)
    self.ui.composition_val.setText(composition)
    self.ui.notes_val.setText(notes)
    self.ui.units_val.setText(units)
    self.ui.type_val.setText(type)
    self.ui.total_peaks_val.setText(str(total_peaks))
    self.ui.invalid_mol_num.display(invalid_mol)
    self.ui.invalid_peaks_num.display(invalid_peaks)
    self.ui.valid_mol_num.display(valid_mol)
    self.ui.valid_peaks_num.display(valid_peaks)
    self.ui.pending_mol_num.display(pending_mol)
    self.ui.pending_peaks_num.display(pending_peaks)
    self.ui.unnassigned_peaks_num.display(unnassigned)
    self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
:param experiment: Experiment whose summary statistics populate the widget.
:return: None
625941b78e7ae83300e4adfc
def post(self):
    request_json = request.get_json(silent=True)
    username: str = request_json['username']
    avatar_url: str = request_json.get('avatar_url', '')
    try:
        user = UserRepository.create(username, avatar_url)
        return user, 200
    except Exception as e:
        response = jsonify(e.to_dict())
        response.status_code = e.status_code
        return response
Create user
625941b7d18da76e23532302
def star_helper(r, s):
    temp = star_index(r, s)
    if temp is False:
        return False
    return regex_match(r, s[temp + 1:])
(RegexTree, str) -> bool

This function takes the index where part of the string matches the regex
tree, and calls the star case with everything after that index to see if the
rest of the string matches; returns False if it does not match at all.

>>> star_helper(StarTree(BarTree(Leaf('1'), Leaf('2'))), '12')
True
>>> star_helper(StarTree(BarTree(Leaf('1'), Leaf('2'))), '01')
False
625941b716aa5153ce3622a9
def test_issue3830_with_subtok():
    parser = DependencyParser(Vocab(), learn_tokens=True)
    parser.add_label("nsubj")
    assert "subtok" not in parser.labels
    parser.begin_training(lambda: [])
    assert "subtok" in parser.labels
Test that the parser does have the subtok label if learn_tokens=True.
625941b7aad79263cf39086c
def SetPROJSearchPath(*args):
    return _osr.SetPROJSearchPath(*args)
SetPROJSearchPath(char const * utf8_path)
625941b738b623060ff0ac20
def print_text(text, *args, **kwargs):
    print('text:' + text)
Print just the text value
625941b7566aa707497f43ab
def settings():
    return Settings(keypirinha_api.app_settings())
Return the :py:class:`Settings` object associated with the application.

Note:
    * Settings might change at any time if the end-user decides to edit the
      configuration file. In this case, the application will notify every
      loaded plugin by calling :py:meth:`Plugin.on_events` with the
      :py:const:`Events.APPCONFIG` flag.
    * The values returned by the :py:class:`Settings` object are always
      up-to-date, so you do not have to get a new object in case of a
      :py:meth:`Plugin.on_events` notification.
    * If the end-user did not overwrite any default value (e.g.: empty
      configuration file), the dictionary will always be populated with the
      values in use (i.e.: the default ones).

See Also:
    :py:meth:`Plugin.load_settings`
625941b7627d3e7fe0d68c7f
def volume_update_status_based_on_attachment(context, volume_id):
    return IMPL.volume_update_status_based_on_attachment(context, volume_id)
Update volume status according to attached instance id
625941b71d351010ab85594e
def p_not_condition(p):
    p[0] = (p[1], p[2], p[3])
not_condition : NAME NOTEQUAL nis
625941b75fc7496912cc37b7
def __match_trace(self, input_reader):
    if input_reader.trace:
        print("Try: %s" % str(self))
        try:
            m = self.__match(input_reader)
            print('Match of rule type %s' % type(self))
            print(type(m))
            print("Done %s. Matched %s" % (str(self), str(m)))
            return m
        except ParseException:
            print("%s failed at line %d:%d" % (str(self), input_reader.line, input_reader.linePos))
            raise
    else:
        return self.__match(input_reader)
Output a trace string indicating which rule is currently being tried.
625941b707f4c71912b112b8
def neighboring_non_walls(self, pos):
    for neighbor in util.iterate_neighbors(pos):
        if not self.is_wall(neighbor):
            yield neighbor
Find the non-walls neighboring the given position.
625941b7a4f1c619b28afe73
def _add_commit_rtag(self, rtag_type, who):
    self.commit.AddRtag(rtag_type, who)
Add a response tag to the current commit

Args:
    rtag_type (str): rtag type (e.g. 'Reviewed-by')
    who (str): Person who gave that rtag, e.g. 'Fred Bloggs <[email protected]>'
625941b7e64d504609d74671
def send_alert(self, name: str, monitor: Monitor) -> None:
    alert_type = self.should_alert(monitor)
    command = None
    downtime = monitor.get_downtime()
    if monitor.is_remote():
        host = monitor.running_on
    else:
        host = self.hostname
    if alert_type == AlertType.NONE:
        return
    if alert_type == AlertType.FAILURE:
        command = self.fail_command
    elif alert_type == AlertType.SUCCESS:
        command = self.success_command
    elif alert_type == AlertType.CATCHUP:
        if self.catchup_command == "fail_command":
            command = self.fail_command
    else:
        self.alerter_logger.error("Unknown alert type %s", alert_type)
        return
    if command is None:
        return
    command = command.format(
        hostname=host,
        name=name,
        days=downtime.days,
        hours=downtime.hours,
        minutes=downtime.minutes,
        seconds=downtime.seconds,
        failed_at=format_datetime(monitor.first_failure_time()),
        virtual_fail_count=monitor.virtual_fail_count(),
        info=monitor.get_result(),
        description=monitor.describe(),
        last_virtual_fail_count=monitor.last_virtual_fail_count(),
        failure_doc=monitor.failure_doc,
    )
    if not self._dry_run:
        self.alerter_logger.debug("About to execute command: %s", command)
        try:
            subprocess.call(shlex.split(command))
            self.alerter_logger.debug("Command has finished.")
        except subprocess.SubprocessError:
            self.alerter_logger.exception(
                "Exception encountered running command: %s", command
            )
    else:
        self.alerter_logger.info("Would run command: %s", command)
Execute the command
625941b7be7bc26dc91cd436
def save_target_urls(self, target_urls: List[str]):
    for url in target_urls:
        if url not in self.db_target_pages:
            data, errs = TargetPage(strict=True).load(
                {'page_url': url, 'status': False, 'note': '', 'extracted_at': ''})
            self.db_target_pages[url] = data
        else:
            logger.info('URL={} is already in target. Skip.'.format(url))
    else:
        self.db_target_pages.commit()
Save target URLs into the DB.
625941b726068e7796caeb09
def fdmobilenet_w3d4(**kwargs):
    return get_mobilenet(version="fd", width_scale=0.75, model_name="fdmobilenet_w3d4", **kwargs)
FD-MobileNet 0.75x model from 'FD-MobileNet: Improved MobileNet with A Fast
Downsampling Strategy,' https://arxiv.org/abs/1802.03750.

Parameters
----------
pretrained : bool, default False
    Whether to load the pretrained weights for the model.
root : str, default '~/.tensorflow/models'
    Location for keeping the model parameters.
625941b7adb09d7d5db6c5c4
@add_method(Case)
def nnc_connections_static_values_async(self, property_name):
    return self.__nnc_connections_values_async(
        property_name, NNCProperties_pb2.NNC_STATIC, 0
    )
Get the static NNC values. Async, so returns an iterator.

Returns:
    An iterator to a chunk object containing a list of doubles.
    Loop through the chunks and then the values within the chunk to get
    values for all the connections. The order of the list matches the list
    from nnc_connections, i.e. the nth object of nnc_connections() refers
    to the nth value in this list.
625941b7097d151d1a222c8d
def setDefaultColormap(self, colormap=None):
    self._plot.setDefaultColormap(colormap)
Sets the colormap that will be applied by the backend to an image if no
colormap is applied to it.

A colormap is a dictionary with the keys:

:type name: string
:type normalization: string (linear, log)
:type autoscale: boolean
:type vmin: float, minimum value
:type vmax: float, maximum value
:type colors: integer (typically 256)

If None is passed, the backend will reset to its default colormap.
625941b726068e7796caeb0a
def GetNext(self, prev):
    next = []
    prev_pos = self._cities[prev.Value]
    for name, position in self._cities.items():
        if name != prev.Value:
            next.append(SearchNode(name, self._distance(prev_pos, position)))
    return next
Return children nodes

:param prev: previous node
:return: Children
625941b796565a6dacc8f506
def __init__(self, method, host, port):
    self.port = port
    self.method = method
    self.host = host
    self.token = None
    self.default_port = None
    self._remote = None
    self._callback = None
Initialize Bridge.
625941b7379a373c97cfa97c
def get(
    self,
    location_name,
    name,
    **kwargs
):
    cls = kwargs.pop('cls', None)
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2018-09-15"
    accept = "application/json"

    url = self.get.metadata['url']
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        'locationName': self._serialize.url("location_name", location_name, 'str'),
        'name': self._serialize.url("name", name, 'str'),
    }
    url = self._client.format_url(url, **path_format_arguments)

    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

    header_parameters = {}
    header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

    request = self._client.get(url, query_parameters, header_parameters)
    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    deserialized = self._deserialize('OperationResult', pipeline_response)

    if cls:
        return cls(pipeline_response, deserialized, {})

    return deserialized
Get operation.

:param location_name: The name of the location.
:type location_name: str
:param name: The name of the operation.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: OperationResult, or the result of cls(response)
:rtype: ~azure.mgmt.devtestlabs.models.OperationResult
:raises: ~azure.core.exceptions.HttpResponseError
625941b7fbf16365ca6f5fed
def signal_rshift(shift: int) -> Callable[[int], int]:
    return lambda x: limit_signal(x >> shift)
A circuit element performing a fixed RSHIFT on a signal.
625941b77c178a314d6ef28a
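limit_signal is not shown in this snippet; a self-contained sketch with a plausible 16-bit clamp (an assumption, not the original definition) illustrates the closure pattern:

from typing import Callable

def limit_signal(value: int) -> int:
    # Assumed behavior: clamp to an unsigned 16-bit wire value.
    return value & 0xFFFF

def signal_rshift(shift: int) -> Callable[[int], int]:
    return lambda x: limit_signal(x >> shift)

rshift2 = signal_rshift(2)
assert rshift2(456) == 114  # 456 >> 2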
def test_authority_ids_none(self):
    from querying import get_authority_ids
    authors = [
        ([('0', 'CERN12345')], ' ', ' ', '', 1),
        ([('i', 'inspire')], ' ', ' ', '', 1),
        ([('u', 'INSPIRE-123')], ' ', ' ', '', 1),
        ([('0', 'INSPIRE-')], ' ', ' ', '', 1),
        ([('0', 'CERN-')], ' ', ' ', '', 1),
        ([('0', 'CCID--1')], ' ', ' ', '', 1)
    ]
    authority_ids_expected = {}
    for author in authors:
        self.assertEqual(get_authority_ids(author), authority_ids_expected)
Test `author` objects by expecting an empty dictionary.
625941b7d99f1b3c44c673c9
def get_embed(input_data, vocab_size, embed_dim):
    embedding = tf.Variable(tf.truncated_normal((vocab_size, embed_dim), mean=0, stddev=0.1))
    embed = tf.nn.embedding_lookup(embedding, input_data)
    return embed
Create embedding for <input_data>.

:param input_data: TF placeholder for text input.
:param vocab_size: Number of words in vocabulary.
:param embed_dim: Number of embedding dimensions
:return: Embedded input.
625941b7004d5f362079a168
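A hedged usage sketch for the embedding lookup above (TF 1.x graph-mode, matching the snippet's tf.truncated_normal API; shapes are illustrative):

import tensorflow as tf  # TF 1.x-style graph API

input_data = tf.placeholder(tf.int32, [None, 20], name='input')  # 20-token sequences
embed = get_embed(input_data, vocab_size=5000, embed_dim=128)
print(embed.get_shape().as_list())  # [None, 20, 128]: one 128-d vector per token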
def restoreTabs(self):
    fileList = self.configuration.setting("ListOfOpenFilesAndPanels")
    for i in range(0, len(fileList), 2):
        panel = int(fileList[i + 1])
        self.loadFile(fileList[i], True, panel)
    self.splitter.restoreState(self.configuration.setting("PanelSplitterState"))
    currentTabIndex = self.configuration.setting("CurrentTabIndex")
    currentPanelIndex = self.configuration.setting("CurrentPanelIndex")
    self.panels[currentPanelIndex].setCurrentIndex(currentTabIndex)
restores tabs based on the last saved session
625941b7e8904600ed9f1d5a
def xkcd_palette(colors):
    palette = [xkcd_rgb[name] for name in colors]
    return color_palette(palette, len(palette))
Make a palette with color names from the xkcd color survey.

This is just a simple wrapper around the seaborn.xkcd_rgb dictionary.
See xkcd for the full list of colors: http://xkcd.com/color/rgb/
625941b7bde94217f3682c2e
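Usage sketch with names from the real xkcd survey (seaborn exposes this function as sns.xkcd_palette):

import seaborn as sns

colors = ["windows blue", "amber", "faded green"]
palette = sns.xkcd_palette(colors)
print(palette[0])  # an (r, g, b) tuple for "windows blue"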
def delta(self, enemy_position, target_position):
    coordinates = [x - y for x, y in zip(enemy_position, target_position)]
    return sum(int(math.fabs(x)) for x in coordinates)
Find the distance in tiles to the target
625941b78a43f66fc4b53e9b
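This is the Manhattan (L1) distance; a standalone check of the same computation:

import math

def manhattan(a, b):
    # Sum of absolute per-axis differences, in whole tiles.
    return sum(int(math.fabs(x - y)) for x, y in zip(a, b))

assert manhattan((0, 0), (3, 4)) == 7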
def sale_records_info(sale, features):
    invoice = sale.invoice
    sale_features = [x for x in SALE_FEATURES if x in features]
    course_reg_features = [x for x in COURSE_REGISTRATION_FEATURES if x in features]
    sale_dict = dict((feature, getattr(invoice, feature)) for feature in sale_features)
    total_used_codes = RegistrationCodeRedemption.objects.filter(
        registration_code__in=sale.courseregistrationcode_set.all()
    ).count()
    sale_dict.update({"invoice_number": invoice.id})
    sale_dict.update({"total_codes": sale.courseregistrationcode_set.all().count()})
    sale_dict.update({'total_used_codes': total_used_codes})
    codes = [reg_code.code for reg_code in sale.courseregistrationcode_set.all()]
    if len(codes) > 0:
        obj_course_reg_code = sale.courseregistrationcode_set.all()[:1].get()
        course_reg_dict = dict((feature, getattr(obj_course_reg_code, feature))
                               for feature in course_reg_features)
    else:
        course_reg_dict = dict((feature, None) for feature in course_reg_features)
    course_reg_dict['course_id'] = text_type(course_id)
    course_reg_dict.update({'codes': ", ".join(codes)})
    sale_dict.update(dict(list(course_reg_dict.items())))
    return sale_dict
Convert sales records to dictionary
625941b73d592f4c4ed1ceb2
def fbank(signal, samplerate=16000, winlen=0.025, winstep=0.01,
          nfilt=26, nfft=512, lowfreq=0, highfreq=None, preemph=0.97):
    highfreq = highfreq or samplerate / 2
    signal = sigproc.preemphasis(signal, preemph)
    frames = sigproc.framesig(signal, winlen * samplerate, winstep * samplerate)
    pspec = sigproc.powspec(frames, nfft)
    energy = numpy.sum(pspec, 1)
    energy = numpy.where(energy == 0, numpy.finfo(float).eps, energy)
    fb = get_filterbanks(nfilt, nfft, samplerate, lowfreq, highfreq)
    feat = numpy.dot(pspec, fb.T)
    feat = numpy.where(feat == 0, numpy.finfo(float).eps, feat)
    return feat, energy
Compute Mel-filterbank energy features from an audio signal.

:param signal: the audio signal from which to compute features. Should be an N*1 array
:param samplerate: the samplerate of the signal we are working with.
:param winlen: the length of the analysis window in seconds. Default is 0.025s (25 milliseconds)
:param winstep: the step between successive windows in seconds. Default is 0.01s (10 milliseconds)
:param nfilt: the number of filters in the filterbank, default 26.
:param nfft: the FFT size. Default is 512.
:param lowfreq: lowest band edge of mel filters. In Hz, default is 0.
:param highfreq: highest band edge of mel filters. In Hz, default is samplerate/2
:param preemph: apply preemphasis filter with preemph as coefficient. 0 is no filter. Default is 0.97.
:returns: 2 values. The first is a numpy array of size (NUMFRAMES by nfilt) containing features.
    Each row holds 1 feature vector. The second return value is the energy in each frame
    (total energy, unwindowed)
625941b7d6c5a10208143e79
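A hedged usage sketch for fbank above (it assumes the sigproc and get_filterbanks helpers from the same module, as in python_speech_features; the input here is synthetic):

import numpy as np

samplerate = 16000
signal = np.random.randn(samplerate)  # one second of noise, N*1 array
feat, energy = fbank(signal, samplerate=samplerate, nfilt=26)
print(feat.shape)    # (NUMFRAMES, 26): one 26-filter row per 25 ms frame
print(energy.shape)  # (NUMFRAMES,): total unwindowed energy per frame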
def __init__(
        self,
        location_map={},
        minified=False,
        asset_version=None,
        default_location=None):
    self.location_map = location_map
    self.minified = minified
    self.asset_version = asset_version
    self.default_location = default_location
    if self.default_location is not None:
        self.get_location_by_name(self.default_location)
Constructor

Args:
    location_map (dict): A hash with the default AssetLocation instances available
    minified (bool): If true, the minified version of the resources are returned
    asset_version (str): The default version of the assets to be returned
    default_location (str): The default location used when the instance is asked
        to get a URL for a resource
625941b7796e427e537b03f4
def data_entry():
    with open('/home/jaremciuc/data_analysis_test/assignment_data.csv') as fin:
        c = db.cursor()
        dr = csv.DictReader(fin)
        to_db = [(i['id'], i['title'], i['features'], i['living_area'],
                  i['total_area'], i['plot_area'], i['price']) for i in dr]
        c.executemany("INSERT INTO Casafari (id,title,features,living_area,total_area,plot_area,price) VALUES (?, ?, ?, ?, ?, ?, ?);", to_db)
        db.commit()
        c.close()
        print('Data entered successfully')
Inserts the data into the database
625941b723e79379d52ee39a
def cmd_unmaximize(ensoapi):
    cmd_restore(ensoapi)
Unmaximize window if it is maximized
625941b77b25080760e3928c
def get_detector_for_component(move_info, component):
    detectors = move_info.detectors
    selected_detector = None
    if component == "HAB":
        selected_detector = detectors[DetectorType.to_string(DetectorType.HAB)]
    elif component == "LAB":
        selected_detector = detectors[DetectorType.to_string(DetectorType.LAB)]
    else:
        for _, detector in list(detectors.items()):
            if detector.detector_name == component or detector.detector_name_short == component:
                selected_detector = detector
    return selected_detector
Get the detector for the selected component.

The detector can be either an actual component name or a HAB or LAB abbreviation.

:param move_info: a SANSStateMove object
:param component: the selected component
:return: a detector equivalent to the selected component, or None
625941b7d10714528d5ffb11
def want_to_rest(self):
    if self.resting_time > 0:
        self.resting_time -= 1
    else:
        if self.random.random() <= 0.5:
            self.resting_status = 1
        else:
            self.resting_status = 0
        self.resting_time = self.random.randint(10, 30)
    return self.resting_status
Returns whether the agent wants to rest.
625941b730dc7b766590179c
def from_api_response(self, file_version_dict, force_action=None):
    assert file_version_dict.get('action') is None or force_action is None, \
        'action was provided by both info_dict and function argument'
    action = file_version_dict.get('action') or force_action
    file_name = file_version_dict['fileName']
    id_ = file_version_dict['fileId']
    if 'size' in file_version_dict:
        size = file_version_dict['size']
    elif 'contentLength' in file_version_dict:
        size = file_version_dict['contentLength']
    else:
        raise ValueError('no size or contentLength')
    upload_timestamp = file_version_dict.get('uploadTimestamp')
    content_type = file_version_dict.get('contentType')
    content_sha1 = file_version_dict.get('contentSha1')
    content_md5 = file_version_dict.get('contentMd5')
    file_info = file_version_dict.get('fileInfo')
    server_side_encryption = EncryptionSettingFactory.from_file_version_dict(file_version_dict)
    file_retention = FileRetentionSetting.from_file_version_dict(file_version_dict)
    legal_hold = LegalHold.from_file_version_dict(file_version_dict)
    return FileVersion(
        self.api,
        id_,
        file_name,
        size,
        content_type,
        content_sha1,
        file_info,
        upload_timestamp,
        file_version_dict['accountId'],
        file_version_dict['bucketId'],
        action,
        content_md5,
        server_side_encryption,
        file_retention,
        legal_hold,
    )
Turn this:

.. code-block:: python

    {
        "action": "hide",
        "fileId": "4_zBucketName_f103b7ca31313c69c_d20151230_m030117_c001_v0001015_t0000",
        "fileName": "randomdata",
        "size": 0,
        "uploadTimestamp": 1451444477000
    }

or this:

.. code-block:: python

    {
        "accountId": "4aa9865d6f00",
        "bucketId": "547a2a395826655d561f0010",
        "contentLength": 1350,
        "contentSha1": "753ca1c2d0f3e8748320b38f5da057767029a036",
        "contentType": "application/octet-stream",
        "fileId": "4_z547a2a395826655d561f0010_f106d4ca95f8b5b78_d20160104_m003906_c001_v0001013_t0005",
        "fileInfo": {},
        "fileName": "randomdata",
        "serverSideEncryption": {"algorithm": "AES256", "mode": "SSE-B2"}
    }

into a :py:class:`b2sdk.v2.FileVersion` object.
625941b7097d151d1a222c8e
def _is_in_range(self, creep):
    if not creep or not creep.active or creep.dead:
        return False
    return self._get_distance_to_creep(creep) <= self.bounding_radius
checks whether the creep is in range
625941b7d18da76e23532303
def __init__(self, spec):
    super(AcsKubernetesCluster, self).__init__(spec)
    self.resource_group = azure_network.GetResourceGroup(self.zone)
    self.name = 'pkbcluster%s' % FLAGS.run_uri
    self._deleted = False
Initializes the cluster.
625941b7b57a9660fec336b1
def test_vectorial_operation():
    lha = 123 * u.degree
    dec = 42.3213 * u.degree
    rotation = xyz_to_uvw_rotation_matrix(lha, dec)
    xyz_vec_tuple = (e_x, e_x, e_y, e_z, e_y, e_z)
    uvw_expected_results = []
    for vec in xyz_vec_tuple:
        uvw_expected_results.append(np.dot(rotation, vec))
    xyz_vec_array = np.vstack(xyz_vec_tuple).T
    assert xyz_vec_array.shape == (3, len(xyz_vec_tuple))
    uvw_vectorized_results = np.dot(rotation, xyz_vec_array)
    assert uvw_vectorized_results.shape == (3, len(xyz_vec_tuple))
    uvw_vectorized_results = uvw_vectorized_results.T
    for idx in range(len(uvw_expected_results)):
        assert (uvw_expected_results[idx] == uvw_vectorized_results[idx]).all()
It's desirable to make use of Numpy's vector optimizations and transform many XYZ positions in one go, so let's see if that works correctly:
625941b7eab8aa0e5d26d990
def __init__(self, target, payload_provider, timeout, socket_options=(),
             seed_id=None, verbose=False, output=sys.stdout):
    self.socket = network.Socket(target, 'icmp', source=None, options=socket_options)
    self.provider = payload_provider
    self.timeout = timeout
    self.responses = ResponseList(verbose=verbose, output=output)
    self.seed_id = seed_id
    if self.seed_id is None:
        self.seed_id = os.getpid() & 0xFFFF
Creates an instance that can handle communication with the target device

:param target: IP or hostname of the remote device
:type target: str
:param payload_provider: An iterable list of payloads to send
:type payload_provider: PayloadProvider
:param timeout: Timeout that will apply to all ping messages, in seconds
:type timeout: int
:param socket_options: Options to specify for the network.Socket
:type socket_options: tuple
:param seed_id: The first ICMP packet ID to use
:type seed_id: Union[None, int]
:param verbose: Flag to enable verbose mode, defaults to False
:type verbose: bool
:param output: File where to write verbose output, defaults to stdout
:type output: file
625941b78e7ae83300e4adfd
def test_get_info(self):
    ddoc = DesignDocument(self.db, '_design/ddoc001')
    ddoc.save()
    ddoc_remote = DesignDocument(self.db, '_design/ddoc001')
    ddoc_remote.fetch()
    info = ddoc_remote.info()
    info['view_index'].pop('signature')
    if 'disk_size' in info['view_index']:
        info['view_index'].pop('disk_size')
    if 'data_size' in info['view_index']:
        info['view_index'].pop('data_size')
    if 'sizes' in info['view_index']:
        info['view_index'].pop('sizes')
    if 'updates_pending' in info['view_index']:
        info['view_index'].pop('updates_pending')
    name = 'ddoc001'
    self.assertEqual(
        info,
        {'view_index': {'update_seq': 0,
                        'waiting_clients': 0,
                        'language': 'javascript',
                        'purge_seq': 0,
                        'compact_running': False,
                        'waiting_commit': False,
                        'updater_running': False},
         'name': name}
    )
Test retrieval of info endpoint from the DesignDocument.
625941b7d53ae8145f87a0a8
def _set_up(self):
    pass
Subclasses may decorate this as a coroutine
625941b70a50d4780f666cc1
def __init__(self):
    self.screen_width = 1200
    self.screen_height = 800
    self.bg_color = (230, 230, 230)
    self.ship_speed_factor = 1.5
    self.ship_limit = 3
    self.bullet_speed_factor = 3
    self.bullet_width = 3
    self.bullet_height = 15
    self.bullet_color = 60, 60, 60
    self.bullets_allowed = 3
    self.alien_speed_factor = 1
    self.fleet_drop_speed = 10
    self.speedup_scale = 1.1
    self.initialize_dynamic_settings()
    self.score_scale = 1.5
Initialize the screen settings.
625941b72c8b7c6e89b355f5
def roles_dict(path, repo_prefix=""):
    exit_if_path_not_found(path)
    aggregated_roles = {}
    roles = os.walk(path).next()[1]
    for role in roles:
        if is_role(os.path.join(path, role)):
            if isinstance(role, basestring):
                role_repo = "{0}{1}".format(repo_prefix, role_name(role))
                aggregated_roles[role] = role_repo
    return aggregated_roles
Return a dict of role names and repo paths.
625941b7d58c6744b4257a92
def job_failed(self, job_wrapper, message, exception=False):
    pass
Called when a job has failed
625941b721bff66bcd684787
def stemWord(word):
    global porter
    return porter.stem(word)
Stems the given word using the Porter Stemmer library

Parameters
----------
word : String type
    A word to be stemmed

Returns
-------
stemmedWord : String type
    The stemmed version of the given word
625941b71b99ca400220a8e2
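The porter global above is presumably an NLTK PorterStemmer (an assumption about this module); an equivalent standalone sketch:

from nltk.stem import PorterStemmer

porter = PorterStemmer()
print(porter.stem("running"))    # 'run'
print(porter.stem("happiness"))  # 'happi'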
def set_file_log(self):
    logging.basicConfig(filename=self.FILE_LOG_DEBUG,
                        datefmt="[%Y-%m-%d %H:%M]",
                        format="%(asctime)s - %(name)-2s %(levelname)-2s %(message)s",
                        filemode='a',
                        level=logging.DEBUG)
    self.log = logging.getLogger(CondeConstants().LOGGER_NAME)
    formatter = logging.Formatter("%(asctime)s - %(funcName)35s() - %(levelname)-2s %(message)s",
                                  datefmt="[%Y-%m-%d %H:%M]")
    fhlog = logging.FileHandler(self.LOG_FILE)
    fhlog.setFormatter(formatter)
    fhlog.setLevel(logging.INFO)
    self.log.addHandler(fhlog)
Set log file.

Args:
    None

Returns:
    None
625941b797e22403b379cdca
def initialize_tree(BST):
    if isinstance(BST, tuple):
        return (initialize_tree(BST[0]), initialize_tree(BST[1]))
    else:
        return BST[0]
Takes a BST and returns a copy of that tree with all the leaves replaced by
their first characters.

BST -> str
625941b763b5f9789fde6f17
def enter_selection_set(
    self, node: SelectionSetNode, key: Any, parent: Any, path: List[Any], ancestors: List[Any]
) -> None:
    selections = node.selections
    if len(selections) == 1 and isinstance(selections[0], InlineFragmentNode):
        return
    else:
        seen_vertex_field = False
        for field in selections:
            if isinstance(field, InlineFragmentNode):
                raise GraphQLValidationError(
                    "Inline fragments must be the only selection in scope. However, in "
                    "selections {}, an InlineFragment coexists with other selections.".format(
                        selections
                    )
                )
            if isinstance(field, FragmentSpreadNode):
                raise GraphQLValidationError(
                    f"Fragments (not to be confused with inline fragments) are not supported "
                    f"by the compiler. However, in SelectionSetNode {node}'s selections "
                    f"attribute {selections}, the field {field} is a FragmentSpreadNode named "
                    f"{field.name.value}."
                )
            if not isinstance(field, FieldNode):
                raise AssertionError(
                    f"The SelectionNode {field} in SelectionSetNode {node}'s selections "
                    f"attribute is not a FieldNode but instead has type {type(field)}."
                )
            if is_property_field_ast(field):
                if seen_vertex_field:
                    raise GraphQLValidationError(
                        "In the selections {}, the property field {} occurs after a vertex "
                        "field or a type coercion statement, which is not allowed, as all "
                        "property fields must appear before all vertex fields.".format(
                            node.selections, field
                        )
                    )
            else:
                seen_vertex_field = True
Check selections are valid.

If selections contains an InlineFragment, check that it is the only inline
fragment in scope. Otherwise, check that property fields occur before vertex
fields.

Args:
    node: selection set
    key: The index or key to this node from the parent node or Array.
    parent: the parent immediately above this node, which may be an Array.
    path: The key path to get to this node from the root node.
    ancestors: All nodes and Arrays visited before reaching parent of this
        node. These correspond to array indices in ``path``. Note: ancestors
        includes arrays which contain the parent of visited node.
625941b750812a4eaa59c158
def get_cost_updates(self, lr=0.1, persistent=None, k=1):
    n_vis = self.n_visible
    n_hid = self.n_hidden
    pre_sigmoid_ph, ph_mean, ph_sample = self.sample_h_given_v(self.input)
    tf.assert_rank(ph_sample, 2)
    _, pv_mean, _ = self.sample_v_given_h(ph_sample)
    if persistent is None:
        chain_start = ph_sample
    else:
        print('persistent is not implemented')
        raise ValueError
    if k == 1:
        [pre_sigmoid_nvs, nv_means, nv_samples,
         pre_sigmoid_nhs, nh_means, nh_samples] = self.gibbs_hvh(chain_start)
    else:
        bat_size = tf.shape(self.input)[0]
        v_zeros1 = tf.zeros([bat_size, n_vis], dtype=tf.float32)
        v_zeros2 = tf.zeros([bat_size, n_vis], dtype=tf.float32)
        v_zeros3 = tf.zeros([bat_size, n_vis], dtype=tf.float32)
        h_zeros1 = tf.zeros([bat_size, n_hid], dtype=tf.float32)
        h_zeros2 = tf.zeros([bat_size, n_hid], dtype=tf.float32)
        state = [v_zeros1, v_zeros2, v_zeros3, h_zeros1, h_zeros2, chain_start]
        i = tf.constant(0)
        i_1 = tf.constant(1)

        def body(i, state):
            i = tf.add(i, i_1)
            h_samples = state[-1]
            state = self.gibbs_hvh(h_samples)
            return i, state

        cond = lambda i, _: tf.less(i, k)
        final_i, final_state = tf.while_loop(cond, body, [i, state])
        [pre_sigmoid_nvs, nv_means, nv_samples,
         pre_sigmoid_nhs, nh_means, nh_samples] = final_state
    chain_end = nv_samples
    cost = tf.reduce_mean(self.free_energy(self.input)) - tf.reduce_mean(
        self.free_energy(chain_end))
    gradients = self.grads_cd(self.input, ph_mean, nv_means, nh_means)
    W_ = self.W + lr * gradients[0]
    hbias_ = self.hbias + lr * gradients[1]
    vbias_ = self.vbias + lr * gradients[2]
    train_op = tf.group(
        (self.W).assign(W_),
        (self.hbias).assign(hbias_),
        (self.vbias).assign(vbias_))
    if persistent:
        print('persistent is not implemented')
        raise ValueError
    else:
        monitoring_cost = self.get_reconstruction_cost(pre_sigmoid_nvs)
    return cost, monitoring_cost, train_op
This function implements one step of CD-k (PCD-k).

args:
    lr: learning rate used to train the RBM
    persistent: None for CD. For PCD, shared variable containing old state of
        Gibbs chain. This must be a shared variable of size (batch size,
        number of hidden units).
    k: number of Gibbs steps to do in CD-k/PCD-k
625941b70a366e3fb873e649
def make_pastmaclist():
    db.pastdata.aggregate([
        {"$group": {"_id": {"mac": "$mac"}}},
        {"$out": "pastmaclist"},
    ], allowDiskUse=True)
Create a pastmaclist collection holding the MAC addresses extracted from the past (1 min) data in the pastdata collection.
625941b7462c4b4f79d1d502
def score(self, emit, target, mask):
    sen_len, batch_size, labels_num = emit.shape
    assert labels_num == self.labels_num
    scores = torch.zeros_like(target, dtype=torch.float)
    scores[1:] += self.transitions[target[:-1], target[1:]]
    scores += emit.gather(dim=2, index=target.unsqueeze(2)).squeeze(2)
    score = scores.masked_select(mask).sum()
    ends = mask.sum(dim=0).view(1, -1) - 1
    score += self.strans[target[0]].sum()
    score += self.etrans[target.gather(dim=0, index=ends)].sum()
    return score
author: zhangyu
return: sum(score)
625941b799cbb53fe6792a19
def validate(data, required_fields):
    if all(field in data for field in required_fields):
        return True
    return False
Validate if all required_fields are in the given data dictionary
625941b7a17c0f6771cbde86
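Quick usage check (values are illustrative):

payload = {"username": "ada", "email": "ada@example.com"}
assert validate(payload, ["username", "email"])
assert not validate(payload, ["username", "password"])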
def event_attach(self, eventtype, callback, *args, **kwds):
    if not isinstance(eventtype, EventType):
        raise VLCException("%s required: %r" % ('EventType', eventtype))
    if not hasattr(callback, '__call__'):
        raise VLCException("%s required: %r" % ('callable', callback))
    if not any(getargspec(callback)[:2]):
        raise VLCException("%s required: %r" % ('argument', callback))
    if self._callback_handler is None:
        _called_from_ctypes = ctypes.CFUNCTYPE(None, ctypes.POINTER(Event), ctypes.c_void_p)

        @_called_from_ctypes
        def _callback_handler(event, k):
            try:
                call, args, kwds = self._callbacks[k]
                call(event.contents, *args, **kwds)
            except KeyError:
                pass

        self._callback_handler = _callback_handler
        self._callbacks = {}
    k = eventtype.value
    r = libvlc_event_attach(self, k, self._callback_handler, k)
    if not r:
        self._callbacks[k] = (callback, args, kwds)
    return r
Register an event notification.

@param eventtype: the desired event type to be notified about.
@param callback: the function to call when the event occurs.
@param args: optional positional arguments for the callback.
@param kwds: optional keyword arguments for the callback.
@return: 0 on success, ENOMEM on error.

@note: The callback function must have at least one argument, an Event
instance. Any other, optional positional and keyword arguments are in
B{addition} to the first one.
625941b7b5575c28eb68de2f
def test02_distributionZSHY(self):
    self.zo = Page_ZSHY_distribution(self.driver)
    self.username = Config().get('HZ_NAME')
    distribution = getData(2, "HZ_ZSHY")
    self.A.IntoModule("帐号2直属会员4")
    i = self.driver.find_element_by_id("mainIframe")
    self.driver.switch_to.frame(i)
    self.assertTrue(self.A.select_row(self.username))
    self.A.distribution()
    self.driver.switch_to.default_content()
    self.zo.input_distribution(str(distribution))
    self.A.click_save()
    self.A.click_ok()
    time.sleep(1)
    self.assertTrue(self.l.is_text_in_element(self.A.alert_text, "操作成功",
                                              str(self.l.get_text(self.A.alert_text))))
    self.A.click_ok()
    log.info('-------会长发放【直属会员】 用例结束-------')
Distribute to direct-subordinate members.
625941b79f2886367277a6c3
def frequency_list_sorting_with_counter(elements: list):
    frequency = Counter(elements)
    sorted_frequency = frequency.most_common()
    result = []
    for key, value in sorted_frequency:
        result.append(key)
    return result
Reduce the list to unique elements and sort them by frequency

>>> frequency_list_sorting_with_counter([1, 2, 1, 3, 3, 3, 3])
[3, 1, 2]
>>> frequency_list_sorting_with_counter([1, 2, 1, 2, 3, 3])
[1, 2, 3]
>>> frequency_list_sorting_with_counter(['c', 'c', 'b', 'b', 'b', 'a'])
['b', 'c', 'a']
625941b791af0d3eaac9b846
def take_snapshot(snapmeta): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> logging.warn("Taking snapshot of " + str(snapmeta)) <NEW_LINE> ec2 = boto3.client('ec2') <NEW_LINE> response = ec2.create_snapshot( Description='Automated snap of {} at {} from instance {} named {}'.format(snapmeta['volume'], snapmeta['DeviceName'], snapmeta['instance'], snapmeta['name']), VolumeId=snapmeta['volume'], DryRun=False ) <NEW_LINE> tagger(response['SnapshotId'], snapmeta['owner'], snapmeta['instance'], snapmeta['name']) <NEW_LINE> <DEDENT> except BaseException as e: <NEW_LINE> <INDENT> logging.error('ERROR encountered: ' + str(e) + ' with meta ' + str(snapmeta))
Takes a snapshot, tagging it with the metadata of the instance the volume is attached to
625941b74527f215b584c28d
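The shape of snapmeta can be read off the keys the function accesses; a sketch with placeholder ids:

snapmeta = {
    'volume': 'vol-0123456789abcdef0',    # placeholder EBS volume id
    'DeviceName': '/dev/xvda',
    'instance': 'i-0123456789abcdef0',    # placeholder instance id
    'name': 'web-server-1',               # placeholder Name tag
    'owner': '[email protected]',      # placeholder owner tag
}
take_snapshot(snapmeta)  # creates and tags the snapshot via boto3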
def launchBenchCast(num_cpus, app_list, outfile, config, bench_cast_seconds, event_file): <NEW_LINE> <INDENT> printColor("Launching bench_cast with apps: %s" % app_list, "blue") <NEW_LINE> exec_cmd = "./bench_cast -c %s -v %s -s %d -l 2 -p %d -e %s %s" % ( config, outfile, bench_cast_seconds, num_cpus, event_file, app_list) <NEW_LINE> runCommand(exec_cmd) <NEW_LINE> printColor("Bench cast Ended", "blue")
Launch bench_cast with the given CPU count, app list, output file, config, duration, and event file
625941b71b99ca400220a8e3
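For illustration, a call such as the one below would shell out the following command (every argument is a placeholder):

launchBenchCast(4, 'app1 app2', 'out.csv', 'cfg.json', 30, 'events.txt')
# runs: ./bench_cast -c cfg.json -v out.csv -s 30 -l 2 -p 4 -e events.txt app1 app2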
def test_wrong_data_raise(): <NEW_LINE> <INDENT> signal_interpreter_app.testing = True <NEW_LINE> signal_interpreter_app_instance = signal_interpreter_app.test_client() <NEW_LINE> with patch.object( LoadAndParseJson, "return_signal_by_title", return_value="Service not suported!"): <NEW_LINE> <INDENT> with signal_interpreter_app_instance as client: <NEW_LINE> <INDENT> payload = {"service": "XXXXX"} <NEW_LINE> with pytest.raises(ParserErrorKeyError): <NEW_LINE> <INDENT> client.post("/", json=payload)
Action : Post an unsupported service payload while mocking the parser's answer. Expected Results : ParserErrorKeyError is raised. Returns: N/A.
625941b7dd821e528d63afde
def _create_sequence(in_q, out_q): <NEW_LINE> <INDENT> for args in iter(in_q.get, THREAD_STOP): <NEW_LINE> <INDENT> out_q.put(Sequence(args[0], args[1]))
Create sequence worker process Input: in_q: multiprocessing.Queue; work queue out_q: multiprocessing.Queue; sequence queue
625941b76aa9bd52df036bd4
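A sketch of driving the worker, assuming Sequence and THREAD_STOP come from the surrounding module:

from multiprocessing import Process, Queue

in_q, out_q = Queue(), Queue()
worker = Process(target=_create_sequence, args=(in_q, out_q))
worker.start()

in_q.put(('seq1', 'ACGT'))  # placeholder (args[0], args[1]) for Sequence
in_q.put(THREAD_STOP)       # sentinel ends the iter() loop
worker.join()
seq = out_q.get()           # the constructed Sequence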
def set_max_speed(self, max_sp, board_ind=0): <NEW_LINE> <INDENT> board_ind = ctypes.c_int16(board_ind) <NEW_LINE> max_sp = ctypes.c_int16(max_sp) <NEW_LINE> self.cmd.send_command(4, (board_ind, max_sp))
Sets the max speed to the integer value specified
625941b7f9cc0f698b140438
def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> if (self.name in self.__class__.dataMap_.keys()): <NEW_LINE> <INDENT> self.mentions_ = self.__class__.dataMap_[self.name].mentions_ + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.mentions_ = 1 <NEW_LINE> <DEDENT> super(Character, self).__init__(self.name, self)
Initializes a character. Args: name: A string containing the character's name.
625941b7d53ae8145f87a0a9
def createDependencies(self, product, version, flavor=None, tag=None, recursive=False): <NEW_LINE> <INDENT> self.unimplemented("createDependencies")
Create a list of product dependencies based on what is known from the system. This implementation will look up the product in the EUPS database and analyze its table file. A typical full implementation of this function in a sub-class would call _createDeps() to get the initial list of dependencies, but then iterate through the products, adding the name of the table file and the distId. @param product the name of the product to create the package distribution for @param version the name of the product version @param flavor the flavor of the target platform; this may be ignored by the implementation @param tag where a dependency is loose, prefer the version that is part of this tagged release. This may be ignored by the implementation @param recursive if False, this list will only contain the direct dependencies of this product; otherwise, it will include the dependencies of the dependencies recursively. Default: False
625941b7cb5e8a47e48b78e2
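Following the docstring's own hint, a subclass override might look like this sketch; _createDeps is the helper the docstring names, while the table-file and distId lookups below are purely hypothetical helper names:

def createDependencies(self, product, version, flavor=None, tag=None,
                       recursive=False):
    deps = self._createDeps(product, version, flavor, tag, recursive)
    for dep in deps:
        # hypothetical helpers; attribute names are illustrative only
        dep.tablefile = self._findTableFile(dep.product, dep.version, flavor)
        dep.distId = self._getDistIdFor(dep.product, dep.version, flavor)
    return deps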
def start_game(stdscr): <NEW_LINE> <INDENT> black_player, white_player = select_players(stdscr) <NEW_LINE> play_game(stdscr, black_player, white_player)
Asks the user to select players for the white and black side and then starts a new game of brandubh with the selected players.
625941b724f1403a9260099c
def has_network_field(value, key="name"): <NEW_LINE> <INDENT> def find(net): <NEW_LINE> <INDENT> return net[key] == value <NEW_LINE> <DEDENT> return find
Returns a function usable as a filter to list_neutron_nets Usage:: active_pred = has_network_field("ACTIVE", key="status") nets = list_neutron_nets(net_cl, filter_fn=active_pred) :param value: The value (of key) to match against :param key: the key in the network object to look up :return: a predicate function that takes a network object (a dict of this form: {'admin_state_up': True, 'id': 'bbcafa75-296a-4a20-bb57-4a0f12ef4bc4', 'mtu': 0, 'name': 'public', 'provider:network_type': 'vxlan', 'provider:physical_network': None, 'provider:segmentation_id': 10, 'router:external': True, 'shared': True, 'status': 'ACTIVE', 'subnets': ['55d174f0-bed4-4699-a4ff-1b738ac50207'], 'tenant_id': '85da67eac420401a960ff47c9c2f3469'} and returns net_obj[key] == value
625941b77cff6e4e811177b9
def user_details(request, stu_id=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user_dada = MyUser.objects.get(id=stu_id) <NEW_LINE> <DEDENT> except MyUser.DoesNotExist: <NEW_LINE> <INDENT> messages.error(request, "User Does not exist") <NEW_LINE> return HttpResponseRedirect("/") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return render(request, 'user/user_details.html', {"data": user_dada})
Get user details :param request: the HTTP request :param stu_id: id of the MyUser to look up :return: rendered user details page, or a redirect if the user does not exist
625941b73c8af77a43ae35d1
def create_user(self, email, password=None): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError('Users must have an email address') <NEW_LINE> <DEDENT> user = self.model( email=self.normalize_email(email), ) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user
Creates and saves a User
625941b744b2445a33931ed2
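Assuming this manager is attached as objects on the MyUser model (the usual Django pattern), creating a user looks like:

user = MyUser.objects.create_user(
    email='[email protected]',  # placeholder address
    password='s3cret',            # placeholder password
)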
def fibsfun(num): <NEW_LINE> <INDENT> fibs = [0, 1] <NEW_LINE> for i in range(num): <NEW_LINE> <INDENT> fibs.append(fibs[-2] + fibs[-1]) <NEW_LINE> <DEDENT> return fibs
Return the Fibonacci sequence as a list: starts from [0, 1] and appends num further terms, so the result has num + 2 elements
625941b7f8510a7c17cf9537
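A quick usage check; note the result has num + 2 elements because the list starts from [0, 1]:

print(fibsfun(5))  # [0, 1, 1, 2, 3, 5, 8]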
def run(): <NEW_LINE> <INDENT> logger = logging.getLogger("rc") <NEW_LINE> logger.setLevel(logging.INFO) <NEW_LINE> formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') <NEW_LINE> if args.log_path: <NEW_LINE> <INDENT> file_handler = logging.FileHandler(args.log_path) <NEW_LINE> file_handler.setLevel(logging.INFO) <NEW_LINE> file_handler.setFormatter(formatter) <NEW_LINE> logger.addHandler(file_handler) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> console_handler = logging.StreamHandler() <NEW_LINE> console_handler.setLevel(logging.INFO) <NEW_LINE> console_handler.setFormatter(formatter) <NEW_LINE> logger.addHandler(console_handler) <NEW_LINE> <DEDENT> logger.info('Running with args : {}'.format(args)) <NEW_LINE> multi(4, 10)
Prepares and runs the whole system.
625941b7ab23a570cc24ffb2
def test_letter_count_single(): <NEW_LINE> <INDENT> copied_character_string = copies.mcopies_ofc("10") <NEW_LINE> assert len(copied_character_string) == 10 <NEW_LINE> assert copied_character_string.count("C") == 10
Checks that the output contains the correct number of copies of the character
625941b73346ee7daa2b2b9c
def showModalDialog(self): <NEW_LINE> <INDENT> if nukescripts.panels.PythonPanel.showModalDialog(self): <NEW_LINE> <INDENT> self.submit()
Shows the Zync Submit dialog and does the work to submit it.
625941b7d8ef3951e3243370
def clean(params): <NEW_LINE> <INDENT> if os.path.isfile(params.vocab_file): <NEW_LINE> <INDENT> os.remove(params.vocab_file) <NEW_LINE> <DEDENT> if os.path.isfile(params.map_file): <NEW_LINE> <INDENT> os.remove(params.map_file) <NEW_LINE> <DEDENT> if os.path.isdir(params.ckpt_path): <NEW_LINE> <INDENT> shutil.rmtree(params.ckpt_path) <NEW_LINE> <DEDENT> if os.path.isdir(params.summary_path): <NEW_LINE> <INDENT> shutil.rmtree(params.summary_path) <NEW_LINE> <DEDENT> if os.path.isdir(params.result_path): <NEW_LINE> <INDENT> shutil.rmtree(params.result_path) <NEW_LINE> <DEDENT> if os.path.isdir("log"): <NEW_LINE> <INDENT> shutil.rmtree("log") <NEW_LINE> <DEDENT> if os.path.isdir("__pycache__"): <NEW_LINE> <INDENT> shutil.rmtree("__pycache__") <NEW_LINE> <DEDENT> if os.path.isfile(params.config_file): <NEW_LINE> <INDENT> os.remove(params.config_file) <NEW_LINE> <DEDENT> if os.path.isfile(params.vocab_file): <NEW_LINE> <INDENT> os.remove(params.vocab_file)
Cleanup function run before retraining: clean the current folder, removing the saved model, training logs, and generated config/vocab files
625941b799cbb53fe6792a1a
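The attributes clean() expects can be read off the code; a sketch using an argparse-style namespace with placeholder paths:

from argparse import Namespace

params = Namespace(
    vocab_file='vocab.pkl', map_file='maps.pkl', ckpt_path='ckpt',
    summary_path='summary', result_path='result', config_file='config.json',
)
clean(params)  # removes all of the above plus the log/ and __pycache__ dirs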
def allow(self, access_type, access): <NEW_LINE> <INDENT> self._validate_access(access_type, access) <NEW_LINE> return self.manager.allow(self, access_type, access)
Allow access to a share.
625941b73539df3088e2e17e
def get_open_orders(self): <NEW_LINE> <INDENT> orders = [] <NEW_LINE> for o in self.api.list_orders(): <NEW_LINE> <INDENT> orders.append([o.symbol, o.side, o.qty, o.filled_qty, o.submitted_at]) <NEW_LINE> <DEDENT> time.sleep(60/200) <NEW_LINE> return orders
Gets the user's open orders from the Alpaca API as a list of Python lists [symbol, side, qty, filled_qty, submitted_at]
625941b72ae34c7f2600cf64
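Usage is straightforward; broker below stands in for an instance of the class that owns get_open_orders, with self.api already an authenticated Alpaca client:

for symbol, side, qty, filled_qty, submitted_at in broker.get_open_orders():
    print(side, qty, symbol, 'filled', filled_qty, 'at', submitted_at)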
def _playsoundWin(sound, block = True): <NEW_LINE> <INDENT> sound = '"' + _canonicalizePath(sound) + '"' <NEW_LINE> from ctypes import create_unicode_buffer, windll, wintypes <NEW_LINE> from time import sleep <NEW_LINE> windll.winmm.mciSendStringW.argtypes = [wintypes.LPCWSTR, wintypes.LPWSTR, wintypes.UINT, wintypes.HANDLE] <NEW_LINE> windll.winmm.mciGetErrorStringW.argtypes = [wintypes.DWORD, wintypes.LPWSTR, wintypes.UINT] <NEW_LINE> def winCommand(*command): <NEW_LINE> <INDENT> bufLen = 600 <NEW_LINE> buf = create_unicode_buffer(bufLen) <NEW_LINE> command = ' '.join(command) <NEW_LINE> errorCode = int(windll.winmm.mciSendStringW(command, buf, bufLen - 1, 0)) <NEW_LINE> if errorCode: <NEW_LINE> <INDENT> errorBuffer = create_unicode_buffer(bufLen) <NEW_LINE> windll.winmm.mciGetErrorStringW(errorCode, errorBuffer, bufLen - 1) <NEW_LINE> exceptionMessage = ('\n Error ' + str(errorCode) + ' for command:' '\n ' + command + '\n ' + errorBuffer.value) <NEW_LINE> logger.error(exceptionMessage) <NEW_LINE> raise PlaysoundException(exceptionMessage) <NEW_LINE> <DEDENT> return buf.value <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> logger.debug('Starting') <NEW_LINE> winCommand(u'open {}'.format(sound)) <NEW_LINE> winCommand(u'play {}{}'.format(sound, ' wait' if block else '')) <NEW_LINE> logger.debug('Returning') <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> winCommand(u'close {}'.format(sound)) <NEW_LINE> <DEDENT> except PlaysoundException: <NEW_LINE> <INDENT> logger.warning(u'Failed to close the file: {}'.format(sound)) <NEW_LINE> pass
Utilizes windll.winmm. Tested and known to work with MP3 and WAVE on Windows 7 with Python 2.7. Probably works with more file formats. Probably works on Windows XP thru Windows 10. Probably works with all versions of Python. Inspired by (but not copied from) Michael Gundlach <[email protected]>'s mp3play: https://github.com/michaelgundlach/mp3play I never would have tried using windll.winmm without seeing his code.
625941b73cc13d1c6d3c71b7
def step(self): <NEW_LINE> <INDENT> self.get_info() <NEW_LINE> self.datacollector.collect(self) <NEW_LINE> self.schedule.step()
Advance the model by one step: refresh info, collect data, then step the schedule
625941b7baa26c4b54cb0f56
def test_reordering_non_sequential(sorted_entries_gaps): <NEW_LINE> <INDENT> qs = SortedModel.objects <NEW_LINE> nodes = sorted_entries_gaps <NEW_LINE> operations = {nodes[5].pk: -1, nodes[2].pk: +3} <NEW_LINE> expected = _sorted_by_order( [ (nodes[0].pk, 0), (nodes[1].pk, 2), (nodes[2].pk, 4 + (3 * 2) - 1), (nodes[3].pk, 6 - 1), (nodes[4].pk, 8 + 1 - 1), (nodes[5].pk, 10 - (1 * 2) - 1), ] ) <NEW_LINE> perform_reordering(qs, operations) <NEW_LINE> actual = _get_sorted_map() <NEW_LINE> assert actual == expected
Ensures that reordering non-sequential sort order values is properly handled. This case happens when an item gets deleted, creating gaps between values.
625941b760cbc95b062c637c
def set_timeout_async(f, timeout_ms = 0): <NEW_LINE> <INDENT> sublime_api.set_timeout_async(f, timeout_ms)
Schedules a function to be called in the future. The function will be called in a worker thread, and Sublime Text will not block while the function is running
625941b785dfad0860c3ac8b
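A typical call from a Sublime Text plugin; the one-second delay is arbitrary:

import sublime

def refresh():
    print('running on the worker thread')

sublime.set_timeout_async(refresh, 1000)  # call refresh after ~1000 ms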