code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
def check(self, credential, operation, hrn = None): <NEW_LINE> <INDENT> cred = Credential(cred=credential) <NEW_LINE> self.client_cred = cred <NEW_LINE> logger.debug("Auth.check: handling hrn=%s and credential=%s"% (hrn,cred.pretty_cred())) <NEW_LINE> if cred.type not in ['geni_sfa']: <NEW_LINE> <INDENT> raise CredentialNotVerifiable(cred.type, "%s not supported" % cred.type) <NEW_LINE> <DEDENT> self.client_gid = self.client_cred.get_gid_caller() <NEW_LINE> self.object_gid = self.client_cred.get_gid_object() <NEW_LINE> if not self.client_gid: <NEW_LINE> <INDENT> raise MissingCallerGID(self.client_cred.pretty_subject()) <NEW_LINE> <DEDENT> if self.peer_cert: <NEW_LINE> <INDENT> self.verifyPeerCert(self.peer_cert, self.client_gid) <NEW_LINE> <DEDENT> if operation: <NEW_LINE> <INDENT> if not self.client_cred.can_perform(operation): <NEW_LINE> <INDENT> raise InsufficientRights(operation) <NEW_LINE> <DEDENT> <DEDENT> if self.trusted_cert_list: <NEW_LINE> <INDENT> self.client_cred.verify(self.trusted_cert_file_list, self.config.SFA_CREDENTIAL_SCHEMA) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise MissingTrustedRoots(self.config.get_trustedroots_dir()) <NEW_LINE> <DEDENT> trusted_peers = [gid.get_hrn() for gid in self.trusted_cert_list] <NEW_LINE> if hrn and self.client_gid.get_hrn() not in trusted_peers: <NEW_LINE> <INDENT> target_hrn = self.object_gid.get_hrn() <NEW_LINE> if not hrn == target_hrn: <NEW_LINE> <INDENT> raise PermissionError("Target hrn: %s doesn't match specified hrn: %s " % (target_hrn, hrn) ) <NEW_LINE> <DEDENT> <DEDENT> return True | Check the credential against the peer cert (callerGID) included
in the credential matches the caller that is connected to the
HTTPS connection, check if the credential was signed by a
trusted cert and check if the credential is allowed to perform
the specified operation. | 625941ba85dfad0860c3acef |
def get_strain_state_dict(strains, stresses, eq_stress=None, tol=1e-10, add_eq=True, sort=True): <NEW_LINE> <INDENT> vstrains = np.array([Strain(s).zeroed(tol).voigt for s in strains]) <NEW_LINE> vstresses = np.array([Stress(s).zeroed(tol).voigt for s in stresses]) <NEW_LINE> independent = {tuple(np.nonzero(vstrain)[0].tolist()) for vstrain in vstrains} <NEW_LINE> strain_state_dict = OrderedDict() <NEW_LINE> if add_eq: <NEW_LINE> <INDENT> if eq_stress is not None: <NEW_LINE> <INDENT> veq_stress = Stress(eq_stress).voigt <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> veq_stress = find_eq_stress(strains, stresses).voigt <NEW_LINE> <DEDENT> <DEDENT> for n, ind in enumerate(independent): <NEW_LINE> <INDENT> template = np.zeros(6, dtype=bool) <NEW_LINE> np.put(template, ind, True) <NEW_LINE> template = np.tile(template, [vstresses.shape[0], 1]) <NEW_LINE> mode = (template == (np.abs(vstrains) > 1e-10)).all(axis=1) <NEW_LINE> mstresses = vstresses[mode] <NEW_LINE> mstrains = vstrains[mode] <NEW_LINE> min_nonzero_ind = np.argmin(np.abs(np.take(mstrains[-1], ind))) <NEW_LINE> min_nonzero_val = np.take(mstrains[-1], ind)[min_nonzero_ind] <NEW_LINE> strain_state = mstrains[-1] / min_nonzero_val <NEW_LINE> strain_state = tuple(strain_state) <NEW_LINE> if add_eq: <NEW_LINE> <INDENT> mstrains = np.vstack([mstrains, np.zeros(6)]) <NEW_LINE> mstresses = np.vstack([mstresses, veq_stress]) <NEW_LINE> <DEDENT> if sort: <NEW_LINE> <INDENT> mstresses = mstresses[mstrains[:, ind[0]].argsort()] <NEW_LINE> mstrains = mstrains[mstrains[:, ind[0]].argsort()] <NEW_LINE> <DEDENT> strain_state_dict[strain_state] = {"strains": mstrains, "stresses": mstresses} <NEW_LINE> <DEDENT> return strain_state_dict | Creates a dictionary of voigt-notation stress-strain sets
keyed by "strain state", i. e. a tuple corresponding to
the non-zero entries in ratios to the lowest nonzero value,
e.g. [0, 0.1, 0, 0.2, 0, 0] -> (0,1,0,2,0,0)
This allows strains to be collected in stencils as to
evaluate parameterized finite difference derivatives
Args:
strains (Nx3x3 array-like): strain matrices
stresses (Nx3x3 array-like): stress matrices
eq_stress (Nx3x3 array-like): equilibrium stress
tol (float): tolerance for sorting strain states
add_eq (bool): flag for whether to add eq_strain
to stress-strain sets for each strain state
sort (bool): flag for whether to sort strain states
Returns:
OrderedDict with strain state keys and dictionaries
with stress-strain data corresponding to strain state | 625941baf548e778e58cd412 |
def post(self): <NEW_LINE> <INDENT> rating = self.request.get("rating") <NEW_LINE> movie_id = self.request.get("movie") <NEW_LINE> movie = Movie.get_by_id( int(movie_id) ) <NEW_LINE> if movie and rating: <NEW_LINE> <INDENT> movie.rating = rating <NEW_LINE> movie.put() <NEW_LINE> t = jinja_env.get_template("rating-confirmation.html") <NEW_LINE> response = t.render(movie = movie) <NEW_LINE> self.response.write(response) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.renderError(400) | User wants to rate a movie | 625941baa219f33f3462880a |
def ComputeStatistics(self, approx_ok): <NEW_LINE> <INDENT> if approx_ok == 0: <NEW_LINE> <INDENT> approx_ok = False <NEW_LINE> <DEDENT> elif approx_ok == 1: <NEW_LINE> <INDENT> approx_ok = True <NEW_LINE> <DEDENT> return _gdal.Band_ComputeStatistics(self, approx_ok) | ComputeStatistics(Band self, bool approx_ok, GDALProgressFunc callback=0, void * callback_data=None) -> CPLErr | 625941ba5166f23b2e1a4fef |
def get_halfmoves(self): <NEW_LINE> <INDENT> return sorted(self.history.keys()) | Return all the moves in the history | 625941ba3eb6a72ae02ec36a |
@login_required() <NEW_LINE> def squares(request): <NEW_LINE> <INDENT> user_id = str(request.user.id) <NEW_LINE> userprofile = UserProfile.objects.filter(user_id=user_id) <NEW_LINE> profile = {} <NEW_LINE> if userprofile: <NEW_LINE> <INDENT> userprofile = userprofile[0] <NEW_LINE> profile['nickname'] = userprofile.nickname <NEW_LINE> profile['signature'] = userprofile.signature <NEW_LINE> profile['avatar'] = userprofile.avatar <NEW_LINE> <DEDENT> c = RequestContext(request, { 'profile':profile }) <NEW_LINE> return render_to_response('squares.html',c) | 首页 | 625941bafbf16365ca6f6053 |
def session_middleware( app: web.Application, storage: 'AbstractStorage' ) -> middleware: <NEW_LINE> <INDENT> if not isinstance(storage, AbstractStorage): <NEW_LINE> <INDENT> raise RuntimeError(f"Expected an AbstractStorage got {storage!s}") <NEW_LINE> <DEDENT> @web.middleware <NEW_LINE> async def middleware( request: web.Request, handler: Handler ) -> web.StreamResponse: <NEW_LINE> <INDENT> request[SESSION_STORAGE] = storage <NEW_LINE> try: <NEW_LINE> <INDENT> response = await handler(request) <NEW_LINE> <DEDENT> except web.HTTPException as exc: <NEW_LINE> <INDENT> raise exc <NEW_LINE> <DEDENT> if not isinstance(response, (web.StreamResponse, web.HTTPException)): <NEW_LINE> <INDENT> return response <NEW_LINE> <DEDENT> if response.prepared: <NEW_LINE> <INDENT> raise RuntimeError( "We Cannot save session data into on prepared responses" ) <NEW_LINE> <DEDENT> session = request.get(SESSION_OBJECT) <NEW_LINE> if isinstance(session, SessionData): <NEW_LINE> <INDENT> if session.is_changed: <NEW_LINE> <INDENT> await storage.save_session(request, response, session) <NEW_LINE> <DEDENT> <DEDENT> return response <NEW_LINE> <DEDENT> return middleware | Middleware to attach Session Storage to every Request. | 625941ba07f4c71912b1131d |
def zero_loss(y_true, y_pred): <NEW_LINE> <INDENT> return K.constant(0.) | ZERO loss (above the validation classes head) according to Keras API.
This makes sure that no computations are made
through the validation classes head. | 625941ba293b9510aa2c312e |
def prepare_project_object(self, project_name): <NEW_LINE> <INDENT> Validator.validate_project_name(project_name) <NEW_LINE> project = Project(project_name) <NEW_LINE> return project | Validates the project name and returns the new Project object.
Throws the validation error if something is wrong | 625941ba8a349b6b435e800a |
def run_kdump(test, params, env): <NEW_LINE> <INDENT> vm = env.get_vm(params["main_vm"]) <NEW_LINE> vm.verify_alive() <NEW_LINE> timeout = float(params.get("login_timeout", 240)) <NEW_LINE> crash_timeout = float(params.get("crash_timeout", 360)) <NEW_LINE> session = vm.wait_for_login(timeout=timeout) <NEW_LINE> def_kernel_param_cmd = ("grubby --update-kernel=`grubby --default-kernel`" " --args=crashkernel=128M") <NEW_LINE> kernel_param_cmd = params.get("kernel_param_cmd", def_kernel_param_cmd) <NEW_LINE> def_kdump_enable_cmd = "chkconfig kdump on && service kdump start" <NEW_LINE> kdump_enable_cmd = params.get("kdump_enable_cmd", def_kdump_enable_cmd) <NEW_LINE> def_crash_kernel_prob_cmd = "grep -q 1 /sys/kernel/kexec_crash_loaded" <NEW_LINE> crash_kernel_prob_cmd = params.get("crash_kernel_prob_cmd", def_crash_kernel_prob_cmd) <NEW_LINE> def crash_test(vcpu): <NEW_LINE> <INDENT> session = vm.wait_for_login(timeout=timeout) <NEW_LINE> session.cmd_output("rm -rf /var/crash/*") <NEW_LINE> logging.info("Triggering crash on vcpu %d ...", vcpu) <NEW_LINE> crash_cmd = "taskset -c %d echo c > /proc/sysrq-trigger" % vcpu <NEW_LINE> session.sendline(crash_cmd) <NEW_LINE> if not virt_utils.wait_for(lambda: not session.is_responsive(), 240, 0, 1): <NEW_LINE> <INDENT> raise error.TestFail("Could not trigger crash on vcpu %d" % vcpu) <NEW_LINE> <DEDENT> logging.info("Waiting for kernel crash dump to complete") <NEW_LINE> session = vm.wait_for_login(timeout=crash_timeout) <NEW_LINE> logging.info("Probing vmcore file...") <NEW_LINE> session.cmd("ls -R /var/crash | grep vmcore") <NEW_LINE> logging.info("Found vmcore.") <NEW_LINE> session.cmd_output("rm -rf /var/crash/*") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> logging.info("Checking the existence of crash kernel...") <NEW_LINE> try: <NEW_LINE> <INDENT> session.cmd(crash_kernel_prob_cmd) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> logging.info("Crash kernel is not loaded. 
Trying to load it") <NEW_LINE> session.cmd(kernel_param_cmd) <NEW_LINE> session = vm.reboot(session, timeout=timeout) <NEW_LINE> <DEDENT> logging.info("Enabling kdump service...") <NEW_LINE> session.cmd(kdump_enable_cmd, timeout=120) <NEW_LINE> nvcpu = int(params.get("smp", 1)) <NEW_LINE> for i in range (nvcpu): <NEW_LINE> <INDENT> crash_test(i) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> session.close() | KVM reboot test:
1) Log into a guest
2) Check and enable the kdump
3) For each vcpu, trigger a crash and check the vmcore
@param test: kvm test object
@param params: Dictionary with the test parameters
@param env: Dictionary with test environment. | 625941ba0a366e3fb873e6ad |
def test_add_thin_provisioning_with_holes_in_file(self): <NEW_LINE> <INDENT> chunk_size = units.Ki <NEW_LINE> content = b"*" * chunk_size + b"\x00" * chunk_size + b"*" * chunk_size <NEW_LINE> self._do_test_thin_provisioning(content, 3 * chunk_size, 1, 2, True) | Tests that a file which contains null bytes chunks is sparsified
with a thin provisioning configuration. | 625941ba4a966d76dd550ea2 |
def in_board_coord(self, coord): <NEW_LINE> <INDENT> col = coord[0] <NEW_LINE> row = int(coord[1:]) <NEW_LINE> if col in self.x_axis_range and row in self.y_axis_range: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | method that will be utilized in the Piece class and make
move method. Will check if a coord_param corresponds to legal bord coordinates. | 625941ba1b99ca400220a947 |
def output(self, pgs=None, cells_csv_filename=None, cells_json_filename=None, cells_xml_filename=None, table_csv_filename=None, table_html_filename=None, table_list_filename=None, name=None, output_type=None): <NEW_LINE> <INDENT> for pg, page in self.pages.items(): <NEW_LINE> <INDENT> cells = self.cells(pg) <NEW_LINE> text = self.texts(pg) <NEW_LINE> pref = "page-{:04d}".format(pg) <NEW_LINE> output( cells, text=text, pgs=None, prefix=pref, cells_csv_filename=cells_csv_filename, cells_json_filename=cells_json_filename, cells_xml_filename=cells_xml_filename, table_csv_filename=table_csv_filename, table_html_filename=table_html_filename, table_list_filename=table_list_filename, infile=self.infile, name=name, output_type=output_type) | Output recognition result in various
formats defined by parameters. | 625941ba6fece00bbac2d5d1 |
def negSamplingLossAndGradient( centerWordVec, outsideWordIdx, outsideVectors, dataset, K=10 ): <NEW_LINE> <INDENT> negSampleWordIndices = getNegativeSamples(outsideWordIdx, dataset, K) <NEW_LINE> indices = [outsideWordIdx] + negSampleWordIndices <NEW_LINE> loss = 0.0 <NEW_LINE> gradCenterVec = np.zeros(centerWordVec.shape) <NEW_LINE> gradOutsideVecs = np.zeros(outsideVectors.shape) <NEW_LINE> y_hat = sigmoid(np.dot(outsideVectors[outsideWordIdx], centerWordVec)) <NEW_LINE> loss = -np.log(y_hat) <NEW_LINE> gradOutsideVecs[outsideWordIdx] += centerWordVec * (y_hat - 1) <NEW_LINE> gradCenterVec += outsideVectors[outsideWordIdx] * (y_hat - 1) <NEW_LINE> return loss, gradCenterVec, gradOutsideVecs | Negative sampling loss function for word2vec models
Implement the negative sampling loss and gradients for a centerWordVec
and a outsideWordIdx word vector as a building block for word2vec
models. K is the number of negative samples to take.
Note: The same word may be negatively sampled multiple times. For
example if an outside word is sampled twice, you shall have to
double count the gradient with respect to this word. Thrice if
it was sampled three times, and so forth.
Arguments/Return Specifications: same as naiveSoftmaxLossAndGradient | 625941ba711fe17d82542208 |
def _ini_config_file(self): <NEW_LINE> <INDENT> self.config.add_section('Section_w1') <NEW_LINE> self.config.add_section('Section_w2') <NEW_LINE> self.config.add_section('Section_w3') <NEW_LINE> self.config.add_section('Section_units') <NEW_LINE> self.config.add_section('Section_file') | Initialise la configuration si le fichier n'est pas trouvé | 625941ba796e427e537b0459 |
def runs_parallel(with_bubble_of=None): <NEW_LINE> <INDENT> def real_decorator(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def inner(*args, **kwargs): <NEW_LINE> <INDENT> Random.atfork() <NEW_LINE> return func(*args, **kwargs) <NEW_LINE> <DEDENT> _parallel.add(func.__name__) <NEW_LINE> if is_sequential(func): <NEW_LINE> <INDENT> _sequential.remove(func.__name__) <NEW_LINE> <DEDENT> inner._pool_size = with_bubble_of <NEW_LINE> return inner <NEW_LINE> <DEDENT> if type(with_bubble_of) == type(real_decorator): <NEW_LINE> <INDENT> return real_decorator(with_bubble_of) <NEW_LINE> <DEDENT> return real_decorator | Decorator explicitly specifying that a function be run in parallel,
since the default mode of operation is to be sequential. | 625941ba92d797404e30401f |
def test_get_deployments_no_kind(yml_with_no_kind): <NEW_LINE> <INDENT> data = yaml_parser.read_yaml(yml_with_no_kind) <NEW_LINE> deployments = yaml_parser.get_deployments(data) <NEW_LINE> assert not deployments | Test we get no exceptions (but also no deployments) if we don't
have any "kind" in our yml | 625941ba9f2886367277a727 |
def fast_differenz(self, m): <NEW_LINE> <INDENT> if not isinstance(m, NatuerlicheZahl): <NEW_LINE> <INDENT> m = NatuerlicheZahl(m) <NEW_LINE> <DEDENT> for d in Zaehlen(0, self): <NEW_LINE> <INDENT> if m == self: <NEW_LINE> <INDENT> return d <NEW_LINE> <DEDENT> m = m.next() <NEW_LINE> <DEDENT> return None | Berechnet die Differenz mit einer Ganzzahl m.
Laufzeitoptimierte Variante von differenz():
Summand m wird gemeinsam mit d hoch gezaehlt und
dadurch die zusaetzliche Addition m + d in jedem
Schritt eingespart.
:param m: NatuerlicheZahl (pos. Ganzzahl oder 0).
:return: Differenz d = n-m oder None, falls m > n. | 625941ba6fece00bbac2d5d2 |
def test_the_points_dont_define_a_plane(self): <NEW_LINE> <INDENT> plane = [[1, 2, 1], [2, 3, 2], [3, 4, 3]] <NEW_LINE> point = [4, 3] <NEW_LINE> self.assertFalse(project_point_on_plane(plane, point)) | The three points that define the plane are colinear. | 625941ba091ae35668666dfb |
def _createPhotoBackendVocabulary(): <NEW_LINE> <INDENT> for backend in ['ldap', 'ofs']: <NEW_LINE> <INDENT> term = SimpleTerm(value=backend, token=backend, title=backend) <NEW_LINE> yield term | Create zope.schema vocabulary from available photo backends.
@return: Iterable of SimpleTerm objects | 625941bab5575c28eb68de94 |
def _validate_string(self, param_name): <NEW_LINE> <INDENT> given = self.request.validated_params[param_name] <NEW_LINE> if isinstance(given, str) or isinstance(given, unicode): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return ( "Value {val} for argument {param} is not a string.".format( val=given, param=param_name ) ) | Validate that the given param_name argument value in the query is a
string and return False if it is; return an error message if it isn't. | 625941bae5267d203edcdb37 |
def getNextLink(self): <NEW_LINE> <INDENT> return self.base.get("nextLink", []) | A link to the next page of changes | 625941ba99fddb7c1c9de229 |
def test_put_invalid_request(self): <NEW_LINE> <INDENT> new = {"foo": "blaaaaaah"} <NEW_LINE> request = construct_dummy_request(method="PUT", data=json.dumps(new)) <NEW_LINE> content = self.resource.render(request) <NEW_LINE> self.assertEqual(request.responseCode, 400) <NEW_LINE> data = json.loads(content) <NEW_LINE> self.assertTrue("error" in data, data) | Invalid param types should be caught by handler and indicated in response.
We are not testing the full suite of possible bad params possible --
this should be already tested in simulation.parse_pipe_params unit tests. | 625941ba004d5f362079a1cd |
def npc_ai(name): <NEW_LINE> <INDENT> if life_list[name]["hostile"] == True: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass | name: the living object in reference
this will take some consideration into the idea
of code optimization, considering how many times this
functional will be ran | 625941ba55399d3f0558854a |
def get_GPList(self): <NEW_LINE> <INDENT> pass | Return the stored list of geographic points
:rtype: list
>>> import SimpleGIS
>>> l = []
>>> l.append(SimpleGIS.GeographicPoint(10,20))
>>> l.append(SimpleGIS.GeographicPoint(0,0))
>>> p = Polygon(l)
>>> l2 = p.get_GPList();
>>> print l2[0].get_latitude();
10 | 625941ba6e29344779a624ab |
def connection_exists(self, index_or_alias): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._connections._resolve_alias_or_index(index_or_alias) <NEW_LINE> return True <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return False | Validates whether connection or session exists or not
``index_or_alias Index or Alias of the session to be validate of its existance
returns True if connection exists, False otherwise | 625941bab7558d58953c4db1 |
def _parse_start(self, item): <NEW_LINE> <INDENT> return datetime.strptime(item["start_date"], "%Y-%m-%d %H:%M:%S") | Parse start datetime as a naive datetime object. | 625941ba8e7ae83300e4ae62 |
def callback_added(self, item): <NEW_LINE> <INDENT> callback, type, button = item <NEW_LINE> picker = getattr(self.scene.scene.picker, '%spicker' % type) <NEW_LINE> self._active_pickers[type] = picker <NEW_LINE> if not type in self._picker_callback_nbs: <NEW_LINE> <INDENT> self._picker_callback_nbs[type] = picker.add_observer("EndPickEvent", self.on_pick) <NEW_LINE> <DEDENT> if VTK_VERSION>5: <NEW_LINE> <INDENT> move_event = "RenderEvent" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> move_event = 'MouseMoveEvent' <NEW_LINE> <DEDENT> if not self._mouse_mvt_callback_nb: <NEW_LINE> <INDENT> self._mouse_mvt_callback_nb = self.scene.scene.interactor.add_observer(move_event, self.on_mouse_move) <NEW_LINE> <DEDENT> if not button in self._mouse_press_callback_nbs: <NEW_LINE> <INDENT> self._mouse_press_callback_nbs[button] = self.scene.scene.interactor.add_observer( '%sButtonPressEvent' % button, self.on_button_press) <NEW_LINE> <DEDENT> if VTK_VERSION>5: <NEW_LINE> <INDENT> release_event = "EndInteractionEvent" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> release_event = '%sButtonReleaseEvent' % button <NEW_LINE> <DEDENT> if not button in self._mouse_release_callback_nbs: <NEW_LINE> <INDENT> self._mouse_release_callback_nbs[button] = self.scene.scene.interactor.add_observer( release_event, self.on_button_release) | Wire up the different VTK callbacks.
| 625941ba1f037a2d8b946095 |
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, InstitutionsResponseBody): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ | Returns true if both objects are equal | 625941bafb3f5b602dac3526 |
def _specified_ants_have_moved(self, ant_list: List[Ant]) -> bool: <NEW_LINE> <INDENT> if not ant_list: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ant_moves = 0 <NEW_LINE> for ant in ant_list: <NEW_LINE> <INDENT> if ant.hasMoved: <NEW_LINE> <INDENT> ant_moves += 1 <NEW_LINE> <DEDENT> <DEDENT> if ant_moves == len(ant_list): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False | _specified_ants_have_moved
Helper method that determines if all of the given ants have moved.
:param ant_list: The specified ants to check.
:return: True if all of those ants have moved. Otherwise, returns False. | 625941badc8b845886cb53cb |
def check_dependencies(tools): <NEW_LINE> <INDENT> print("Checking required dependencies......") <NEW_LINE> if isinstance(tools, list): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tools = [tools] <NEW_LINE> <DEDENT> for t in tools: <NEW_LINE> <INDENT> subp = subprocess.Popen(["which", t], stderr=subprocess.PIPE, stdout=subprocess.PIPE) <NEW_LINE> if subp.stderr.read(): <NEW_LINE> <INDENT> print("\033[1;31m" + "OSError: \033[1;33m" + __file__ + " requires " + t + "\033[0m") | Ensure required tools are present. | 625941ba3c8af77a43ae3634 |
def remove_3d(self, ele): <NEW_LINE> <INDENT> self.graph.render_3d.remove(ele) | Remove a 3d object from the scene. | 625941bafff4ab517eb2f2d0 |
def harmonic_i2dec(freq, time, n=5): <NEW_LINE> <INDENT> quad_dec = np.arange(1, n+1)**2 <NEW_LINE> return harmonic(freq, time, n=n, decay=quad_dec) | sine wave with n harmonics with quadratic decay
time: ndarray | 625941ba4428ac0f6e5ba689 |
def email_alias(self, alias_id: str) -> 'EmailAlias': <NEW_LINE> <INDENT> return self.translator.get('email_alias')(session=self._session, object_id=alias_id) | Initialize a :class: `EmailAlias` object, whose box id is alias_id.
:param alias_id:
The aliad id of the :class:`EmailAlias` object.
:return:
A :class:`EmailAlias` object with the given entry ID. | 625941bad58c6744b4257af7 |
def findNumberOfLIS(self, nums): <NEW_LINE> <INDENT> dp = [1 for _ in range(len(nums))] <NEW_LINE> cnt = [1 for _ in range(len(nums))] <NEW_LINE> maxLen = 1 <NEW_LINE> for i in range(1, len(nums)): <NEW_LINE> <INDENT> for j in range(i): <NEW_LINE> <INDENT> if nums[j] >= nums[i]: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if dp[j] + 1 > dp[i]: <NEW_LINE> <INDENT> dp[i] = dp[j] + 1 <NEW_LINE> cnt[i] = cnt[j] <NEW_LINE> <DEDENT> elif dp[j] + 1 == dp[i]: <NEW_LINE> <INDENT> cnt[i] += cnt[j] <NEW_LINE> <DEDENT> <DEDENT> maxLen = max(maxLen, dp[i]) <NEW_LINE> <DEDENT> result = 0 <NEW_LINE> for i, d in enumerate(dp): <NEW_LINE> <INDENT> if d == maxLen: <NEW_LINE> <INDENT> result += cnt[i] <NEW_LINE> <DEDENT> <DEDENT> return result | :type nums: List[int]
:rtype: int | 625941bacdde0d52a9e52ec6 |
def auth(username, password): <NEW_LINE> <INDENT> users = get_users() <NEW_LINE> _password = users.get(username) <NEW_LINE> if not password: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return _password == password | 根据用户名和密码确认用户是否登录 | 625941ba925a0f43d2549d0a |
def includeme(config): <NEW_LINE> <INDENT> settings = config.get_settings() <NEW_LINE> config.include('pyramid_tm') <NEW_LINE> session_factory = get_session_factory(get_engine(settings)) <NEW_LINE> config.registry['dbsession_factory'] = session_factory <NEW_LINE> config.add_request_method( lambda r: get_tm_session(session_factory, r.tm), 'dbsession', reify=True ) | Initialize the model for a Pyramid app.
Activate this setup using ``config.include('gout-se.models')``. | 625941ba66656f66f7cbc041 |
def get_experiment_info(): <NEW_LINE> <INDENT> return _experiment_label_info | Retrieve the current experiment level information.
Example:
>>> import heputils
>>> heputils.plot.set_style("ATLAS")
>>> for key, info in heputils.plot.get_experiment_info().items():
... print(f"{key}: {info}")
...
name: atlas
status: Internal
center_of_mass_energy: 13
center_of_mass_energy_units: TeV
luminosity: 132
luminosity_units: fb
Returns:
dict: The dictionary of descriptors of the experiment. | 625941ba8e71fb1e9831d644 |
def policy_assignment_delete(name, scope, **kwargs): <NEW_LINE> <INDENT> result = False <NEW_LINE> polconn = __utils__['azurearm.get_client']('policy', **kwargs) <NEW_LINE> try: <NEW_LINE> <INDENT> policy = polconn.policy_assignments.delete( policy_assignment_name=name, scope=scope ) <NEW_LINE> result = True <NEW_LINE> <DEDENT> except CloudError as exc: <NEW_LINE> <INDENT> __utils__['azurearm.log_cloud_error']('resource', str(exc), **kwargs) <NEW_LINE> <DEDENT> return result | .. versionadded:: Fluorine
Delete a policy assignment.
:param name: The name of the policy assignment to delete.
:param scope: The scope of the policy assignment.
CLI Example:
.. code-block:: bash
salt-call azurearm_resource.policy_assignment_delete testassign /subscriptions/bc75htn-a0fhsi-349b-56gh-4fghti-f84852 | 625941baf9cc0f698b14049c |
def __unicode__(self): <NEW_LINE> <INDENT> return self.name | :returns: name string | 625941ba16aa5153ce36230f |
@app.route('/register', methods=['POST']) <NEW_LINE> def register_user(): <NEW_LINE> <INDENT> email = request.form.get("email") <NEW_LINE> password = request.form.get("password") <NEW_LINE> age = request.form.get("age") <NEW_LINE> zipcode = request.form.get("zipcode") <NEW_LINE> user_check = User.query.filter_by(email=email).first() <NEW_LINE> if user_check: <NEW_LINE> <INDENT> flash('You have already registered. Please log in.') <NEW_LINE> return redirect('/login') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> user = User(email=email, password=password, age=age, zipcode=zipcode) <NEW_LINE> db.session.add(user) <NEW_LINE> db.session.commit() <NEW_LINE> return redirect("/") | Checks db for user and adds user if new | 625941ba24f1403a92600a00 |
def get_torque_grav(self, x): <NEW_LINE> <INDENT> return -self.m_F * self.c_F * self.g * np.cos(np.deg2rad(x[1])) | :param x: state variables [activation level; foot's absolute orientation wrt horizontal axis; foot's absolute rotational velocity]
:return: gravity torque of the foot around the ankle | 625941ba76e4537e8c35150f |
def __init__(self, *args): <NEW_LINE> <INDENT> _coda_except.InvalidFormatException_swiginit(self, _coda_except.new_InvalidFormatException(*args)) | __init__(InvalidFormatException self) -> InvalidFormatException
__init__(InvalidFormatException self, InvalidFormatException arg2) -> InvalidFormatException
__init__(InvalidFormatException self, InvalidFormatException arg2) -> InvalidFormatException
__init__(InvalidFormatException self, Context c) -> InvalidFormatException
__init__(InvalidFormatException self, std::string const & msg) -> InvalidFormatException
__init__(InvalidFormatException self, Throwable t, Context c) -> InvalidFormatException | 625941ba2c8b7c6e89b3565a |
def test_plot_multi_pivot_filter(self): <NEW_LINE> <INDENT> trace1 = trappy.FTrace(name="first") <NEW_LINE> trace2 = trappy.FTrace(name="second") <NEW_LINE> l = trappy.LinePlot( trace1, trappy.cpu_power.CpuInPower, column=[ "dynamic_power", "load1"], filters={ "cdev_state": [ 1, 0]}, pivot="cpus") <NEW_LINE> l.view(test=True) | Tests LinePlot with Pivot and filters | 625941bacb5e8a47e48b7946 |
def pipe_item_split(tokens, loc): <NEW_LINE> <INDENT> if isinstance(tokens, list) or "expr" in tokens.keys(): <NEW_LINE> <INDENT> internal_assert(len(tokens) == 1, "invalid expr pipe item tokens", tokens) <NEW_LINE> return "expr", (tokens[0],) <NEW_LINE> <DEDENT> elif "partial" in tokens.keys(): <NEW_LINE> <INDENT> func, args = tokens <NEW_LINE> pos_args, star_args, kwd_args, dubstar_args = split_function_call(args, loc) <NEW_LINE> return "partial", (func, join_args(pos_args, star_args), join_args(kwd_args, dubstar_args)) <NEW_LINE> <DEDENT> elif "attrgetter" in tokens.keys(): <NEW_LINE> <INDENT> name, args = attrgetter_atom_split(tokens) <NEW_LINE> return "attrgetter", (name, args) <NEW_LINE> <DEDENT> elif "itemgetter" in tokens.keys(): <NEW_LINE> <INDENT> op, args = tokens <NEW_LINE> return "itemgetter", (op, args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise CoconutInternalException("invalid pipe item tokens", tokens) | Process a pipe item, which could be a partial, an attribute access, a method call, or an expression.
Return (type, split) where split is
- (expr,) for expression,
- (func, pos_args, kwd_args) for partial,
- (name, args) for attr/method, and
- (op, args) for itemgetter. | 625941ba2ae34c7f2600cfc9 |
def checkPath(filePre, fileSuf, chr_): <NEW_LINE> <INDENT> pathFile = os.path.join(os.getcwd(), '{}_CHR{}.{}'.format(filePre, chr_, fileSuf)) <NEW_LINE> print(pathFile) <NEW_LINE> if os.path.exists(pathFile): <NEW_LINE> <INDENT> print('FOUND MATCHING FILE {}'.format(pathFile)) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Given a prefix and file extension and a chr number checks if a file exists in current directory
| 625941bab7558d58953c4db2 |
def configure(self, config, **kwargs): <NEW_LINE> <INDENT> self.config = CollectDPlugin.config_to_dict(config)['Module'] | Configuration callback for the plugin.
will be called by collectd with a collectd.
Config object containing configuration data for this plugin from the
collectd configuration file. | 625941bac4546d3d9de728c8 |
def stopping_criteria_rho(self): <NEW_LINE> <INDENT> stop = [] <NEW_LINE> for k in range(self.num_rhos): <NEW_LINE> <INDENT> stop.append(np.real(np.max(np.linalg.eigvals(self.R[k])) - self.data_measurements_total[k])) <NEW_LINE> <DEDENT> return np.max(stop) | Stopping criteria for R rho R algorithm | 625941ba1d351010ab8559b4 |
def mk_amat(output_filename, data, lag, phi, field='T'): <NEW_LINE> <INDENT> arr = np.reshape(data.values, (data.shape[0], -1)) <NEW_LINE> data = pd.DataFrame(arr, index=data.coords[data.dims[0]]) <NEW_LINE> out = project_lag(phi, data, lag) <NEW_LINE> with open(output_filename, "wb") as f: <NEW_LINE> <INDENT> pickle.dump(out, f) | datafile is an xarray object with 't' dimension first
TODO: this step uses a ton of memory! use on disk arrays maybe. | 625941ba7b25080760e392f2 |
def bind(self, *pos, **kw):
    """Implements proxy connection for UDP sockets, which happens during
    the bind() phase (SOCKS5 UDP ASSOCIATE).
    """
    proxy_type, proxy_addr, proxy_port, rdns, username, password = self.proxy
    # Non-proxied or non-UDP sockets bind normally.
    if not proxy_type or self.type != socket.SOCK_DGRAM:
        return _orig_socket.bind(self, *pos, **kw)
    if self._proxyconn:
        raise socket.error(EINVAL, "Socket already bound to an address")
    if proxy_type != SOCKS5:
        msg = "UDP only supported by SOCKS5 proxy type"
        raise socket.error(EOPNOTSUPP, msg)
    _BaseSocket.bind(self, *pos, **kw)
    # Need to specify actual local port because of a name resolution quirk;
    # only the port from the local sockname is used.
    sockname = self.getsockname()
    _, port = sockname[0], sockname[1]
    proxy = self._proxy_addr()
    addrs = socket.getaddrinfo(proxy[0], 0, 0, socket.SOCK_STREAM, socket.SOL_TCP)
    af, socktype, proto, canonname, sa = addrs[0]
    # Control connection to the proxy over TCP.
    self._proxyconn = _orig_socket(af, socktype, proto)
    self._proxyconn.connect(proxy)
    if af == socket.AF_INET6:
        dst = ("::", port)
    else:
        dst = ("0.0.0.0", port)
    UDP_ASSOCIATE = b"\x03"
    _, relay = self._SOCKS5_request(self._proxyconn, UDP_ASSOCIATE, dst)
    # Datagrams are routed through the relay the proxy handed back.
    host, _ = proxy
    _, port = relay
    _BaseSocket.connect(self, (host, port))
    self.proxy_sockname = ("0.0.0.0", 0)  # ignored by the proxy
def tunnel(self, host, local_port, interim_port, host_port):
    """Open an SSH tunnel to a remote system through the cluster bastion.

    :param host: final destination host
    :param local_port: port to listen on locally
    :param interim_port: port used on the bastion hop
    :param host_port: port on the destination host
    """
    hosts = self._get_cluster_hosts()
    first_host = hosts[next(iter(hosts))]
    host_ip, bastion = self._get_host_bastion(first_host)
    cmd = 'ssh -L {}:localhost:{} ec2-user@{} ssh -L {}:{}:{} {}'.format(
        local_port, interim_port, bastion, interim_port, host, host_port, host_ip)
    subprocess.call(cmd, shell=True)
def check_text(self, text): <NEW_LINE> <INDENT> if to_text_string(text) == u'': <NEW_LINE> <INDENT> self.button_ok.setEnabled(False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.button_ok.setEnabled(True) | Disable empty layout name possibility | 625941ba1d351010ab8559b5 |
def isAnagram(self, s: str, t: str) -> bool:
    """Return True if `t` is an anagram of `s`.

    Assumes lowercase ASCII letters only; uses a single 26-entry tally
    (+1 for characters of s, -1 for characters of t).
    """
    tally = [0] * 26
    base = ord('a')
    for ch in s:
        tally[ord(ch) - base] += 1
    for ch in t:
        tally[ord(ch) - base] -= 1
    return all(count == 0 for count in tally)
def duplicate(obj, changes=None):
    """Duplicate any Django model instance, including m2m fields.

    changes: iterable of (attribute, value) pairs applied to the copy, e.g.
        changes = (('fullname', 'name (copy)'), ('do not copy me', ''))
    Returns the saved duplicate.
    Raises ValueError if `obj` has not been saved yet.
    """
    if not obj.pk:
        raise ValueError('Instance must be saved before it can be cloned.')
    dup = copy.copy(obj)
    dup.pk = None
    # Bug fix: the default changes=None used to crash with TypeError when
    # no changes were passed; treat None as "no changes".
    for attr, value in (changes or ()):
        setattr(dup, attr, value)
    dup.save()
    for field in obj._meta.many_to_many:
        source = getattr(obj, field.attname)
        destination = getattr(dup, field.attname)
        for item in source.all():
            try:
                destination.add(item)
            except Exception:
                # Best-effort copy: skip items the relation rejects, but no
                # longer swallow KeyboardInterrupt/SystemExit (was bare except).
                pass
    return dup
def read_modules_files(file_paths):
    """Read module names from the given files (whitespace/newline separated).

    Return the collected module names, or an empty list when no files
    were provided.
    """
    if not file_paths:
        return []
    names = []
    for path in file_paths:
        names.extend(path.read_text().split())
    return names
def exponential_ease_in_out(p):
    """Piecewise exponential easing function.

    Modeled after:
        y = (1/2)2^(10(2x - 1))          ; [0, 0.5)
        y = -(1/2)*2^(-10(2x - 1)) + 1   ; [0.5, 1]
    """
    if p in (0.0, 1.0):
        return p
    if p < 0.5:
        return 0.5 * pow(2, 20 * p - 10)
    return -0.5 * pow(2, -20 * p + 10) + 1
def CheckAccess(cfg):
    """Check whether the current user can access cfg.project, printing
    guidance when access is missing.

    Args:
        cfg: An AcloudConfig instance.
    """
    credentials = auth.CreateCredentials(cfg)
    compute_client = android_compute_client.AndroidComputeClient(cfg, credentials)
    logger.info("Checking if user has access to project %s", cfg.project)
    if compute_client.CheckAccess():
        return
    logger.error("User does not have access to project %s", cfg.project)
    print("Looks like you do not have access to %s. " % cfg.project)
    if cfg.project in cfg.no_project_access_msg_map:
        print(cfg.no_project_access_msg_map[cfg.project])
def copy_key(self, ctxt, key_id):
    """Copy (clone) a key stored by barbican.

    :param ctxt: request context (user/environment, cinder/context.py)
    :param key_id: UUID of the key to copy
    :return: UUID of the key copy
    :throws Exception: if key copying fails (logged, then re-raised)
    """
    barbican_client = self._get_barbican_client(ctxt)
    try:
        secret_ref = self._create_secret_ref(key_id, barbican_client)
        secret = self._get_secret(ctxt, secret_ref)
        content_type = secret.content_types['default']
        secret_data = self._get_secret_data(secret, payload_content_type=content_type)
        key = keymgr_key.SymmetricKey(secret.algorithm, secret_data)
        return self.store_key(ctxt, key, secret.expiration, secret.name,
                              content_type, 'base64', secret.algorithm,
                              secret.bit_length, secret.mode, True)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE("Error copying key."))
def __str__(self, mode="dd"):
    """Pretty-printed location string.

    :param mode: coordinate formatting system — "dd" (decimal degrees),
        "dm"/"dms" (degrees and minutes[/seconds]), or "locator"
        (Maidenhead grid locator).
    :return: human-readable representation of this Point
    :raise ValueError: unknown value for ``mode``
    """
    if mode == "dd":
        parts = [
            "S" if self.latitude < 0 else "N",
            "%06.3f°; " % abs(self.latitude),
            "W" if self.longitude < 0 else "E",
            "%07.3f°" % abs(self.longitude),
        ]
    elif mode in ("dm", "dms"):
        parts = _dms_formatter(self.latitude, self.longitude, mode)
    elif mode == "locator":
        parts = [self.to_grid_locator()]
    else:
        raise ValueError("Unknown mode type `%s'" % mode)
    return "".join(parts)
def init(self, ui, parent, style): <NEW_LINE> <INDENT> self.ui = ui <NEW_LINE> self.control = ui.control <NEW_LINE> view = ui.view <NEW_LINE> revert = apply = False <NEW_LINE> if self.control is not None: <NEW_LINE> <INDENT> if hasattr(self, "revert"): <NEW_LINE> <INDENT> revert = self.revert.isEnabled() <NEW_LINE> <DEDENT> if hasattr(self, "apply"): <NEW_LINE> <INDENT> apply = self.apply.isEnabled() <NEW_LINE> <DEDENT> ui.reset() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.create_dialog(parent, style) <NEW_LINE> context = ui.context <NEW_LINE> ui._context = context <NEW_LINE> ui.context = self._copy_context(context) <NEW_LINE> ui._revert = self._copy_context(context) <NEW_LINE> <DEDENT> self.set_icon(view.icon) <NEW_LINE> buttons = [self.coerce_button(button) for button in view.buttons] <NEW_LINE> nr_buttons = len(buttons) <NEW_LINE> if (nr_buttons != 1) or (not self.is_button(buttons[0], "")): <NEW_LINE> <INDENT> bbox = QtGui.QDialogButtonBox() <NEW_LINE> if nr_buttons == 0: <NEW_LINE> <INDENT> if view.apply: <NEW_LINE> <INDENT> self.check_button(buttons, ApplyButton) <NEW_LINE> if view.revert: <NEW_LINE> <INDENT> self.check_button(buttons, RevertButton) <NEW_LINE> <DEDENT> <DEDENT> if view.ok: <NEW_LINE> <INDENT> self.check_button(buttons, OKButton) <NEW_LINE> <DEDENT> if view.cancel: <NEW_LINE> <INDENT> self.check_button(buttons, CancelButton) <NEW_LINE> <DEDENT> if view.help: <NEW_LINE> <INDENT> self.check_button(buttons, HelpButton) <NEW_LINE> <DEDENT> <DEDENT> for raw_button, button in zip(view.buttons, buttons): <NEW_LINE> <INDENT> default = raw_button == view.default_button <NEW_LINE> if self.is_button(button, "Apply"): <NEW_LINE> <INDENT> self.apply = self.add_button( button, bbox, QtGui.QDialogButtonBox.ButtonRole.ApplyRole, self._on_apply, enabled=apply, default=default, ) <NEW_LINE> ui.observe(self._on_applyable, "modified", dispatch="ui") <NEW_LINE> <DEDENT> elif self.is_button(button, "Revert"): <NEW_LINE> <INDENT> self.revert = 
self.add_button( button, bbox, QtGui.QDialogButtonBox.ButtonRole.ResetRole, self._on_revert, enabled=revert, default=default, ) <NEW_LINE> <DEDENT> elif self.is_button(button, "OK"): <NEW_LINE> <INDENT> self.ok = self.add_button( button, bbox, QtGui.QDialogButtonBox.ButtonRole.AcceptRole, self.control.accept, default=default, ) <NEW_LINE> ui.observe(self._on_error, "errors", dispatch="ui") <NEW_LINE> <DEDENT> elif self.is_button(button, "Cancel"): <NEW_LINE> <INDENT> self.add_button( button, bbox, QtGui.QDialogButtonBox.ButtonRole.RejectRole, self.control.reject, default=default, ) <NEW_LINE> <DEDENT> elif self.is_button(button, "Help"): <NEW_LINE> <INDENT> self.add_button( button, bbox, QtGui.QDialogButtonBox.ButtonRole.HelpRole, self._on_help, default=default, ) <NEW_LINE> <DEDENT> elif not self.is_button(button, ""): <NEW_LINE> <INDENT> self.add_button( button, bbox, QtGui.QDialogButtonBox.ButtonRole.ActionRole, default=default, ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> bbox = None <NEW_LINE> <DEDENT> self.add_contents(panel(ui), bbox) | Initialise the object. | 625941ba796e427e537b045a |
def add_count(number, the_list):
    """Increment the occurrence count of `number` in the mapping `the_list`,
    inserting it with count 1 on first sight.

    Note: despite the name, `the_list` is a dict mapping value -> count.
    """
    # dict.get replaces the manual membership test (one lookup, no branch).
    the_list[number] = the_list.get(number, 0) + 1
def reshape_axes(axes, shape, newshape, unknown=None):
    """Return an axes string matching `newshape` after reshaping from `shape`.

    Dimensions that cannot be mapped back to an original axis are labelled
    `unknown` ('Q' when not given).

    >>> reshape_axes('YXS', (219, 301, 1), (219, 301))
    'YX'
    >>> reshape_axes('IYX', (12, 219, 301), (3, 4, 219, 1, 301, 1))
    'QQYQXQ'
    """
    from math import prod  # stdlib (3.8+) replacement for the module helper `product`

    shape = tuple(shape)
    newshape = tuple(newshape)
    if len(axes) != len(shape):
        raise ValueError('axes do not match shape')
    if prod(shape) != prod(newshape):
        raise ValueError('cannot reshape %s to %s' % (shape, newshape))
    if not axes or not newshape:
        return ''
    # Normalize the fill label up front; the old code mutated `unknown`
    # lazily inside the loop, obscuring the invariant (same result).
    if not unknown:
        unknown = 'Q'
    # Pad newshape with trailing singleton dims when it is shorter than shape.
    lendiff = max(0, len(shape) - len(newshape))
    if lendiff:
        newshape = newshape + (1,) * lendiff
    i = len(shape) - 1
    prodns = 1  # running product of consumed new dimensions
    prods = 1   # running product of matched old dimensions
    result = []
    for ns in newshape[::-1]:
        prodns *= ns
        # Skip size-1 source dims that cannot account for a non-1 target dim.
        while i > 0 and shape[i] == 1 and ns != 1:
            i -= 1
        if ns == shape[i] and prodns == prods * shape[i]:
            prods *= shape[i]
            result.append(axes[i])
            i -= 1
        else:
            result.append(unknown)
    return ''.join(reversed(result[lendiff:]))
def __init__(self, parent=None): <NEW_LINE> <INDENT> super(ItemMapCropperDialog, self).__init__(parent) <NEW_LINE> self.setupUi(self) | Constructor. | 625941ba01c39578d7e74cdb |
def partial_fit(self, X, y): <NEW_LINE> <INDENT> if not self.w_initialised: <NEW_LINE> <INDENT> self._initialise_weights(X.shape[1]) <NEW_LINE> <DEDENT> if y.ravel().shape[0] > 1: <NEW_LINE> <INDENT> for xi, target in zip(X,y): <NEW_LINE> <INDENT> self._update_weights(xi, target) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._update_weights(X, y) <NEW_LINE> <DEDENT> return self | Fit training data without reinitialising the weights. | 625941ba4e4d5625662d4274 |
def OnKeyDown(self, event):
    """Handle ``wx.EVT_KEY_DOWN`` for RoundButton: a space press while
    focused switches to the hover state and repaints.

    :param event: the `wx.KeyEvent` to process
    """
    # Short-circuit keeps GetKeyCode from being queried when unfocused.
    if self._hasFocus and event.GetKeyCode() == ord(" "):
        self._mouseAction = HOVER
        self.Refresh()
    event.Skip()
def admin_duid(self, transaction: Transaction) -> str:
    """Show the DUID in link-layer (MAC address) form when available,
    otherwise the raw DUID.

    :param transaction: the transaction object
    """
    duid_ll = transaction.client.duid_ll
    if duid_ll:
        return duid_ll
    return transaction.client.duid
def __init__(self, arg, size=32):
    """Create an ExprMem.

    @arg: Expr, memory access address
    @size: int, memory access size (bits)
    """
    # Single explicit check: the former `assert isinstance(arg, Expr)` made
    # the ValueError below unreachable and vanished under -O; validating via
    # the raise keeps the error consistent in all modes.
    if not isinstance(arg, Expr):
        raise ValueError(
            'ExprMem: arg must be an Expr (not %s)' % type(arg))
    assert isinstance(size, (int, long))  # py2 codebase: `long` is valid here
    super(ExprMem, self).__init__()
    self.__arg, self.__size = arg, size
def test_edit_profile_view(self): <NEW_LINE> <INDENT> resolver = resolve('/profile/edit/') <NEW_LINE> self.assertEquals(resolver.view_name, 'edit_profile') | Test correct view is attached to correct URL. | 625941ba96565a6dacc8f56d |
def ge(self, other, axis="columns", level=None):
    """Element-wise 'greater than or equal' comparison.

    Args:
        other: a DataFrame, Series, or scalar to compare against.
        axis: axis to perform the comparison over.
        level: MultiIndex level to apply the comparison over.

    Returns:
        A new DataFrame filled with booleans.
    """
    if level is not None:
        # Level-aware comparison is delegated to pandas.
        if isinstance(other, DataFrame):
            other = other._query_compiler.to_pandas()
        return self._default_to_pandas(
            pandas.DataFrame.ge, other, axis=axis, level=level
        )
    validated = self._validate_other(other, axis, comparison_dtypes_only=True)
    compiler = self._query_compiler.ge(other=validated, axis=axis, level=level)
    return self._create_dataframe_from_compiler(compiler)
def test_pad_big_block(self) -> None: <NEW_LINE> <INDENT> blocksize = 256 + len(MESSAGE) <NEW_LINE> with self.assertRaises(m09.PKCS7PaddingError): <NEW_LINE> <INDENT> m09.pkcs7(MESSAGE, blocksize) | PKCS7 with blocksize too large | 625941ba596a897236089962 |
def connect(url, ldap_suffix, user, password): <NEW_LINE> <INDENT> _LOGGER.debug('Connecting to LDAP %s, %s', url, ldap_suffix) <NEW_LINE> conn = admin.Admin(url, ldap_suffix, user=user, password=password) <NEW_LINE> conn.connect() <NEW_LINE> return conn | Connect to from parent context parameters. | 625941ba187af65679ca4fb5 |
def result_process(self, urlo): <NEW_LINE> <INDENT> self.parent.http_result_handle(self, urlo) | Process http fetch result | 625941ba45492302aab5e158 |
def topological_sort_kahn(self):
    """Topological sort using Kahn's algorithm.

    https://en.wikipedia.org/wiki/Topological_sorting#Kahn's_algorithm

    Returns:
        list[int]: vertices in topological order, or -1 when the graph
        contains a cycle.
    """
    # NOTE(review): graph_matrix.__len__() returns a tuple here; [0] is the
    # vertex count — confirm against the matrix class.
    n = self.graph_matrix.__len__()[0]
    matrix = self.graph_matrix.matrix_value
    indegree = [0] * n
    for src in range(n):
        for dst in range(n):
            if matrix[src][dst] != 0:
                indegree[dst] += 1
    queue = [v for v in range(n) if indegree[v] == 0]
    visited = 0
    order = []
    while queue:
        vertex = queue.pop(0)
        order.append(vertex)
        for nxt in range(n):
            if matrix[vertex][nxt] != 0:
                indegree[nxt] -= 1
                if indegree[nxt] <= 0:
                    queue.append(nxt)
        visited += 1
    if visited != n:
        return -1
    return order
def test_permutation_2sided_4by4_directed_translat_scale_padding(): <NEW_LINE> <INDENT> array_a = np.array([[29, 79, 95, 83.], [37, 86, 67, 93.], [72, 85, 15, 3.], [38, 39, 58, 24.]]) <NEW_LINE> perm = np.array([[0, 0, 0, 1], [0, 0, 1, 0], [1, 0, 0, 0], [0, 1, 0, 0]]) <NEW_LINE> array_b = np.dot(perm.T, np.dot(15.3 * array_a + 5.45, perm)) <NEW_LINE> array_a = np.concatenate((array_a, np.zeros((4, 3))), axis=1) <NEW_LINE> array_a = np.concatenate((array_a, np.zeros((10, 7))), axis=0) <NEW_LINE> array_b = np.concatenate((array_b, np.zeros((4, 2))), axis=1) <NEW_LINE> array_b = np.concatenate((array_b, np.zeros((6, 6))), axis=0) <NEW_LINE> new_a, new_b, U, e_opt = permutation_2sided(array_a, array_b, transform_mode='single_directed', translate=True, scale=True) <NEW_LINE> assert_almost_equal(U, perm, decimal=6) <NEW_LINE> assert_almost_equal(e_opt, 0., decimal=6) | Test 2sided-perm with 'directed' by 4by4 with translation, scaling and zero paddings. | 625941ba851cf427c661a3aa |
def lstm_step_backward(dnext_h, dnext_c, cache):
    """Backward pass for a single timestep of an LSTM.

    Inputs:
    - dnext_h: gradients of next hidden state, shape (N, H)
    - dnext_c: gradients of next cell state, shape (N, H)
    - cache: values from the forward pass

    Returns a tuple of:
    - dx: gradient of input data, shape (N, D)
    - dprev_h: gradient of previous hidden state, shape (N, H)
    - dprev_c: gradient of previous cell state, shape (N, H)
    - dWx: gradient of input-to-hidden weights, shape (D, 4H)
    - dWh: gradient of hidden-to-hidden weights, shape (H, 4H)
    - db: gradient of biases, shape (4H,)
    """
    tan_o, o, f, prev_c, g, i, Wh, Wx, x, prev_h, next_c = cache

    # --- Path through the hidden state h = o * tanh(c) ---
    doh = tan_o * dnext_h * o * (1 - o)
    dmat_out = dnext_h * o * (1 - tan_o ** 2)
    dch = dmat_out * f
    dfh = dmat_out * prev_c * f * (1 - f)
    dih = dmat_out * g * i * (1 - i)
    dgh = dmat_out * i * (1 - g ** 2)
    # Gate gradients packed in (i, f, o, g) order, shape (N, 4H).
    dmath = np.concatenate((dih, dfh, doh, dgh), axis=1)
    dbh = np.sum(dmath, axis=0)
    dxh = dmath.dot(Wx.T)
    dWxh = x.T.dot(dmath)
    dhh = dmath.dot(Wh.T)
    dWhh = prev_h.T.dot(dmath)

    # --- Path through the cell state c ---
    dprev_c = dch + dnext_c * f
    dfc = dnext_c * prev_c * f * (1 - f)
    dic = dnext_c * g * i * (1 - i)
    dgc = dnext_c * i * (1 - g ** 2)
    doc = np.zeros_like(dfc)  # output gate does not feed the cell path
    dmatc = np.concatenate((dic, dfc, doc, dgc), axis=1)
    dbc = np.sum(dmatc, axis=0)
    dxc = dmatc.dot(Wx.T)
    dWxc = x.T.dot(dmatc)
    dhc = dmatc.dot(Wh.T)
    dWhc = prev_h.T.dot(dmatc)

    # --- Combine both paths ---
    dprev_h = dhc + dhh
    dx = dxc + dxh
    dWh = dWhc + dWhh
    dWx = dWxc + dWxh
    db = dbc + dbh
    return dx, dprev_h, dprev_c, dWx, dWh, db
def test_a_taxonomies_request_is_executed(self): <NEW_LINE> <INDENT> response_json = { "success": True, "taxonomies": [ { "description": "The iptc document classification resource classifies texts based on the IPTC Media Topics taxonomy", "languages": [ { "code": "en", "name": "English" }, { "code": "es", "name": "Spanish" }, { "code": "fr", "name": "French" }, { "code": "de", "name": "German" }, { "code": "it", "name": "Italian" } ], "name": "iptc" }, { "contract": "https://github.com/therealexpertai/nlapi-openapi-specification/blob/master/geotax.yaml", "description": "The geotax document classification resource recognizes geographic places cited in the text and returns corresponding countries' names. In addition, when requested with a specific query-string parameter, it returns extra-data containing equivalent GeoJSON objects. See the specific OpenAPI document (https://github.com/therealexpertai/nlapi-openapi-specification/blob/master/geotax.yaml) for information about the way to obtain and interpret GeoJSON data.", "languages": [ { "code": "en", "name": "English" }, { "code": "es", "name": "Spanish" }, { "code": "fr", "name": "French" }, { "code": "de", "name": "German" }, { "code": "it", "name": "Italian" } ], "name": "geotax" } ] } <NEW_LINE> response = MagicMock() <NEW_LINE> response.status_code = 200 <NEW_LINE> response.ok = True <NEW_LINE> response.json.return_value = response_json <NEW_LINE> self.patched_get.return_value = response <NEW_LINE> client = ExpertAiClient() <NEW_LINE> dm = client.taxonomies() <NEW_LINE> self.assertEqual(dm.taxonomies[1].name, "geotax") <NEW_LINE> self.assertEqual(dm.taxonomies[0].languages[2].code, "fr") | ...then verify that whole flow works as expected | 625941ba8c0ade5d55d3e858 |
def test_empty_video_id(): <NEW_LINE> <INDENT> result = stream_to_s3("") <NEW_LINE> assert not result | Tests that an empty video id does not give a result | 625941ba1f5feb6acb0c49ed |
def _updatePreferredLanguages(self, person_or_team): <NEW_LINE> <INDENT> if len(person_or_team.languages) > 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> response = self.request.response <NEW_LINE> english = getUtility(ILaunchpadCelebrities).english <NEW_LINE> if person_or_team.is_team: <NEW_LINE> <INDENT> person_or_team.addLanguage(english) <NEW_LINE> team_mapping = {'name': person_or_team.name, 'displayname': person_or_team.displayname} <NEW_LINE> msgid = _("English was added to ${displayname}'s " '<a href="/~${name}/+editlanguages">preferred ' 'languages</a>.', mapping=team_mapping) <NEW_LINE> response.addNotification(structured(msgid)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if len(browser_languages(self.request)) > 0: <NEW_LINE> <INDENT> languages = browser_languages(self.request) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> languages = [english] <NEW_LINE> <DEDENT> for language in languages: <NEW_LINE> <INDENT> person_or_team.addLanguage(language) <NEW_LINE> <DEDENT> language_str = ', '.join([lang.displayname for lang in languages]) <NEW_LINE> msgid = _('<a href="/people/+me/+editlanguages">Your preferred ' 'languages</a> were updated to include your browser ' 'languages: $languages.', mapping={'languages': language_str}) <NEW_LINE> response.addNotification(structured(msgid)) | Check or update the Person's preferred languages as needed.
Answer contacts must tell Launchpad in which languages they provide
help. If the Person has not already set his preferred languages, they
are set to his browser languages. In the case of a team without
languages, only English is added to the preferred languages. When
languages are added, a notification is added to the response. | 625941ba56ac1b37e626406e |
def classFactory(iface):
    """QGIS plugin entry point: load the UFPRCampusMap plugin class.

    :param iface: a QgsInterface instance
    :type iface: QgsInterface
    """
    from .indoor_schematic import UFPRCampusMapPlugin
    return UFPRCampusMapPlugin()
def make_handler(pubsub): <NEW_LINE> <INDENT> class _WS(tornado.websocket.WebSocketHandler): <NEW_LINE> <INDENT> def __init__(self, application, request, **kwargs): <NEW_LINE> <INDENT> tornado.websocket.WebSocketHandler.__init__( self, application, request, **kwargs ) <NEW_LINE> <DEDENT> def active(self): <NEW_LINE> <INDENT> return bool(self.ws_connection) <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> _LOGGER.info('Connection opened.') <NEW_LINE> <DEDENT> def send_error_msg(self, error_str, close_conn=True): <NEW_LINE> <INDENT> _LOGGER.info(error_str) <NEW_LINE> error_msg = {'_error': error_str, 'when': datetime.datetime.utcnow().isoformat()} <NEW_LINE> self.write_message(error_msg) <NEW_LINE> if close_conn: <NEW_LINE> <INDENT> _LOGGER.info('Closing connection.') <NEW_LINE> self.close() <NEW_LINE> <DEDENT> <DEDENT> def on_close(self): <NEW_LINE> <INDENT> _LOGGER.info('connection closed.') <NEW_LINE> <DEDENT> def check_origin(self, origin): <NEW_LINE> <INDENT> parsed_origin = urllib.parse.urlparse(origin) <NEW_LINE> _LOGGER.debug('parsed_origin: %r', parsed_origin) <NEW_LINE> return True <NEW_LINE> <DEDENT> def on_message(self, jmessage): <NEW_LINE> <INDENT> if not pubsub: <NEW_LINE> <INDENT> _LOGGER.fatal('pubsub is not configured, ignore.') <NEW_LINE> self.send_error_msg('Fatal: unexpected error', close_conn=True) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> message = json.loads(jmessage) <NEW_LINE> topic = message['topic'] <NEW_LINE> impl = pubsub.impl.get(topic) <NEW_LINE> if not impl: <NEW_LINE> <INDENT> self.send_error_msg('Invalid topic: %r' % topic) <NEW_LINE> return <NEW_LINE> <DEDENT> since = message.get('since', 0) <NEW_LINE> snapshot = message.get('snapshot', False) <NEW_LINE> for watch, pattern in impl.subscribe(message): <NEW_LINE> <INDENT> pubsub.register(watch, pattern, self, impl, since) <NEW_LINE> <DEDENT> if snapshot: <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> 
self.send_error_msg(str(err)) <NEW_LINE> <DEDENT> <DEDENT> def data_received(self, message): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_event(self, filename, operation, _content): <NEW_LINE> <INDENT> _LOGGER.debug('%s %s', filename, operation) <NEW_LINE> return {'time': time.time(), 'filename': filename, 'op': operation} <NEW_LINE> <DEDENT> <DEDENT> return _WS | Make websocket handler factory. | 625941ba004d5f362079a1cf |
def jsonify(*args, **kw):
    """Create a :class:`Response` whose body is the JSON representation of
    ``dict(*args, **kw)`` with an ``application/json`` mimetype.

    The arguments are the same as the :class:`dict` constructor. Example::

        @web.route('/user/info')
        def get_user():
            return jsonify(name="somename", active=True, key=34)
    """
    payload = json.dumps(dict(*args, **kw))
    return Response(payload, mimetype='application/json')
@app.route('/', methods=['GET', 'POST']) <NEW_LINE> @app.route('/index', methods=['GET', 'POST']) <NEW_LINE> def index(): <NEW_LINE> <INDENT> form = SignupForm() <NEW_LINE> if form.validate_on_submit(): <NEW_LINE> <INDENT> u_views.run_signup_form(form) <NEW_LINE> return redirect('/#signup') <NEW_LINE> <DEDENT> return render_template("index.html", form=form) | Renders the backend of the index page and returns template | 625941ba63b5f9789fde6f7e |
def load_vocab(vocab_path='vocabulary/vocab.json'): <NEW_LINE> <INDENT> with open(vocab_path, 'r') as f: <NEW_LINE> <INDENT> data = json.loads(f.read()) <NEW_LINE> <DEDENT> word2idx = data <NEW_LINE> idx2word = dict([(v, k) for k, v in data.items()]) <NEW_LINE> return word2idx, idx2word | Loads vocabulary for a w2v model | 625941ba8c3a873295158257 |
def doctest_ResourceDemographics(): <NEW_LINE> <INDENT> pass | Test Resource Demographics objects and adapters
>>> from schooltool.resource import interfaces, resource
>>> from schooltool.basicperson import demographics
First we need to set up a mock app and register its adapter:
>>> from zope.component import provideAdapter
>>> from zope.interface import implements
>>> from schooltool.app.interfaces import ISchoolToolApplication
>>> class MockSchoolToolApplication(dict):
... implements(ISchoolToolApplication)
>>> app = MockSchoolToolApplication()
>>> provideAdapter(lambda context: app, (None,), ISchoolToolApplication)
We need to do what the AppInit adapter would otherwise do:
>>> resource.ResourceInit(app)()
>>> resource.RESOURCE_DEMO_FIELDS_KEY in app
True
>>> resource.RESOURCE_DEMO_DATA_KEY in app
True
There's an adapter for the resource demo fields container:
>>> provideAdapter(resource.getResourceDemographicsFields)
>>> dfs = interfaces.IResourceDemographicsFields(app)
>>> interfaces.IResourceDemographicsFields.providedBy(dfs)
True
>>> len(dfs)
0
We'll add some demo fields to the container, some that are limited to a
specific resource type or types:
>>> dfs['ID'] = demographics.TextFieldDescription("ID", "Identifier")
>>> dfs['square_ft'] = demographics.TextFieldDescription("square_ft",
... "Square Feet", limit_keys=['location'])
>>> dfs['warranty'] = demographics.TextFieldDescription("warranty",
... "Warranty", limit_keys=['equiptment'])
>>> dfs['creation_date'] = demographics.DateFieldDescription(
... "creation_date", "Creation Date",
... limit_keys=['location', 'equiptment'])
When we pass the filter_key method a key that does not
belong to any of the limit_keys lists, then it will only return
those fields that have empty limit_keys lists.
>>> [f.__name__ for f in dfs.filter_key('anything')]
[u'ID']
When we pass 'location', it picks up the additional fields that are for
location type resources.
>>> [f.__name__ for f in dfs.filter_key('location')]
[u'ID', u'square_ft', u'creation_date']
When we pass 'equiptment', it picks up the additional fields that are for
equiptment type resources.
>>> [f.__name__ for f in dfs.filter_key('equiptment')]
[u'ID', u'warranty', u'creation_date']
Finally there's an adapter that adapts a resource it's demo data:
>>> provideAdapter(resource.getResourceDemographics)
Now we will create a resource and see what we get when we adapt it to
IDemographics:
>>> sample = resource.Resource('Sample Resource')
>>> sample.__name__ = 'sample'
>>> demos = interfaces.IResourceDemographics(sample)
>>> interfaces.IResourceDemographics.providedBy(demos)
True
>>> len(demos)
0 | 625941baff9c53063f47c095 |
def remove_value(self, value):
    """Remove the first node whose data equals `value` (no-op when absent).

    Time complexity: O(n).
    """
    if self.is_empty():
        return
    if self._head.data == value:
        self._head = self._head.next
        self._size -= 1
        return
    node = self._head
    while node.next is not None:
        if node.next.data == value:
            # Unlink the matched node. Bug fix: the old code returned early
            # whenever the removed node was the tail (node.next became None),
            # so _size was never decremented in that case.
            node.next = node.next.next
            self._size -= 1
            return
        node = node.next
def merge_text(self, params):
    """Calls msgmerge when .po files conflict.

    This requires a valid .pot file to reconcile both sides.

    Writes the THIS and OTHER versions to a temporary directory, expands
    and runs the configured merge command against them plus the project's
    .pot file, and returns the merged result.

    :param params: merge parameters carrying ``this_lines`` and
        ``other_lines`` (the conflicting file contents as byte lines).
    :return: ``('success', lines)`` with the merged file's lines.
    """
    tmpdir = tempfile.mkdtemp(prefix='po_merge')
    env = {}
    env['this'] = osutils.pathjoin(tmpdir, 'this')
    env['other'] = osutils.pathjoin(tmpdir, 'other')
    env['result'] = osutils.pathjoin(tmpdir, 'result')
    env['pot_file'] = self.pot_file_abspath
    try:
        with osutils.open_file(env['this'], 'wb') as f:
            f.writelines(params.this_lines)
        with osutils.open_file(env['other'], 'wb') as f:
            f.writelines(params.other_lines)
        command = self.conf.expand_options(self.command, env)
        # retcode/out/err are currently unchecked; a failed merge surfaces
        # as an error opening the result file below.
        retcode, out, err = self._invoke(command)
        with osutils.open_file(env['result'], 'rb') as f:
            return 'success', list(f.readlines())
    finally:
        # Always clean up the scratch directory, success or failure.
        osutils.rmtree(tmpdir)
    # NOTE: the original ended with an unreachable
    # ``return 'not applicable', []`` after the try/finally (the try body
    # always returns or raises); that dead code has been removed.
@cli.command()
@click.option('--recursive', '-r', default=False, is_flag=True)
@click.argument('url')
@click.argument('dir')
def cp(recursive, url, dir):
    """Copy blobs from a bucket.

    Keyword arguments:
    url -- The URL in the format gs://bucket/subdir
    dir -- The dir to copy to. If recursive, it will create
    """
    bucket_name, prefix = bucket_path_from_url(url)
    bucket = storage_client.get_bucket(bucket_name)
    blobs = list(bucket.list_blobs(prefix=prefix))
    if not blobs:
        raise myutil.exceptions.CommandException('No URLs matched: {}'.format(url))
    if len(blobs) > 1 and not recursive:
        # Multiple matches but no -r flag: refuse, mirroring gsutil behaviour.
        print('Omitting prefix "gs://{}/{}/". (Did you mean to do cp -r?)'.format(bucket.name, prefix))
        raise myutil.exceptions.CommandException('No URLs matched')
    download_blobs(blobs=blobs, dir=dir, prefix=prefix, recursive=recursive)
def get_queryset(self):
    """Return the last five published questions (not including those set
    to be published in the future).
    """
    published = Question.objects.filter(pub_date__lte=timezone.now())
    return published.order_by('-pub_date')[:5]
def __init__(self, binary='git', gitdir=None, worktree=None, remote=None, branch=None):
    """Creates a git scm proxy that assumes the git repository is in the cwd by default.

    binary: The path to the git binary to use, 'git' by default.
    gitdir: The path to the repository's git metadata directory (typically '.git').
    worktree: The path to the git repository working tree directory (typically '.').
    remote: The default remote to use.
    branch: The default remote branch to use.
    """
    super(Scm, self).__init__()
    self._gitcmd = binary
    tree = worktree if worktree else os.getcwd()
    self._worktree = os.path.realpath(tree)
    if gitdir:
        self._gitdir = os.path.realpath(gitdir)
    else:
        # Default metadata dir lives inside the (resolved) working tree.
        self._gitdir = os.path.join(self._worktree, '.git')
    self._remote = remote
    self._branch = branch
def _stop_logging(self, timeout=TIMEOUT):
    """
    Command the instrument to stop logging
    @param timeout: how long to wait for a prompt
    @throws: InstrumentTimeoutException if prompt isn't seen
    @throws: InstrumentProtocolException failed to stop logging
    """
    self._wakeup()
    # Send a 3000 ms serial break to interrupt autonomous sampling.
    self._send_break(duration=3000)
    # Give the instrument time to settle after the break.
    time.sleep(2)
    # NOTE(review): the caller-supplied ``timeout`` is overwritten here,
    # so the parameter value is effectively ignored — confirm intentional.
    timeout = 3
    self._wakeup_until(timeout, TeledynePrompt.COMMAND)
    # Verify the instrument actually left logging mode before returning.
    if self._is_logging(timeout):
        log.error("FAILED TO STOP LOGGING in _stop_logging")
        raise InstrumentProtocolException("failed to stop logging")
def __init__(self, host='atlas', dbname='syntextua'):
    """Return MongoClient() connected to 'localhost' or 'atlas'. You must
    provide %SYNTEXTDBLOG%, %SYNTEXTDBPWD% and %SYNTEXTDBHOST% environment
    variables to make it work.

    Args:
        host (str): If 'localhost' then connect to your local database; if
            'atlas' then connect to MongoDB Atlas.
        dbname (str): Name of database to use.

    Raises:
        EnvironmentError: Some of the credential environment variables are
            missing, so connection is impossible.
        RuntimeError: No database with the given name exists.
        ...Errors from pymongo.errors
    """
    if host == 'atlas':
        login = os.getenv('SYNTEXTDBLOG')
        password = os.getenv('SYNTEXTDBPWD')
        hosturl = os.getenv('SYNTEXTDBHOST')
        if not login or not password or not hosturl:
            # Bug fix: the original passed TWO arguments to
            # EnvironmentError (a stray comma split the message after
            # "variables "); build one coherent message instead.
            raise EnvironmentError(
                'No credentials found. Make sure that SYNTEXTDBLOG, '
                'SYNTEXTDBPWD and SYNTEXTDBHOST environment variables '
                'are provided.'
            )
        url = 'mongodb+srv://%s:%s@%s' % (login, password, hosturl)
        self.cli = MongoClient(url)
    else:
        self.cli = MongoClient()
    if dbname not in self.cli.list_database_names():
        raise RuntimeError(
            f"No database with name '{dbname}' exists on host you're "
            f"trying to connect. "
        )
    # Re-bind cli from the client to the database handle itself.
    self.cli = self.cli.get_database(dbname)
    self.dbname = dbname
def write_school(file_out, row):
    """writes row of data to file_out"""
    with open(file_out, 'a') as csvfile:
        writer = csv.writer(csvfile)
        try:
            writer.writerow(row)
        except Exception as exc:
            # Best-effort append: report the failure but keep going.
            print(exc)
def assign_trump(talon):
    """sets global trump variable - use at the start of each game"""
    top_card = talon[0]
    print("Trump card is {}".format(top_card.card_name()))
    return top_card
def testGeneratedExamples(self):
    """Generate all possible permutations of 5 cars' layouts on the parking
    and randomly choose pairs of those permutations as start and end
    positions of cars; verify each rearrangement and log the average
    number of moves performed.
    """
    # Hoisted out of the loop: one logger lookup is enough.
    logger = logging.getLogger(__name__)
    permutations_of_5 = list(permutations(range(_NUMBER_OF_CARS_TO_PERMUTE)))
    number_of_permutations = len(permutations_of_5)
    moves_sum = 0
    for i in range(_NUMBER_OF_GENERATED_TESTS):
        start_positions = list(permutations_of_5[randint(0, number_of_permutations - 1)])
        end_positions = list(permutations_of_5[randint(0, number_of_permutations - 1)])
        logger.info('Generating test: start positions = %s, end positions = %s',
                    start_positions, end_positions)
        verify_result, number_of_moves = _test_result(start_positions, end_positions)
        # The original asserted verify_result twice in a row; once is enough.
        self.assertTrue(verify_result)
        logger.info('Test passed, number of moves performed = %s', number_of_moves)
        moves_sum += number_of_moves
    average_moves_number = float(moves_sum) / _NUMBER_OF_GENERATED_TESTS
    logger.info('Average number of moves preformed in %s generated tests with %s cars to permute is: %s',
                _NUMBER_OF_GENERATED_TESTS, _NUMBER_OF_CARS_TO_PERMUTE, average_moves_number)
def get_update(self):
    """Click <Get update>"""
    target = "update"
    return self.go(target)
@contextmanager
def use_camera(port=None, exposure=None):
    """A context manager for a `cv2.VideoCapture()` instance. An amount of
    time, `exposure`, is waited before yielding the camera device to the
    user. Leaving the context releases the camera.

    Parameters
    ----------
    port : Optional[int]
        Which camera-device should be used; falls back to the configured
        value when None.

    exposure : Optional[float]
        The time (seconds) for which the camera is active before taking
        the photo; falls back to the configured value when None. If your
        photo is too dark, try increasing this time.

    Yields
    ------
    cv2.VideoCapture
        The video-capture instance of the specified camera.
    """
    import time

    # True only when the caller supplied no overrides at all — presumably
    # this tells _load_config whether to announce defaults (TODO confirm).
    msg = port is None and exposure is None
    conf = _load_config(msg)
    if port is None:
        port = conf["port"]
    if exposure is None:
        exposure = conf["exposure"]
    assert isinstance(port, int)
    assert isinstance(exposure, (float, int))
    # Acquire the device *before* entering the try block: in the original,
    # if VideoCapture itself raised, the finally clause hit a NameError on
    # the unbound `camera` name, masking the real error.
    camera = cv2.VideoCapture(port)
    try:
        time.sleep(exposure)
        yield camera
    finally:
        camera.release()
def on_new(self, message, context):
    """New 'special' event: called when starting the handle loop and
    :attr:`Process.NEW` command is specified.

    This event has no conditions and is called by :meth:`Process.handle`
    directly.
    """
    # Strip the leading command token before queueing the payload.
    del message[0]
    payload = {self.MESSAGE: message, self.CURRENT: None}
    self.to_queue(payload)
    # Keep only the most recently queued entry.
    self._queue[:-1] = []
    self.context = context
def _markAlreadyRan(self):
    """
    WARNING - call this function with care -
    about the only valid use is when adding a SuperCommand to the command
    stack where the commands have been executed individually
    """
    # Flag this command as executed without actually running it; the
    # double-underscore attribute keeps the flag private to this class.
    self.__ran = True
def lookForDeprecationWarning(self, testmethod):
    """
    Importing C{testmethod} emits a deprecation warning.
    """
    expected = (
        "twisted.internet.interfaces.IFinishableConsumer "
        "was deprecated in Twisted 11.1.0: Please use IConsumer "
        "(and IConsumer.unregisterProducer) instead.")
    warnings = self.flushWarnings([testmethod])
    self.assertEqual(len(warnings), 1)
    self.assertIdentical(warnings[0]['category'], DeprecationWarning)
    self.assertEqual(warnings[0]['message'], expected)
@icm.subjectToTracking(fnLoc=True, fnEntry=True, fnExit=True)
def sendBasedOnHeadersInfo(msg,):
    """** Submit or Inject msg using information contained in the header.

    ** Overview
    Dispatch to recipients based on tailored Msg and tailored submission
    info, driven by the BX-Sending-Method header: 'inject' routes through
    injectBasedOnHeadersInfo, 'submit' through submitBasedOnHeadersInfo.
    """
    bx822Set_setMandatoryFields(msg,)
    # Guard: without a sending method we cannot dispatch at all.
    if 'BX-Sending-Method' not in msg:
        return icm.EH_problem_info("BX-Sending-Method")
    sendingMethod = msg['BX-Sending-Method']
    if sendingMethod == SendingMethod.inject:
        return injectBasedOnHeadersInfo(msg,)
    if sendingMethod == SendingMethod.submit:
        return submitBasedOnHeadersInfo(msg,)
    # Unknown sending method: report bad usage.
    return icm.EH_problem_info("Bad Usage")
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.