code (stringlengths 4–4.48k) | docstring (stringlengths 1–6.45k) | _id (stringlengths 24–24) |
---|---|---|
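Each row below is one `code | docstring | _id` record, with the `code` field flattened onto a single line using `<NEW_LINE>`, `<INDENT>`, and `<DEDENT>` markers in place of newlines and indentation. A minimal decoder sketch follows; the token semantics are an assumption read off the data, not an official tool, and re-joining tokens with single spaces can alter spacing inside string literals:

```python
def decode_tokenized_code(tok: str, indent: str = "    ") -> str:
    """Best-effort rebuild of Python source from the <NEW_LINE>/<INDENT>/<DEDENT>
    encoding used in the `code` column above."""
    level = 0
    lines, current = [], []
    for token in tok.split():
        if token == "<NEW_LINE>":
            lines.append(indent * level + " ".join(current))
            current = []
        elif token == "<INDENT>":
            level += 1                  # markers apply to the lines that follow
        elif token == "<DEDENT>":
            level = max(0, level - 1)
        else:
            current.append(token)
    if current:                         # flush a trailing line with no <NEW_LINE>
        lines.append(indent * level + " ".join(current))
    return "\n".join(lines)

print(decode_tokenized_code("def f(x): <NEW_LINE> <INDENT> return x + 1"))
```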
def SetAlignment(self,alignment): <NEW_LINE> <INDENT> if self.alignment != alignment: <NEW_LINE> <INDENT> self.alignment = alignment <NEW_LINE> self.Sizer.Detach(self.content) <NEW_LINE> self.Sizer.Add(self.content, 1 if alignment == ALIGN_LEFT else 0, self.alignment) <NEW_LINE> self.Sizer.Layout() | Can change the alignment of the buttons in the bar, left or center...
and maybe right? | 625941b5d10714528d5ffad1 |
def get_amount_profile(GC_airdens,GC_mixrat,GC_height): <NEW_LINE> <INDENT> air_amount = np.multiply(conv4D_3D(GC_airdens), conv4D_3D(GC_height ) ) <NEW_LINE> tra_amount = np.multiply(air_amount, conv4D_3D(GC_mixrat ) ) <NEW_LINE> return tra_amount | Produces a 3D array of the amount of a substance per area in each voxel | 625941b556b00c62f0f1444e |
@deprecated('site.load_pages_from_pageids()', since='20200515') <NEW_LINE> def PagesFromPageidGenerator(pageids, site=None): <NEW_LINE> <INDENT> if site is None: <NEW_LINE> <INDENT> site = pywikibot.Site() <NEW_LINE> <DEDENT> return site.load_pages_from_pageids(pageids) | DEPRECATED. Return a page generator from pageids.
Pages are iterated in the same order as in the underlying pageids.
Pageids are filtered and only one page is returned in case of
duplicate pageid.
:param pageids: an iterable that returns pageids, or a comma-separated
string of pageids (e.g. '945097,1483753,956608')
:param site: Site for generator results.
:type site: :py:obj:`pywikibot.site.BaseSite` | 625941b55fdd1c0f98dc0023 |
def OeManipulateFP(inputfile,outputfile,formats='sdf',fingerprint='false',addsame='false'): <NEW_LINE> <INDENT> ifs=OeReadfile(inputfile) <NEW_LINE> if formats=='sdf': <NEW_LINE> <INDENT> ofs=OeWritefile(outputfile) <NEW_LINE> <DEDENT> if formats=='csv': <NEW_LINE> <INDENT> csvfile=open(outputfile,'w') <NEW_LINE> if fingerprint=='MACCSH': <NEW_LINE> <INDENT> csvfile.write('MACCSH') <NEW_LINE> if fingerprint=='MACCSB' or addsame!='false': <NEW_LINE> <INDENT> csvfile.write(',') <NEW_LINE> <DEDENT> <DEDENT> if fingerprint=='MACCSB': <NEW_LINE> <INDENT> for i in range(0,166): <NEW_LINE> <INDENT> csvfile.write(str(i+1)) <NEW_LINE> if i!=166: <NEW_LINE> <INDENT> csvfile.write(',') <NEW_LINE> <DEDENT> elif addsame!='false': <NEW_LINE> <INDENT> csvfile.write(',') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if addsame!='false': <NEW_LINE> <INDENT> csvfile.write(str(addsame)) <NEW_LINE> <DEDENT> csvfile.write('\n') <NEW_LINE> <DEDENT> for mol in ifs.GetOEGraphMols(): <NEW_LINE> <INDENT> if fingerprint!='false': <NEW_LINE> <INDENT> fp=OEFingerPrint() <NEW_LINE> OEMakeFP(fp, mol, OEFPType_MACCS166) <NEW_LINE> if fingerprint=='MACCSH': <NEW_LINE> <INDENT> if formats=='sdf': <NEW_LINE> <INDENT> OESetSDData(mol, 'MACCS', str(fp.ToHexString())) <NEW_LINE> <DEDENT> if formats=='csv': <NEW_LINE> <INDENT> csvfile.write(str(fp.ToHexString())) <NEW_LINE> if fingerprint=='MACCSB' or addsame!='false': <NEW_LINE> <INDENT> csvfile.write(',') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if fingerprint=='MACCSB': <NEW_LINE> <INDENT> for bitnum in range(len(fp.ToHexString())-1): <NEW_LINE> <INDENT> four=str(mybin(int(fp.ToHexString()[bitnum],16))) <NEW_LINE> if bitnum!=(len(fp.ToHexString())-2): <NEW_LINE> <INDENT> for i in range(1,5): <NEW_LINE> <INDENT> if formats=='sdf': <NEW_LINE> <INDENT> OESetSDData(mol, str(bitnum*4+i),four[i-1]) <NEW_LINE> <DEDENT> if formats=='csv': <NEW_LINE> <INDENT> csvfile.write(str(four[i-1])) <NEW_LINE> csvfile.write(',') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(3,5): <NEW_LINE> <INDENT> if formats=='sdf': <NEW_LINE> <INDENT> OESetSDData(mol, str(bitnum*4+i-2),four[i-1]) <NEW_LINE> <DEDENT> if formats=='csv': <NEW_LINE> <INDENT> csvfile.write(str(four[i-1])) <NEW_LINE> if (bitnum*4+i-2)!=166: <NEW_LINE> <INDENT> csvfile.write(',') <NEW_LINE> <DEDENT> elif addsame!='false': <NEW_LINE> <INDENT> csvfile.write(',') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if addsame!='false': <NEW_LINE> <INDENT> if formats=='sdf': <NEW_LINE> <INDENT> OESetSDData(mol, 'Added',str(addsame)) <NEW_LINE> <DEDENT> if formats=='csv': <NEW_LINE> <INDENT> csvfile.write(str(addsame)) <NEW_LINE> <DEDENT> <DEDENT> if formats=='sdf': <NEW_LINE> <INDENT> OEWriteMolecule(ofs, mol) <NEW_LINE> <DEDENT> if formats=='csv': <NEW_LINE> <INDENT> csvfile.write('\n') <NEW_LINE> <DEDENT> <DEDENT> if formats=='csv': <NEW_LINE> <INDENT> csvfile.close() | Calculate the hex of binary MACCS and add an extra field
inputfile can be any format
outputfile should be sdf or csv
formats is for outputfile
fingerprint can be MACCSH or MACCSB
addsame is the content of the extra field; it is intended for unsupervised learning and has no specific meaning | 625941b59f2886367277a683 |
def create_state(self): <NEW_LINE> <INDENT> pass | Factory method providing an interface for creating State instances
@return GameState :
@author | 625941b5f9cc0f698b1403f8 |
def is_cn_char(ch): <NEW_LINE> <INDENT> return ch >= u'\u4e00' and ch <= u'\u9fa5' | Test if a char is a Chinese character. | 625941b563d6d428bbe442e1 |
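A quick check of the range test above (the CJK Unified Ideographs block, U+4E00 through U+9FA5):

```python
assert is_cn_char(u'\u4e2d')      # '中' (U+4E2D) is inside the block
assert not is_cn_char('a')        # ASCII letters sort below U+4E00
```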
def py_string_to_ipy_string(str): <NEW_LINE> <INDENT> return read_python(BytesIO(str.encode())) | Read a string containing a regular Python script with special
formatting, and perform preprocessing on it. The result is a
string that conforms to the IPython notebook version 3 Python
script format. | 625941b550812a4eaa59c118 |
@contextmanager <NEW_LINE> def wait_for_element_visibility(driver, element, timeout=5): <NEW_LINE> <INDENT> yield <NEW_LINE> WebDriverWait(driver, timeout).until( visibility_of(element) ) | Explicitly wait while element becomes visible.
Args:
driver: selenium webdriver instance.
element: element we are waiting to be visible.
timeout: timeout for waiting.
Returns: | 625941b5be383301e01b527f |
def _apply_to_listeners(self, function, *args): <NEW_LINE> <INDENT> for type in self.listener: <NEW_LINE> <INDENT> getattr(self.listener[type], function)(*args) | Apply a simple function to all listeners. | 625941b582261d6c526ab295 |
def diameterOfBinaryTree(self, root): <NEW_LINE> <INDENT> def height(root): <NEW_LINE> <INDENT> if not root: return 0 <NEW_LINE> return 1 + max(height(root.left), height(root.right)) <NEW_LINE> <DEDENT> def r(root): <NEW_LINE> <INDENT> if not root: return 0 <NEW_LINE> left = r(root.left) <NEW_LINE> right = r(root.right) <NEW_LINE> return max(max(right , left), height(root.right) + height(root.left)) <NEW_LINE> <DEDENT> return r(root) | :type root: TreeNode
:rtype: int | 625941b54d74a7450ccd3fb4 |
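A minimal sanity check of the decoded method above; since `self` is unused it can be called directly, and the `TreeNode` class here is an assumption (the usual LeetCode-style node, not shown in the row):

```python
class TreeNode:
    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right

# Longest path in a three-node tree runs leaf -> root -> leaf: 2 edges.
root = TreeNode(1, TreeNode(2), TreeNode(3))
assert diameterOfBinaryTree(None, root) == 2
```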
def _send_message(self, message, with_last_error=False, check_primary=True): <NEW_LINE> <INDENT> if check_primary and not with_last_error and not self.is_primary: <NEW_LINE> <INDENT> raise AutoReconnect("not master") <NEW_LINE> <DEDENT> sock_info = self.__socket() <NEW_LINE> try: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> (request_id, data) = self.__check_bson_size(message) <NEW_LINE> sock_info.sock.sendall(data) <NEW_LINE> rv = None <NEW_LINE> if with_last_error: <NEW_LINE> <INDENT> response = self.__receive_message_on_socket(1, request_id, sock_info) <NEW_LINE> rv = self.__check_response_to_last_error(response) <NEW_LINE> <DEDENT> return rv <NEW_LINE> <DEDENT> except OperationFailure: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except (ConnectionFailure, socket.error) as e: <NEW_LINE> <INDENT> self.disconnect() <NEW_LINE> raise AutoReconnect(str(e)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> sock_info.close() <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self.__pool.maybe_return_socket(sock_info) | Say something to Mongo.
Raises ConnectionFailure if the message cannot be sent. Raises
OperationFailure if `with_last_error` is ``True`` and the
response to the getLastError call returns an error. Return the
response from lastError, or ``None`` if `with_last_error`
is ``False``.
:Parameters:
- `message`: message to send
- `with_last_error`: check getLastError status after sending the
message
- `check_primary`: don't try to write to a non-primary; see
kill_cursors for an exception to this rule | 625941b5167d2b6e31218990 |
def list_filesystem_snapshot_policies(self, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('callback'): <NEW_LINE> <INDENT> return self.list_filesystem_snapshot_policies_with_http_info(**kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.list_filesystem_snapshot_policies_with_http_info(**kwargs) <NEW_LINE> return data | List policies attached to filesystem snapshots.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.list_filesystem_snapshot_policies(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param list[str] policy_ids: A comma-separated list of policy IDs. This cannot be provided together with the policy names query parameters.
:param list[str] policy_names: A comma-separated list of policy names. This cannot be provided together with the policy ids query parameters.
:param list[str] member_ids: A comma-separated list of member ids. This cannot be provided together with the member names query parameters.
:param list[str] member_names: A comma-separated list of member names. This cannot be provided together with the member ids query parameters.
:param str filter: The filter to be used for query.
:param str sort: Sort the response by the specified fields (in descending order if '-' is appended to the field name).
:param int start: The offset of the first resource to return from a collection.
:param int limit: limit, should be >= 0
:param str token: An opaque token used to iterate over a collection. The token to use on the next request is returned in the `continuation_token` field of the result.
:return: PolicyObjectsResponse
If the method is called asynchronously,
returns the request thread. | 625941b530bbd722463cbbb4 |
def dynamicCompletion (self,event=None): <NEW_LINE> <INDENT> c,p,u = self.c,self.c.p,self.c.p.v.u <NEW_LINE> w = self.editWidget(event) <NEW_LINE> if not w: return <NEW_LINE> s = w.getAllText() <NEW_LINE> ins = w.getInsertPoint() <NEW_LINE> if 0 < ins < len(s) and not g.isWordChar(s[ins]): ins -= 1 <NEW_LINE> i,j = g.getWord(s,ins) <NEW_LINE> word = w.get(i,j) <NEW_LINE> aList = self.getDynamicList(w,word) <NEW_LINE> if not aList: return <NEW_LINE> if word in aList and len(aList) > 1: aList.remove(word) <NEW_LINE> prefix = reduce(g.longestCommonPrefix,aList) <NEW_LINE> if prefix.strip(): <NEW_LINE> <INDENT> ypos = w.getYScrollPosition() <NEW_LINE> b = c.undoer.beforeChangeNodeContents(p,oldYScroll=ypos) <NEW_LINE> p.b = p.b[:i] + prefix + p.b[j:] <NEW_LINE> w.setAllText(p.b) <NEW_LINE> w.setInsertPoint(i+len(prefix)) <NEW_LINE> w.setYScrollPosition(ypos) <NEW_LINE> c.undoer.afterChangeNodeContents(p, command='dabbrev-completion',bunch=b,dirtyVnodeList=[]) | dabbrev-completion
Insert the common prefix of all dynamic abbrev's matching the present word.
This corresponds to C-M-/ in Emacs. | 625941b54527f215b584c24e |
@login_required <NEW_LINE> def edit_occ(request, occurrence_id): <NEW_LINE> <INDENT> occurrence = get_object_or_404(Occurrence, id=occurrence_id) <NEW_LINE> if not occurrence.cancelled: <NEW_LINE> <INDENT> form = OccurrenceForm(data=request.POST or None, instance=occurrence) <NEW_LINE> if request.method == 'POST': <NEW_LINE> <INDENT> if form.is_valid(): <NEW_LINE> <INDENT> occurrence = form.save() <NEW_LINE> if not request.is_ajax(): <NEW_LINE> <INDENT> return HttpResponseRedirect(occurrence.get_absolute_url()) <NEW_LINE> <DEDENT> response = ({'success':'True'}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response = errors_as_json(form) <NEW_LINE> <DEDENT> if request.is_ajax(): <NEW_LINE> <INDENT> json = simplejson.dumps(response, ensure_ascii=False) <NEW_LINE> return HttpResponse(json, mimetype="application/json") <NEW_LINE> <DEDENT> <DEDENT> return render_to_response('calendars/occurrence_edit.html', { 'occ_form': form, 'occurrence': occurrence, 'action' : occurrence.get_edit_url(), 'event' : occurrence.event, }, context_instance=RequestContext(request)) | edit a persisted occurrence | 625941b5a934411ee375148d |
def setHostNameIDL(self, sessionHandle, hostName): <NEW_LINE> <INDENT> pass | Parameters:
- sessionHandle
- hostName | 625941b59f2886367277a684 |
def check_inside_guest(ishotplug): <NEW_LINE> <INDENT> def _check_disk_in_guest(): <NEW_LINE> <INDENT> new_disk_num = len(vm.get_disks()) <NEW_LINE> if new_disk_num > ori_disk_num: <NEW_LINE> <INDENT> logging.debug("New disk is found in vm") <NEW_LINE> return True <NEW_LINE> <DEDENT> logging.debug("New disk is not found in vm") <NEW_LINE> return False <NEW_LINE> <DEDENT> vm_session = vm.wait_for_login() <NEW_LINE> status = _check_disk_in_guest() <NEW_LINE> vm_session.close() <NEW_LINE> msg1 = "Can't find the device in the guest" <NEW_LINE> msg2 = "Found the device in the guest unexpectedly" <NEW_LINE> if ((ishotplug and not status_error and not status) or (not ishotplug and status_error and not status)): <NEW_LINE> <INDENT> test.fail(msg1) <NEW_LINE> <DEDENT> if ((ishotplug and status_error and status) or (not ishotplug and not status_error and status)): <NEW_LINE> <INDENT> test.fail(msg2) | Check devices within the guest
:param ishotplug: True for hotplug, False for hotunplug
:raise: test.fail if the result is not expected | 625941b556ac1b37e6263fd3 |
def create_intervals(data): <NEW_LINE> <INDENT> if len(data) == 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> intervals = [] <NEW_LINE> start = None <NEW_LINE> stop = None <NEW_LINE> for i in range(min(data), max(data)+1): <NEW_LINE> <INDENT> if i in data: <NEW_LINE> <INDENT> stop = i <NEW_LINE> if start is None: <NEW_LINE> <INDENT> start = i <NEW_LINE> <DEDENT> <DEDENT> elif start is not None: <NEW_LINE> <INDENT> intervals.append(tuple([start, stop])) <NEW_LINE> start = None <NEW_LINE> stop = None <NEW_LINE> <DEDENT> <DEDENT> intervals.append(tuple([start, stop])) <NEW_LINE> return intervals | Create a list of intervals out of a set of ints. | 625941b5be8e80087fb20a41 |
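With the `start is None` test above (the original `if not start` misfired whenever 0 was in the data, restarting an interval at its next member), a quick check:

```python
assert create_intervals({0, 1, 2, 5}) == [(0, 2), (5, 5)]
assert create_intervals(set()) == []
```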
def set_vel(self, vel): <NEW_LINE> <INDENT> assert isinstance(vel, Vector) and len(vel) == 3 <NEW_LINE> self.vel = vel | Set the velocity in Cartesian coordinates. | 625941b599fddb7c1c9de186 |
def cleanup(): <NEW_LINE> <INDENT> if MOUNTED: <NEW_LINE> <INDENT> exe('umount ' + mntpath) <NEW_LINE> <DEDENT> exe('losetup --detach ' + loop) <NEW_LINE> exe('rm -r ' + filepath) <NEW_LINE> exe('rmdir ' + mntpath) | Cleanup after script. | 625941b5f8510a7c17cf94f7 |
def extra(self, select=None, where=None, params=None, tables=None, order_by=None, select_params=None): <NEW_LINE> <INDENT> assert self.query.can_filter(), "Cannot change a query once a slice has been taken" <NEW_LINE> clone = self._clone() <NEW_LINE> clone.query.add_extra(select, select_params, where, params, tables, order_by) <NEW_LINE> return clone | Adds extra SQL fragments to the query. | 625941b53c8af77a43ae3591 |
def isReflected(self, points): <NEW_LINE> <INDENT> minX, maxX = float("inf"), float("-inf") <NEW_LINE> all = set() <NEW_LINE> for p in points: <NEW_LINE> <INDENT> minX = min(minX, p[0]) <NEW_LINE> maxX = max(maxX, p[0]) <NEW_LINE> all.add(tuple(p)) <NEW_LINE> <DEDENT> for p in points: <NEW_LINE> <INDENT> if (minX+maxX-p[0], p[1]) not in all: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | :type points: List[List[int]]
:rtype: bool | 625941b57c178a314d6ef24a |
def test_that_when_deleting_a_policy_succeeds_the_delete_policy_method_returns_true(boto_conn): <NEW_LINE> <INDENT> result = boto_iot.delete_policy(policyName='testpolicy', **pytest.conn_parameters) <NEW_LINE> assert result['deleted'] | tests True policy deleted. | 625941b5fbf16365ca6f5fad |
def add_job(self, job, args, envs, newid=None): <NEW_LINE> <INDENT> if newid is None: <NEW_LINE> <INDENT> newid = str(uuid.uuid4()) <NEW_LINE> <DEDENT> assert isinstance(job, jobthread.Job) <NEW_LINE> job.setup(self, newid) <NEW_LINE> jthread = threading.Thread(target=jobthread.begin, args=(job, args, envs)) <NEW_LINE> jthread.setDaemon(True) <NEW_LINE> jthread.start() <NEW_LINE> with self.lck: <NEW_LINE> <INDENT> self.jobthreads[newid] = jthread | Add a job to this server and start it
:param job: the job object
:param args: arguments for this job
:param envs: an environment dictionary if required
:param newid: An ID for the job, if omitted a unique string is generated
:return: | 625941b5d58c6744b4257a53 |
def main(): <NEW_LINE> <INDENT> set_infomation() <NEW_LINE> image_cap() <NEW_LINE> upload_files() | Main function. | 625941b5f548e778e58cd36e |
def init(_time_str: str=None): <NEW_LINE> <INDENT> global time_str <NEW_LINE> if _time_str: time_str = _time_str <NEW_LINE> else: time_str = datetime.utcnow().strftime("%Y%m%d-%H%M%S") | Sets the time string for the log file (i.e. "log-{time_str}.txt"). | 625941b5004d5f362079a12a |
def __init__(self,con,BPrIds): <NEW_LINE> <INDENT> self.con = con <NEW_LINE> self.cur = self.con.cursor() <NEW_LINE> self.BPrIds = BPrIds <NEW_LINE> self.setBasicData() | docstring for __init__ | 625941b501c39578d7e74c36 |
@pytest.fixture(scope='session') <NEW_LINE> def bottle_app(backend): <NEW_LINE> <INDENT> app = bottle.Bottle() <NEW_LINE> jwt_plugin = JWTProviderPlugin( keyword='jwt', auth_endpoint='/auth', backend=backend, fields=('username', 'password'), secret='my_secret', ttl=1, **{'id_field': 'username'} ) <NEW_LINE> app.install(jwt_plugin) <NEW_LINE> @app.get('/') <NEW_LINE> @jwt_auth_required <NEW_LINE> def private_resource(): <NEW_LINE> <INDENT> return {'user': bottle.request.get_user()} <NEW_LINE> <DEDENT> return webtest.TestApp(app) | pytest fixture for `bottle.Bottle` instance.
| 625941b567a9b606de4a7cb0 |
def poison_estimator( self, x: np.ndarray, y: np.ndarray, batch_size: int = 64, nb_epochs: int = 10, **kwargs ) -> "CLASSIFIER_TYPE": <NEW_LINE> <INDENT> train_data = np.copy(x) <NEW_LINE> train_labels = np.copy(y) <NEW_LINE> selected_indices = np.zeros(len(x)).astype(bool) <NEW_LINE> if len(self.pp_poison) == 1: <NEW_LINE> <INDENT> if type(self.target) is np.ndarray: <NEW_LINE> <INDENT> not_target = np.logical_not(np.all(y == self.target, axis=1)) <NEW_LINE> selected_indices[not_target] = np.random.uniform(size=sum(not_target)) < self.pp_poison[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for src, _ in self.target: <NEW_LINE> <INDENT> all_src = np.all(y == src, axis=1) <NEW_LINE> selected_indices[all_src] = np.random.uniform(size=sum(all_src)) < self.pp_poison[0] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for pp, (src, _) in zip(self.pp_poison, self.target): <NEW_LINE> <INDENT> all_src = np.all(y == src, axis=1) <NEW_LINE> selected_indices[all_src] = np.random.uniform(size=sum(all_src)) < pp <NEW_LINE> <DEDENT> <DEDENT> if type(self.target) is np.ndarray: <NEW_LINE> <INDENT> to_be_poisoned = train_data[selected_indices] <NEW_LINE> poison_data, poison_labels = self.poison(to_be_poisoned, y=self.target, broadcast=True) <NEW_LINE> poison_idxs = np.arange(len(x))[selected_indices] <NEW_LINE> for i, idx in enumerate(poison_idxs): <NEW_LINE> <INDENT> train_data[idx] = poison_data[i] <NEW_LINE> train_labels[idx] = poison_labels[i] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for src, tgt in self.target: <NEW_LINE> <INDENT> poison_mask = np.logical_and(selected_indices, np.all(y == src, axis=1)) <NEW_LINE> to_be_poisoned = train_data[poison_mask] <NEW_LINE> src_poison_data, src_poison_labels = self.poison(to_be_poisoned, y=shape_labels(tgt), broadcast=True) <NEW_LINE> train_data[poison_mask] = src_poison_data <NEW_LINE> train_labels[poison_mask] = src_poison_labels <NEW_LINE> <DEDENT> <DEDENT> is_backdoor = selected_indices.astype(int) <NEW_LINE> is_backdoor = np.fromfunction(lambda b_idx: np.eye(2)[is_backdoor[b_idx]], shape=(len(x),), dtype=int) <NEW_LINE> self.train_data = train_data <NEW_LINE> self.train_labels = train_labels <NEW_LINE> self.is_backdoor = is_backdoor <NEW_LINE> if isinstance(self.estimator, KerasClassifier): <NEW_LINE> <INDENT> self.embed_model.fit( train_data, y=[train_labels, is_backdoor], batch_size=batch_size, epochs=nb_epochs, **kwargs ) <NEW_LINE> params = self.estimator.get_params() <NEW_LINE> del params["model"] <NEW_LINE> return KerasClassifier(self.orig_model, **params) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError("Currently only Keras is supported") | Train a poisoned model and return it
:param x: Training data
:param y: Training labels
:param batch_size: The size of the batches used for training
:param nb_epochs: The number of epochs to train for
:return: A classifier with embedded backdoors | 625941b54f6381625f114839 |
def make_module_extra(self): <NEW_LINE> <INDENT> txt = super(EB_LAMMPS, self).make_module_extra() <NEW_LINE> python = get_software_version('Python') <NEW_LINE> if python: <NEW_LINE> <INDENT> pyshortver = '.'.join(get_software_version('Python').split('.')[:2]) <NEW_LINE> pythonpath = os.path.join('lib', 'python%s' % pyshortver, 'site-packages') <NEW_LINE> txt += self.module_generator.prepend_paths('PYTHONPATH', [pythonpath]) <NEW_LINE> <DEDENT> return txt | Add install path to PYTHONPATH | 625941b5851cf427c661a30e |
def testCompression(self): <NEW_LINE> <INDENT> dirtools.os = os <NEW_LINE> dirtools.open = open <NEW_LINE> test_dir = '/tmp/test_dirtools' <NEW_LINE> if os.path.isdir(test_dir): <NEW_LINE> <INDENT> shutil.rmtree(test_dir) <NEW_LINE> <DEDENT> os.mkdir(test_dir) <NEW_LINE> with open(os.path.join(test_dir, 'file1'), 'wb') as f: <NEW_LINE> <INDENT> f.write(os.urandom(2 ** 10)) <NEW_LINE> <DEDENT> with open(os.path.join(test_dir, 'file2.pyc'), 'w') as f: <NEW_LINE> <INDENT> f.write('excluded') <NEW_LINE> <DEDENT> os.mkdir(os.path.join(test_dir, 'dir1')) <NEW_LINE> with open(os.path.join(test_dir, 'dir1/file1'), 'wb') as f: <NEW_LINE> <INDENT> f.write(os.urandom(2 ** 10)) <NEW_LINE> <DEDENT> cdir = dirtools.Dir(test_dir) <NEW_LINE> archive_path = cdir.compress_to() <NEW_LINE> tar = tarfile.open(archive_path) <NEW_LINE> test_dir_extract = '/tmp/test_dirtools_extract' <NEW_LINE> if os.path.isdir(test_dir_extract): <NEW_LINE> <INDENT> shutil.rmtree(test_dir_extract) <NEW_LINE> <DEDENT> os.mkdir(test_dir_extract) <NEW_LINE> tar.extractall(test_dir_extract) <NEW_LINE> extracted_dir = dirtools.Dir(test_dir_extract) <NEW_LINE> self.assertEqual(sorted(extracted_dir.files()), sorted(cdir.files())) <NEW_LINE> self.assertEqual(sorted(extracted_dir.subdirs()), sorted(cdir.subdirs())) <NEW_LINE> self.assertEqual(extracted_dir.hash(dirtools.filehash), cdir.hash(dirtools.filehash)) <NEW_LINE> shutil.rmtree(test_dir) <NEW_LINE> shutil.rmtree(test_dir_extract) <NEW_LINE> os.remove(archive_path) | Check the compression, without pyfakefs because it doesn't support tarfile. | 625941b5009cb60464c631b0 |
def update_virus_fields(database, virus_name, fields): <NEW_LINE> <INDENT> if not fields: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with open(database, "r+") as db_file: <NEW_LINE> <INDENT> contents = db_file.readlines() <NEW_LINE> index = get_virus_index(contents, virus_name) <NEW_LINE> if index != 0 and index < len(contents): <NEW_LINE> <INDENT> existing_fields = get_fields(index+1, contents) <NEW_LINE> for field in fields: <NEW_LINE> <INDENT> field_elements = field.split('=') <NEW_LINE> if field_elements[0] in existing_fields: <NEW_LINE> <INDENT> print("Updating field {} with data {}".format(field_elements[0], field_elements[1])) <NEW_LINE> contents[existing_fields[field_elements[0]]] = "{} {}\n".format(field_elements[0], field_elements[1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Virus {} does not have field '{}'.".format(virus_name, field_elements[0])) <NEW_LINE> <DEDENT> <DEDENT> db_file.seek(0,0) <NEW_LINE> db_file.write(''.join(contents)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> print("Something went wrong in update_virus_fields") <NEW_LINE> traceback.print_exc(5) | update virus fields to have new value - does nothing if fields are not present | 625941b573bcbd0ca4b2be70 |
def mel_spectrogram(spectrogram, stft_channels=p.stft_channels, n_mels=p.n_mels, fmin=p.fmin, fmax=p.fmax, sr=p.sr): <NEW_LINE> <INDENT> if stft_channels != p.stft_channels or n_mels != p.n_mels or fmin != p.fmin or fmax != p.fmax or sr != p.sr: <NEW_LINE> <INDENT> mel_basis = librosa.filters.mel(sr=sr, n_fft=stft_channels, n_mels=n_mels, fmin=fmin, fmax=fmax) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mel_basis = p.mel_basis <NEW_LINE> <DEDENT> return np.dot(mel_basis, spectrogram) | Compute the mel spectrogram from a spectrogram. | 625941b51f037a2d8b945ff1 |
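A hedged usage sketch for the projection above; it assumes the module's parameter object `p` is importable alongside the function, the signal and parameter values are illustrative, and passing parameters that differ from the defaults in `p` makes the function rebuild the mel filter bank locally via `librosa.filters.mel`:

```python
import numpy as np
import librosa

y, sr = librosa.load(librosa.ex('trumpet'))      # any mono signal works
spec = np.abs(librosa.stft(y, n_fft=1024)) ** 2  # power spectrogram, shape (513, frames)
mel = mel_spectrogram(spec, stft_channels=1024, n_mels=80, fmin=0.0, fmax=sr / 2, sr=sr)
print(mel.shape)                                 # (80, frames)
```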
def entry_is_empty(data): <NEW_LINE> <INDENT> if data == None or data == "": <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Tests for a null data field | 625941b57b180e01f3dc45f9 |
def fetch_data(self, sql_command): <NEW_LINE> <INDENT> cursor = self.connect_sqllite() <NEW_LINE> cursor.execute(sql_command) <NEW_LINE> results = cursor.fetchall() <NEW_LINE> return results | Executes the SQLite commands with an established cursor.
: param cursor: An established cursor object between Python and sqllite.
: param sql_command: The sqllite command to be executed. | 625941b5377c676e91271f9d |
@bp.route('/reset_password/<token>', methods=['GET', 'POST']) <NEW_LINE> def reset_password(token): <NEW_LINE> <INDENT> if current_user.is_authenticated: <NEW_LINE> <INDENT> return redirect(url_for('coding.index')) <NEW_LINE> <DEDENT> user = User.verify_reset_password_token(token) <NEW_LINE> if not user: <NEW_LINE> <INDENT> return redirect(url_for('coding.index')) <NEW_LINE> <DEDENT> form = ResetPasswordForm() <NEW_LINE> if form.validate_on_submit(): <NEW_LINE> <INDENT> user.set_password(form.password.data) <NEW_LINE> db.session.commit() <NEW_LINE> flash('Your password has been reset.') <NEW_LINE> return redirect(url_for('auth.login')) <NEW_LINE> <DEDENT> return render_template( 'auth/reset_password.html', form=form, ) | Allow a user to reset their password.
Args:
token (str): A reset token generated by
send_password_reset_email
Returns:
Redirect to reset password page when navigating to this route
for the first time.
Redirect to home page if user is already logged in.
Redirect to home page if reset token is not authenticated.
Redirect to login page if reset token is authenticated. | 625941b5796e427e537b03b5 |
def _reindent(text, indent): <NEW_LINE> <INDENT> if text: <NEW_LINE> <INDENT> return text.replace("\n", "\n" + indent) | Reindent `text`. | 625941b53539df3088e2e13e |
def get_ip_for_interface(self, network_interface): <NEW_LINE> <INDENT> if network_interface.startswith('0') or network_interface == '::': <NEW_LINE> <INDENT> return network_interface <NEW_LINE> <DEDENT> is_cidr = len(network_interface.split(".")) == 4 or len( network_interface.split(":")) == 8 <NEW_LINE> if is_cidr: <NEW_LINE> <INDENT> interfaces = netifaces.interfaces() <NEW_LINE> for interface in interfaces: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ip = netifaces.ifaddresses(interface)[2][0]['addr'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if ipaddress.ip_address(ip) in ipaddress.ip_network( network_interface): <NEW_LINE> <INDENT> return ip <NEW_LINE> <DEDENT> <DEDENT> raise BigtopError( u"This machine has no interfaces in CIDR range {}".format( network_interface)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ip = netifaces.ifaddresses(network_interface)[2][0]['addr'] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise BigtopError( u"This machine does not have an interface '{}'".format( network_interface)) <NEW_LINE> <DEDENT> return ip | Helper to return the ip address of this machine on a specific
interface.
@param str network_interface: either the name of the
interface, or a CIDR range, in which we expect the interface's
ip to fall. Also accepts 0.0.0.0 (and variants, like 0/0) as a
special case, which will simply return what you passed in. | 625941b5099cdd3c635f0a4f |
def handle(self, request): <NEW_LINE> <INDENT> entity = pop_path_info(request.environ) <NEW_LINE> if request.method == "OPTIONS": <NEW_LINE> <INDENT> return self.handle_options(request, entity) <NEW_LINE> <DEDENT> if not entity: <NEW_LINE> <INDENT> return self.handle_index() <NEW_LINE> <DEDENT> elif entity == "robots.txt": <NEW_LINE> <INDENT> return self.handle_robots() <NEW_LINE> <DEDENT> elif entity == "main.css": <NEW_LINE> <INDENT> return self.handle_css() <NEW_LINE> <DEDENT> elif entity == "img": <NEW_LINE> <INDENT> return self.handle_svg_img(pop_path_info(request.environ)) <NEW_LINE> <DEDENT> elif entity == "js": <NEW_LINE> <INDENT> return self.handle_js(pop_path_info(request.environ)) <NEW_LINE> <DEDENT> elif entity == 'api': <NEW_LINE> <INDENT> return self.handle_api(request) <NEW_LINE> <DEDENT> self.validate_entity(entity) <NEW_LINE> req_mbid = shift_path_info(request.environ) <NEW_LINE> self.validate_mbid(req_mbid) <NEW_LINE> mbid = self.resolve_mbid(entity, req_mbid) <NEW_LINE> filename = pop_path_info(request.environ) <NEW_LINE> if entity == 'release-group': <NEW_LINE> <INDENT> return self.handle_release_group(request, mbid, filename) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.handle_release(request, mbid, filename) | Handle a request, parse and validate arguments and dispatch the request. | 625941b57b180e01f3dc45fa |
def ExecuteTimeLoad(command): <NEW_LINE> <INDENT> browsers = command["--browsers"].split(",") <NEW_LINE> num_browsers = len(browsers) <NEW_LINE> if command["--browserversions"]: <NEW_LINE> <INDENT> browser_versions = command["--browserversions"].split(",") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> browser_versions = [None] * num_browsers <NEW_LINE> <DEDENT> if command["--browserpaths"]: <NEW_LINE> <INDENT> browser_paths = command["--browserpaths"].split(",") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> browser_paths = [None] * num_browsers <NEW_LINE> <DEDENT> if len(browser_versions) != num_browsers: <NEW_LINE> <INDENT> raise ValueError( "--browserversions must be same length as --browser_paths") <NEW_LINE> <DEDENT> if len(browser_paths) != num_browsers: <NEW_LINE> <INDENT> raise ValueError( "--browserversions must be same length as --browser_paths") <NEW_LINE> <DEDENT> if [b for b in browsers if b not in ["chrome", "ie", "firefox"]]: <NEW_LINE> <INDENT> raise ValueError("unknown browsers: %r" % b) <NEW_LINE> <DEDENT> scraper_list = [] <NEW_LINE> for b in xrange(num_browsers): <NEW_LINE> <INDENT> version = browser_versions[b] <NEW_LINE> if not version: version = None <NEW_LINE> scraper = scrapers.GetScraper( (browsers[b], version) ) <NEW_LINE> if not scraper: <NEW_LINE> <INDENT> raise ValueError("could not find scraper for (%r, %r)" % (browsers[b], version)) <NEW_LINE> <DEDENT> scraper_list.append(scraper) <NEW_LINE> <DEDENT> if command["--url"]: <NEW_LINE> <INDENT> url_list = [command["--url"]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> startline = command["--startline"] <NEW_LINE> if command["--count"]: <NEW_LINE> <INDENT> endline = startline+command["--count"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> endline = command["--endline"] <NEW_LINE> <DEDENT> url_list = [url.strip() for url in open(command["--list"], "r").readlines()[startline:endline]] <NEW_LINE> <DEDENT> log_file = open(command["--logfile"], "w") <NEW_LINE> log_file.write("URL") <NEW_LINE> for b in xrange(num_browsers): <NEW_LINE> <INDENT> log_file.write(",%s" % browsers[b]) <NEW_LINE> if browser_versions[b]: log_file.write(" %s" % browser_versions[b]) <NEW_LINE> <DEDENT> log_file.write("\n") <NEW_LINE> results = {} <NEW_LINE> for url in url_list: <NEW_LINE> <INDENT> results[url] = [None] * num_browsers <NEW_LINE> <DEDENT> for b in xrange(num_browsers): <NEW_LINE> <INDENT> result = scraper_list[b].Time(url_list, command["--size"], command["--timeout"], path=browser_paths[b]) <NEW_LINE> for (url, time) in result: <NEW_LINE> <INDENT> results[url][b] = time <NEW_LINE> <DEDENT> <DEDENT> for url in url_list: <NEW_LINE> <INDENT> log_file.write(url) <NEW_LINE> for b in xrange(num_browsers): <NEW_LINE> <INDENT> log_file.write(",%r" % results[url][b]) | Executes the TimeLoad command. | 625941b5a17c0f6771cbde47 |
def make_move(self): <NEW_LINE> <INDENT> raise NotImplementedError('make_move') | Return the next move to make. | 625941b5627d3e7fe0d68c41 |
def deletePlayers(): <NEW_LINE> <INDENT> conn = connect() <NEW_LINE> cursor = conn.cursor() <NEW_LINE> cursor.execute("TRUNCATE TABLE players CASCADE") <NEW_LINE> conn.commit() <NEW_LINE> conn.close() | Remove all the player records from the database. | 625941b5b7558d58953c4d0f |
def deployments_list(resource_group, **kwargs): <NEW_LINE> <INDENT> result = {} <NEW_LINE> resconn = __utils__['azurearm.get_client']('resource', **kwargs) <NEW_LINE> try: <NEW_LINE> <INDENT> deployments = __utils__['azurearm.paged_object_to_list']( resconn.deployments.list_by_resource_group( resource_group_name=resource_group ) ) <NEW_LINE> for deploy in deployments: <NEW_LINE> <INDENT> result[deploy['name']] = deploy <NEW_LINE> <DEDENT> <DEDENT> except CloudError as exc: <NEW_LINE> <INDENT> __utils__['azurearm.log_cloud_error']('resource', str(exc), **kwargs) <NEW_LINE> result = {'error': str(exc)} <NEW_LINE> <DEDENT> return result | .. versionadded:: Fluorine
List all deployments within a resource group.
CLI Example:
.. code-block:: bash
salt-call azurearm_resource.deployments_list testgroup | 625941b5097d151d1a222c4f |
def test_140_weight_read_only(self): <NEW_LINE> <INDENT> rule = Mock() <NEW_LINE> rule.ruletype = TERuletype.allow <NEW_LINE> rule.tclass = "infoflow" <NEW_LINE> rule.perms = set(["med_r", "hi_r"]) <NEW_LINE> permmap = PermissionMap("tests/perm_map") <NEW_LINE> r, w = permmap.rule_weight(rule) <NEW_LINE> self.assertEqual(r, 10) <NEW_LINE> self.assertEqual(w, 0) | PermMap get weight of read-only rule. | 625941b50c0af96317bb7fdc |
def glInitGl45VERSION(): <NEW_LINE> <INDENT> from OpenGL import extensions <NEW_LINE> return extensions.hasGLExtension( _EXTENSION_NAME ) | Return boolean indicating whether this extension is available | 625941b530bbd722463cbbb5 |
def process(self): <NEW_LINE> <INDENT> histo = copy.deepcopy(self.data['pitches.pitchClassHistogram']) <NEW_LINE> pIndexMax = histo.index(max(histo)) <NEW_LINE> pCountMax = histo[pIndexMax] <NEW_LINE> if not pCountMax: <NEW_LINE> <INDENT> raise JSymbolicFeatureException('input lacks notes') <NEW_LINE> <DEDENT> histo[pIndexMax] = 0 <NEW_LINE> pIndexSecond = histo.index(max(histo)) <NEW_LINE> pCountSecond = histo[pIndexSecond] <NEW_LINE> self.feature.vector[0] = pCountSecond / pCountMax | Do processing necessary, storing result in feature.
| 625941b538b623060ff0abe2 |
def entity_destination_location_type_get(self, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('async_req'): <NEW_LINE> <INDENT> return self.entity_destination_location_type_get_with_http_info(**kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.entity_destination_location_type_get_with_http_info(**kwargs) <NEW_LINE> return data | EntityDestinationLocationType_GET # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.entity_destination_location_type_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields:
:param str ordering:
:param str page_mode:
:return: object
If the method is called asynchronously,
returns the request thread. | 625941b5d58c6744b4257a54 |
def create(self): <NEW_LINE> <INDENT> c.default_extern_type = auth_internal.KallitheaAuthPlugin.name <NEW_LINE> c.default_extern_name = auth_internal.KallitheaAuthPlugin.name <NEW_LINE> user_model = UserModel() <NEW_LINE> user_form = UserForm()() <NEW_LINE> try: <NEW_LINE> <INDENT> form_result = user_form.to_python(dict(request.POST)) <NEW_LINE> user = user_model.create(form_result) <NEW_LINE> usr = form_result['username'] <NEW_LINE> action_logger(self.authuser, 'admin_created_user:%s' % usr, None, self.ip_addr, self.sa) <NEW_LINE> h.flash(h.literal(_('Created user %s') % h.link_to(h.escape(usr), url('edit_user', id=user.user_id))), category='success') <NEW_LINE> Session().commit() <NEW_LINE> <DEDENT> except formencode.Invalid as errors: <NEW_LINE> <INDENT> return htmlfill.render( render('admin/users/user_add.html'), defaults=errors.value, errors=errors.error_dict or {}, prefix_error=False, encoding="UTF-8", force_defaults=False) <NEW_LINE> <DEDENT> except UserCreationError as e: <NEW_LINE> <INDENT> h.flash(e, 'error') <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> log.error(traceback.format_exc()) <NEW_LINE> h.flash(_('Error occurred during creation of user %s') % request.POST.get('username'), category='error') <NEW_LINE> <DEDENT> raise HTTPFound(location=url('users')) | POST /users: Create a new item | 625941b5d8ef3951e3243330 |
def test_RestrictingNodeTransformer__visit_Name__3(): <NEW_LINE> <INDENT> result = compile_restricted_exec( BAD_NAME_OVERRIDE_OVERRIDE_GUARD_WITH_FUNCTION) <NEW_LINE> assert result.errors == ( 'Line 2: "_getattr" is an invalid variable name because it ' 'starts with "_"',) | It denies a function name starting with `_`. | 625941b5956e5f7376d70c6f |
def start_parsing(self): <NEW_LINE> <INDENT> if self.check_parser_start() is False: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.parsing_control_button.config(state=tk.DISABLED) <NEW_LINE> self.parsing_control_button.update() <NEW_LINE> args = (self.window.characters_frame.characters, self.character_data, self.window.builds_frame.ships_data, self.window.builds_frame.companions_data) <NEW_LINE> if self.minimap_enabled.get() is True: <NEW_LINE> <INDENT> self.minimap = MiniMap(self.window) <NEW_LINE> <DEDENT> kwargs = { "spawn_callback": self.spawn_callback, "match_callback": self.match_callback, "file_callback": self.file_callback, "event_callback": self.event_callback, "minimap_share": self.minimap_enabled.get(), "minimap_user": self.minimap_name.get(), "minimap_address": self.minimap_address.get(), "minimap_window": self.minimap, "rpc": self.window.rpc, } <NEW_LINE> try: <NEW_LINE> <INDENT> self.parser = RealTimeParser(*args, **kwargs) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> messagebox.showerror( "Error", "An error occurred during the initialization of the RealTimeParser. Please report the error given " "below, as well as, if possible, the full stack-trace to the developer.\n\n{}".format(e)) <NEW_LINE> raise <NEW_LINE> <DEDENT> self.parsing_control_button.config(text="Stop Parsing", command=self.stop_parsing) <NEW_LINE> self.watching_stringvar.set("Waiting for a CombatLog...") <NEW_LINE> self.open_overlay() <NEW_LINE> self.open_event_overlay() <NEW_LINE> self.update_data_string() <NEW_LINE> self.parser.start() <NEW_LINE> self._rtp_id = self.after(100, self.check_alive) <NEW_LINE> self.data_after_id = self.after(1000, self.update_data_string) <NEW_LINE> self.parsing_control_button.config(state=tk.NORMAL) <NEW_LINE> print("[RealTimeFrame] Parsing started. Threads: {}".format(threading.enumerate())) | Start the results process and open the Overlay | 625941b50fa83653e4656db1 |
def _get_smc_filename(self, ids): <NEW_LINE> <INDENT> return '%s - Situation map closeup.jpg' % self.short_name | Situation map closeup filename | 625941b515fb5d323cde08fc |
def changePassword(request): <NEW_LINE> <INDENT> user = username = old_password = new_password = confirm_password = None <NEW_LINE> if request.POST: <NEW_LINE> <INDENT> old_password = request.POST['old_password'] <NEW_LINE> new_password = request.POST['new_password'] <NEW_LINE> confirm_password = request.POST['confirm_password'] <NEW_LINE> if new_password != confirm_password: <NEW_LINE> <INDENT> pass_message = 'The new password doesn\'t match' <NEW_LINE> user = request.user <NEW_LINE> user_settings = UserSettings.objects.get(user=user) <NEW_LINE> ctxt = { 'pass_message': pass_message, 'user': user, 'user_settings': user_settings } <NEW_LINE> return render_to_response( 'account/edit_profile.html', context=ctxt, context_instance=RequestContext(request) ) <NEW_LINE> <DEDENT> elif request.user.is_authenticated(): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> username = request.user.username <NEW_LINE> user_auth = authenticate(username=username, password=old_password) <NEW_LINE> if user_auth is None: <NEW_LINE> <INDENT> pass_message = 'The old password is not correct' <NEW_LINE> user_settings = UserSettings.objects.get(user=user) <NEW_LINE> ctxt = { 'pass_message': pass_message, 'user': user, 'user_settings': user_settings } <NEW_LINE> return render_to_response( 'account/edit_profile.html', context=ctxt, context_instance=RequestContext(request) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> user.set_password(new_password) <NEW_LINE> user.save() <NEW_LINE> new_user_session = authenticate( username=username, password=new_password ) <NEW_LINE> if new_user_session is not None: <NEW_LINE> <INDENT> if new_user_session.is_active: <NEW_LINE> <INDENT> login(request, new_user_session) <NEW_LINE> <DEDENT> <DEDENT> pass_message = 'Password updated successfully!' <NEW_LINE> user_settings = UserSettings.objects.get(user=new_user_session) <NEW_LINE> ctxt = { 'pass_message': pass_message, 'user': new_user_session, 'user_settings': user_settings } <NEW_LINE> return render_to_response( 'account/edit_profile.html', context=ctxt, context_instance=RequestContext(request) ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return render_to_response( 'account/landing.html', context_instance=RequestContext(request) ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return render_to_response( 'account/edit_profile.html', context_instance=RequestContext(request) ) | Change password function. | 625941b53346ee7daa2b2b5c |
def key_event_answer(key): <NEW_LINE> <INDENT> if key: <NEW_LINE> <INDENT> print(f"Key {action}: {key}") <NEW_LINE> check_target(key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(f"Key {action}: ESC") <NEW_LINE> print("Script terminated.") <NEW_LINE> return False | If a key code is given, check if it is target, otherwise (ESC) quit.
Parameters:
key (str): Key code captured.
Returns:
bool: False only if key pressed/released was ESC. | 625941b52eb69b55b151c69d |
def test_version(): <NEW_LINE> <INDENT> assert roddy.__version__ | There should be a version associated with roddy. | 625941b585dfad0860c3ac4c |
def __init__(self, mainwindow, selection): <NEW_LINE> <INDENT> self.mainwindow = mainwindow <NEW_LINE> self.model = mainwindow.model <NEW_LINE> self.selection = selection <NEW_LINE> self.boxes = [self.create_info_box(elem) for elem in selection] <NEW_LINE> selection.inserted.connect(self._insert) <NEW_LINE> selection.removed.connect(self._delete) <NEW_LINE> selection.changed.connect(self._modify) <NEW_LINE> selection.cursor.changed.connect(self._cursor_changed) <NEW_LINE> self.event_filter = EventFilter({ 'WindowActivate': self._on_activate_box, 'Close': self._on_close_box, }) | Add toolbar tool to panel and subscribe to capture events. | 625941b5aad79263cf39082e |
def generate(self, seed: list, iteration_count: int, name: str, output: CustomTrackPoolInterface, track: CustomTrack1D = CustomTrack1D(8, 4, 4, [], "")) -> tuple: <NEW_LINE> <INDENT> iteration_seed = seed <NEW_LINE> generated = [] <NEW_LINE> raw = [] <NEW_LINE> for iteration in range(int(iteration_count / self.y_size)): <NEW_LINE> <INDENT> raw_division = self.model.predict(np.array([iteration_seed]), self.batch_size)[0].tolist() <NEW_LINE> raw += raw_division <NEW_LINE> division = [] <NEW_LINE> division += self.threshold_sequence_max_delta(raw_division) <NEW_LINE> iteration_seed += division <NEW_LINE> generated += division <NEW_LINE> iteration_seed = iteration_seed[self.y_size:] <NEW_LINE> <DEDENT> track.divisions = generated <NEW_LINE> track.name = name <NEW_LINE> if output is not None: <NEW_LINE> <INDENT> print("Saving in output") <NEW_LINE> output.put_track(track, raw) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("No output!") <NEW_LINE> <DEDENT> return seed, generated, raw | Generates a set of beats from a seed: builds the division line for a track and
returns the seed and the generated part separately.
:param track: A Track instance with the given size and division parameters,
used as a container for the generated data to be passed on to TrackPoolDoge
:param output: Output-data interface for the model
:param name: Name of the track being generated; assigned to the track for logging, e.g. naming it after the seed it belongs to.
:param seed: Input data that starts the generation
:param iteration_count: Number of beats to generate (ideally a multiple of the number of beats per bar);
must be greater than y_size!
:return: (seed, generated, raw) | 625941b58e05c05ec3eea164 |
def lowess_xr(da, x_dset="date", min_days_weighted=2 * 365.25, frac=0.7, n_iter=2): <NEW_LINE> <INDENT> import xarray as xr <NEW_LINE> x = date2num(da[x_dset].values) <NEW_LINE> if min_days_weighted and min_days_weighted > 0: <NEW_LINE> <INDENT> frac = _find_frac(x, min_days_weighted) <NEW_LINE> <DEDENT> out_stack = lowess_stack(da.values, x, frac, n_iter) <NEW_LINE> out_da = xr.DataArray(out_stack, coords=da.coords, dims=da.dims) <NEW_LINE> out_da.attrs["description"] = "Lowess smoothed stack" <NEW_LINE> out_da = _write_attrs(out_da, frac=frac, n_iter=n_iter) <NEW_LINE> return out_da | Run lowess on a DataArray stack.
Args:
da (xr.DataArray): 3D xarray containing data to be smoothed along dimension `x_dset`.
x_dset (str, optional): Name of the time dimension. Defaults to "date".
min_days_weighted (float, optional): Minimum time period of data to include in smoothing.
See notes. Defaults to 365.25*2 (2 years of data).
n_iter (int, optional): Number of LOWESS iterations to run to exclude outliers.
Defaults to 2.
Returns:
xr.DataArray: stack from `da` smoothed along the dimension `x_dset`.
Notes:
When sampling is irregular, specifying one fraction of data for lowess will lead to parts
of the smoothing using longer time intervals. `min_days_weighted` is used to specify the
minimum time desired for the smoothing. For example, if the data starts as sampled every
month, but then switches to sampling every 2-weeks, the fraction will be use the proportion
of data that is needed to include at least `min_days_weighted` days of data during the
2-week sampling time. | 625941b591af0d3eaac9b807 |
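A hedged usage sketch, assuming the module's helpers referenced in the code (`date2num`, `lowess_stack`, `_find_frac`, `_write_attrs`) are importable alongside the function; the stack here is synthetic:

```python
import numpy as np
import pandas as pd
import xarray as xr

dates = pd.date_range("2019-01-01", periods=50, freq="15D")
da = xr.DataArray(np.random.rand(50, 4, 4),
                  coords={"date": dates}, dims=("date", "y", "x"))
smoothed = lowess_xr(da, x_dset="date", min_days_weighted=365.25, n_iter=2)
```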
def cast(*args): <NEW_LINE> <INDENT> return _itkInterpolateImageFilterPython.itkInterpolateImageFilterIUS3IUS3_cast(*args) | cast(itkLightObject obj) -> itkInterpolateImageFilterIUS3IUS3 | 625941b557b8e32f52483293 |
def negloglikelihood(self, mu, S, pi): <NEW_LINE> <INDENT> assert(mu.shape == (self.K, self.D) and S.shape == (self.K, self.D, self.D) and pi.shape == (self.K, 1)) <NEW_LINE> nlogl= 0 <NEW_LINE> for i in range(self.n): <NEW_LINE> <INDENT> l=0 <NEW_LINE> for k in range(self.K): <NEW_LINE> <INDENT> l+=pi[k]*stats.multivariate_normal.pdf(self.X[i,:],mu[k,:],S[k,:,:]) <NEW_LINE> <DEDENT> nlogl+=(np.log(l))[0] <NEW_LINE> <DEDENT> return -nlogl | Compute the negative log-likelihood of the data under the current mixture parameters.
Arguments:
mu -- component means, K x D array
S -- component covariances, K x D x D array
pi -- component weights, K x 1 array
Returns:
nlogl -- negative log-likelihood, 1 x 1 array | 625941b560cbc95b062c633c |
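In math form, the quantity the loop accumulates (with `l` reset per data point, as fixed above) is the standard Gaussian-mixture negative log-likelihood:

```latex
\mathrm{nlogl} = -\sum_{i=1}^{n} \log \sum_{k=1}^{K} \pi_k \, \mathcal{N}\left(x_i \mid \mu_k, S_k\right)
```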
def test_pool_list(self): <NEW_LINE> <INDENT> self.client.login(email='[email protected]', password='foo-bar') <NEW_LINE> pool1 = create_pool(self.user) <NEW_LINE> library1 = create_library(get_random_name(), status=4) <NEW_LINE> library2 = create_library(get_random_name(), status=-1) <NEW_LINE> sample1 = create_sample(get_random_name(), status=4) <NEW_LINE> pool1.libraries.add(*[library1.pk, library2.pk]) <NEW_LINE> pool1.samples.add(sample1) <NEW_LINE> pool2 = create_pool(self.user) <NEW_LINE> library3 = create_library(get_random_name(), status=4) <NEW_LINE> sample2 = create_sample(get_random_name(), status=3) <NEW_LINE> pool2.libraries.add(library3) <NEW_LINE> pool2.samples.add(sample2.pk) <NEW_LINE> pool3 = create_pool(self.user) <NEW_LINE> sample3 = create_sample(get_random_name(), status=2) <NEW_LINE> sample4 = create_sample(get_random_name(), status=-1) <NEW_LINE> pool3.samples.add(*[sample3.pk, sample4.pk]) <NEW_LINE> response = self.client.get(reverse('flowcells-pool-list')) <NEW_LINE> data = response.json() <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> pools = [x['name'] for x in data] <NEW_LINE> self.assertIn(pool1.name, pools) <NEW_LINE> self.assertIn(pool2.name, pools) <NEW_LINE> self.assertNotIn(pool3.name, pools) <NEW_LINE> pool1_obj = [x for x in data if x['name'] == pool1.name][0] <NEW_LINE> pool2_obj = [x for x in data if x['name'] == pool2.name][0] <NEW_LINE> self.assertTrue(pool1_obj['ready']) <NEW_LINE> self.assertFalse(pool2_obj['ready']) <NEW_LINE> self.assertTrue(data[0]['read_length_name'], library1.read_length.name) <NEW_LINE> self.assertTrue(data[1]['read_length_name'], sample1.read_length.name) <NEW_LINE> self.assertTrue(data[0]['pool_size'], pool1.size.multiplier) <NEW_LINE> self.assertTrue(data[1]['pool_size'], pool2.size.multiplier) | Ensure get pool list behaves correctly. | 625941b5d7e4931a7ee9dd0f |
def stop(self): <NEW_LINE> <INDENT> return _atsc.atsc_randomizer_sptr_stop(self) | stop(self) -> bool | 625941b5a8370b7717052695 |
def angle(x, y): <NEW_LINE> <INDENT> tx_ty = len(x) * len(y) <NEW_LINE> if tx_ty == 0: <NEW_LINE> <INDENT> return math.pi / 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> intersection_size = len(x & y) <NEW_LINE> return math.acos(intersection_size / math.sqrt(tx_ty)) | Compute the angle between sets x and y.
Parameters
----------
x : set
describes the terms in a tweet
y : set
describes the terms in a tweet
Returns
-------
angle : float
the angle between the two vectors in radians. | 625941b5d268445f265b4c68 |
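A worked check of the formula above: with one shared term out of two per set, the angle is acos(1 / sqrt(2 * 2)) = acos(0.5) = pi/3.

```python
import math

assert math.isclose(angle({'a', 'b'}, {'b', 'c'}), math.pi / 3)
assert angle(set(), {'b'}) == math.pi / 2   # empty input falls back to a right angle
```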
def __del__(self): <NEW_LINE> <INDENT> if self._cleanup_session: <NEW_LINE> <INDENT> self._session.loop.run_until_complete(self._session.close()) | Cleanup the session if it was created here | 625941b58e71fb1e9831d5a1 |
def toggle_link_state(self, id, linked): <NEW_LINE> <INDENT> self._api.put('scanners/{}/link'.format(self._check('id', id, int)), json={'link': int(self._check('linked', linked, bool))}) | Toggles the scanner's activated state.
:devportal:`scanners: toggle-link-state <scanners-toggle-link-state>`
Args:
id (int): The unique identifier for the scanner
linked (bool):
The link status of the scanner. Setting to `False` will disable
the link, whereas setting to `True` will enable the link.
Returns:
:obj:`None`:
The status change was successful.
Examples:
to deactivate a linked scanner:
>>> tio.scanners.toggle_link_state(1, False) | 625941b5d18da76e235322c5 |
def keys(self): <NEW_LINE> <INDENT> return list(self.__slots__) | return a list of available tag names | 625941b5d10714528d5ffad2 |
def topic_send(self, exchange_name, topic, msg, timeout=None, retry=None): <NEW_LINE> <INDENT> exchange = kombu.entity.Exchange( name=exchange_name, type='topic', durable=self.amqp_durable_queues, auto_delete=self.amqp_auto_delete) <NEW_LINE> self._ensure_publishing(self._publish, exchange, msg, routing_key=topic, retry=retry) | Send a 'topic' message. | 625941b54c3428357757c11f |
def test_serializer_class(self): <NEW_LINE> <INDENT> class TestTaskViewSet(viewsets.NamedModelViewSet): <NEW_LINE> <INDENT> serializer_class = serializers.TaskSerializer <NEW_LINE> <DEDENT> viewset = TestTaskViewSet() <NEW_LINE> self.assertEquals(viewset.get_serializer_class(), serializers.TaskSerializer) <NEW_LINE> request = unittest.mock.MagicMock() <NEW_LINE> request.query_params = QueryDict('minimal=True') <NEW_LINE> viewset.request = request <NEW_LINE> self.assertEquals(viewset.get_serializer_class(), serializers.TaskSerializer) | Tests that get_serializer_class() returns the serializer_class attribute if it exists,
and that it doesn't error if no minimal serializer is defined, but minimal=True. | 625941b594891a1f4081b89c |
def setMinimumLabelHeight(self, height): <NEW_LINE> <INDENT> self._minimumLabelHeight = height | Sets the minimum label height for this axis.
:param height | <int> | 625941b576d4e153a657e924 |
def _acquire_restore(self, state): <NEW_LINE> <INDENT> pass | _acquire_restore(state) -> None
For internal use by `threading.Condition`. | 625941b5a05bb46b383ec622 |
def add(self, token, args): <NEW_LINE> <INDENT> optional = ('distribution_id', 'virt_storage_size', 'virt_ram', 'virt_type', 'kickstart_metadata', 'kernel_options', 'puppet_classes') <NEW_LINE> required = ('name', 'version', 'valid_targets', 'is_container') <NEW_LINE> self.validate(args, required) <NEW_LINE> session = db.open_session() <NEW_LINE> try: <NEW_LINE> <INDENT> profile = db.Profile() <NEW_LINE> profile.update(args) <NEW_LINE> session.save(profile) <NEW_LINE> session.flush() <NEW_LINE> self.cobbler_sync(profile.get_hash()) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> session.close() | Create a profile.
@param token: A security token.
@type token: string
@param args: Profile attributes.
@type args: dict
@raise SQLException: On database error | 625941b50a50d4780f666c83 |
def is_valid_email(string): <NEW_LINE> <INDENT> return re.match('^[(a-z0-9\_\-\.)]+@[(a-z0-9\_\-\.)]+\.[(a-z)]{2,4}$', string.lower()) | Check if a string is a valid email address.
Arguments:
string: The text to be validated
Return:
True if it is a valid email address, False otherwise. | 625941b5d6c5a10208143e3b |
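Quick checks against the regex above; note that `re.match` actually returns a match object or `None`, so the result is truthy or falsy rather than a strict bool. The addresses are made up for illustration:

```python
import re   # required by the function above

assert is_valid_email('user.name@example.org')
assert not is_valid_email('not-an-email')
```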
def test_clone_unmanaged(): <NEW_LINE> <INDENT> xmlpath = CLI_XMLDIR + "clone-disk.xml" <NEW_LINE> conn = utils.URIs.open_testdefault_cached() <NEW_LINE> xml = open(xmlpath).read() <NEW_LINE> tmp1 = tempfile.NamedTemporaryFile() <NEW_LINE> tmp2 = tempfile.NamedTemporaryFile() <NEW_LINE> inp1 = os.path.abspath(__file__) <NEW_LINE> inp2 = xmlpath <NEW_LINE> xml = xml.replace("/tmp/__virtinst_cli_exist1.img", inp1) <NEW_LINE> xml = xml.replace("/tmp/__virtinst_cli_exist2.img", inp2) <NEW_LINE> cloner = Cloner(conn, src_xml=xml) <NEW_LINE> diskinfos = cloner.get_nonshare_diskinfos() <NEW_LINE> assert len(diskinfos) == 2 <NEW_LINE> diskinfos[0].set_new_path(tmp1.name, False) <NEW_LINE> diskinfos[1].set_new_path(tmp2.name, False) <NEW_LINE> cloner.prepare() <NEW_LINE> cloner.start_duplicate(None) <NEW_LINE> assert open(tmp1.name).read() == open(inp1).read() <NEW_LINE> assert open(tmp2.name).read() == open(inp2).read() | Test that unmanaged storage duplication via the clone wizard
actually copies data | 625941b510dbd63aa1bd29a4 |
def aperture82(self, p1=None, p2=None, p3=None, p4=None, verbose=False) : <NEW_LINE> <INDENT> if p2 is None: <NEW_LINE> <INDENT> p2= 0.0 <NEW_LINE> <DEDENT> if p3 is None: <NEW_LINE> <INDENT> p3= 1e-2 <NEW_LINE> <DEDENT> p1half= 0.5 * p1 <NEW_LINE> k= 2.0 * np.pi/self.wavelength <NEW_LINE> print('vertical slit with round edges (vwidth, vpos, edge_radius): ', p1, p2, p3) <NEW_LINE> print('routine not debugged- probably not correct') <NEW_LINE> Ny= len(self.y_vec) <NEW_LINE> Nz= len(self.z_vec) <NEW_LINE> T= np.zeros((Ny, Nz), dtype=complex)+ 1+ 0j <NEW_LINE> for row in np.arange(Ny) : <NEW_LINE> <INDENT> for col in np.arange(Nz) : <NEW_LINE> <INDENT> T[row, col]= complex(np.cos(p3), np.sin(p3)) <NEW_LINE> <DEDENT> <DEDENT> return T | helper function aperture82 | 625941b5d99f1b3c44c67390 |
def configure_hook(app): <NEW_LINE> <INDENT> @app.before_request <NEW_LINE> def before_request(): <NEW_LINE> <INDENT> g.user = current_user | Setup always available pre request context values
:param app: Application instance
:type app: Flask | 625941b5b5575c28eb68ddf1 |
def parse_option(): <NEW_LINE> <INDENT> usage = 'usage: %prog [options] keyword' <NEW_LINE> parser = optparse.OptionParser(usage) <NEW_LINE> parser.add_option('-p', '--path', default=None) <NEW_LINE> parser.add_option('-n', '--rotation-number', default=3, type='int') <NEW_LINE> parser.add_option('-l', '--logfile-path', default='./rotation.log') <NEW_LINE> parser.add_option('-f', '--force-rotate', default=False) <NEW_LINE> options, args = parser.parse_args() <NEW_LINE> return options | Parse option.
Many systems still ship Python 2.6.
`argparse` requires Python 2.7, so use optparse instead. | 625941b5be383301e01b5282 |
def __init__(self, players, num_owners, money, roster): <NEW_LINE> <INDENT> self.owners = [Owner(money, roster, i) for i in range(num_owners)] <NEW_LINE> self.players = players <NEW_LINE> self.players.sort(key=lambda player: player.value, reverse=True) <NEW_LINE> self.undrafted_players = list(players) <NEW_LINE> self.undrafted_players.sort(key=lambda player: player.value, reverse=True) <NEW_LINE> self.money = money <NEW_LINE> self.roster = roster <NEW_LINE> self.state = AuctionState.NOMINATE <NEW_LINE> self.turn_index = 0 <NEW_LINE> self.nominee = None <NEW_LINE> self.tickbids = [0] * num_owners <NEW_LINE> self.bids = [0] * num_owners <NEW_LINE> self.bid = None <NEW_LINE> self._nominee_index = -1 <NEW_LINE> self._player_ownership = [-1 for player in self.players] | Starts the auction with the specified settings.
:param list(Player) players: Players in this auction
:param int num_owners: number of owners. Owners are referenced by integer id.
:param int money: integer dollar amount of money each owner has
:param list(RosterPosition) roster:
list of RosterPositions each player needs to fill | 625941b599fddb7c1c9de187 |
def check_domuuid_compliant_with_rfc4122(dom_uuid_value): <NEW_LINE> <INDENT> dom_uuid_segments = dom_uuid_value.split('-') <NEW_LINE> return dom_uuid_segments[2].startswith('4') and dom_uuid_segments[3][0] in '89ab' | Check that the domain uuid format complies with RFC 4122.
xxxxxxxx-xxxx-Axxx-Bxxx-xxxxxxxxxxxx
A should be the RFC version number; since the compliant RFC version is 4122,
it should be the number 4.
B should be one of "8, 9, a or b".
:param dom_uuid_value: value of domain uuid
:return: True or False indicate whether it is compliant with RFC 4122. | 625941b5627d3e7fe0d68c42 |
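
A quick check with the standard library's uuid module illustrates both rules (a sketch; uuid4 always produces an RFC 4122, version-4 value):

    import uuid

    u = str(uuid.uuid4())
    # third segment starts with the version nibble '4',
    # fourth segment starts with a variant nibble in 8/9/a/b
    assert check_domuuid_compliant_with_rfc4122(u)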
def assert_geotiff_basics( output_tiff: Union[str, Path], expected_band_count=1, min_width=64, min_height=64, expected_shape=None ): <NEW_LINE> <INDENT> assert imghdr.what(output_tiff) == 'tiff' <NEW_LINE> with rasterio.open(output_tiff) as dataset: <NEW_LINE> <INDENT> assert dataset.width > min_width <NEW_LINE> assert dataset.height > min_height <NEW_LINE> if expected_shape is not None: <NEW_LINE> <INDENT> assert (dataset.count, dataset.height, dataset.width) == expected_shape <NEW_LINE> <DEDENT> elif expected_band_count is not None: <NEW_LINE> <INDENT> assert dataset.count == expected_band_count | Basic checks that a file is a readable GeoTIFF file | 625941b599cbb53fe67929db |
def compute_distances_three_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dim = X.shape[1] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> for j in range(num_train): <NEW_LINE> <INDENT> for k in range(dim): <NEW_LINE> <INDENT> dists[i][j] += (X[i][k] - self.X_train[j][k]) ** 2 <NEW_LINE> <DEDENT> dists[i][j] = np.sqrt(dists[i][j]) <NEW_LINE> <DEDENT> <DEDENT> return dists | Compute the distance between each test point in X and each training point
in self.X_train using nested loops over the training data, the test data,
and each element of their respective data.
Inputs:
- X: A numpy array of shape (num_test, D) containing test data.
Returns:
- dists: A numpy array of shape (num_test, num_train) where dists[i, j]
is the Euclidean distance between the ith test point and the jth training
point. | 625941b54d74a7450ccd3fb7 |
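
The same distance matrix can be computed without explicit loops; a fully vectorized sketch using the expansion ||x - y||^2 = ||x||^2 + ||y||^2 - 2*x.y (equivalent up to floating-point round-off):

    import numpy as np

    def compute_distances_vectorized(X_train, X):
        # pairwise squared distances via broadcasting plus one matmul
        sq = (np.sum(X ** 2, axis=1)[:, np.newaxis]
              + np.sum(X_train ** 2, axis=1)
              - 2.0 * X @ X_train.T)
        return np.sqrt(np.maximum(sq, 0.0))  # clip tiny negatives from round-off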
def tr(self, message): <NEW_LINE> <INDENT> return QCoreApplication.translate('dxftoshp', message) | Get the translation for a string using Qt translation API.
We implement this ourselves since we do not inherit QObject.
:param message: String for translation.
:type message: str, QString
:returns: Translated version of message.
:rtype: QString | 625941b58e7ae83300e4adc0 |
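
The method wraps the standard Qt translation call; a sketch of the underlying API (the message string is illustrative, and the PyQt5 import path is an assumption — QGIS plugins often import via qgis.PyQt instead):

    from PyQt5.QtCore import QCoreApplication

    msg = QCoreApplication.translate('dxftoshp', 'Input file not found')
    # returns the translated string if a translator is installed, else the source text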
def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return ClientSeriesReportItem( start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), client_id = 56, client_name = '0', span_seconds = 56 ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return ClientSeriesReportItem( ) | Test ClientSeriesReportItem
include_optional is a boolean: when False only required
params are included; when True both required and
optional params are included | 625941b5c4546d3d9de7282b |
def update_user_info(self, token, data=None): <NEW_LINE> <INDENT> url = url_for('user_detail') <NEW_LINE> return self.client.put(url, data=data, headers={self.header_name: token}) | Helper method to update user details. | 625941b5293b9510aa2c308d |
def yy(): <NEW_LINE> <INDENT> pass | Return calendar year as a 2 digit string
Permission -- Always available | 625941b53539df3088e2e13f |
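
The body above is a stub; one way the documented contract could be satisfied (an illustrative sketch, not the original implementation):

    import datetime

    def yy():
        # current calendar year as a zero-padded 2-digit string, e.g. '99' or '07'
        return datetime.date.today().strftime('%y')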
def build(self): <NEW_LINE> <INDENT> h, w = self.cf.patch_size[:2] <NEW_LINE> if h / 2 ** 5 != int(h / 2 ** 5) or w / 2 ** 5 != int(w / 2 ** 5): <NEW_LINE> <INDENT> raise Exception("Image size must be dividable by 2 at least 5 times " "to avoid fractions when downscaling and upscaling." "For example, use 256, 320, 384, 448, 512, ... etc. ") <NEW_LINE> <DEDENT> conv = mutils.NDConvGenerator(self.cf.dim) <NEW_LINE> backbone = utils.import_module('bbone', self.cf.backbone_path) <NEW_LINE> self.np_anchors = mutils.generate_pyramid_anchors(self.logger, self.cf) <NEW_LINE> self.anchors = torch.from_numpy(self.np_anchors).float().cuda() <NEW_LINE> self.Fpn = backbone.FPN(self.cf, conv, operate_stride1=self.cf.operate_stride1) <NEW_LINE> self.Classifier = Classifier(self.cf, conv) <NEW_LINE> self.BBRegressor = BBRegressor(self.cf, conv) <NEW_LINE> self.final_conv = conv(self.cf.end_filts, self.cf.num_seg_classes, ks=1, pad=0, norm=None, relu=None) | Build Retina Net architecture. | 625941b5f8510a7c17cf94f9 |
def is_same(arr1, arr2): <NEW_LINE> <INDENT> if isinstance(arr1, (list, tuple)): <NEW_LINE> <INDENT> return all([is_same(a1, a2) for a1, a2 in zip(arr1, arr2)]) <NEW_LINE> <DEDENT> return np.allclose(arr1, arr2) | Recursively check if 2 lists of array are equal. | 625941b563f4b57ef0000f17 |
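
A usage sketch over nested structures (the values are illustrative):

    import numpy as np

    a = [np.zeros(3), (np.ones(2), np.ones(2))]
    b = [np.zeros(3), (np.ones(2), np.ones(2))]
    print(is_same(a, b))  # True -- lists/tuples recurse, leaves compare via np.allclose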
def ellip(N, rp, rs, Wn, btype='low', analog=False, output='ba'): <NEW_LINE> <INDENT> return iirfilter(N, Wn, rs=rs, rp=rp, btype=btype, analog=analog, output=output, ftype='elliptic') | Elliptic (Cauer) digital and analog filter design.
Design an Nth order digital or analog elliptic filter and return
the filter coefficients in (B,A) or (Z,P,K) form.
Parameters
----------
N : int
The order of the filter.
rp : float
Provides the maximum ripple in the passband. (dB)
rs : float
Provides the minimum attenuation in the stop band. (dB)
Wn : array_like
A scalar or length-2 sequence giving the critical frequencies.
For digital filters, `Wn` is normalized from 0 to 1, where 1 is the
Nyquist frequency, pi radians / sample. (`Wn` is thus in
half-cycles / sample.)
For analog filters, `Wn` is in radians / second.
btype : {'lowpass', 'highpass', 'bandpass', 'bandstop'}, optional
The type of filter. Default is 'lowpass'.
analog : bool, optional
When True, return an analog filter, otherwise a digital filter is
returned.
output : {'ba', 'zpk'}, optional
Type of output: numerator/denominator ('ba') or pole-zero ('zpk').
Default is 'ba'.
Returns
-------
b, a : ndarray, ndarray
Numerator (`b`) and denominator (`a`) polynomials of the IIR filter.
Only returned if ``output='ba'``.
z, p, k : ndarray, ndarray, float
Zeros, poles, and system gain of the IIR filter transfer
function. Only returned if ``output='zpk'``.
See also
--------
ellipord | 625941b5a4f1c619b28afe37 |
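
A usage sketch against SciPy's implementation of this signature (the order, ripple, and cutoff values are illustrative):

    import numpy as np
    from scipy.signal import ellip, freqz

    # 4th-order lowpass: 1 dB passband ripple, 40 dB stopband attenuation,
    # cutoff at 0.3 of the Nyquist frequency
    b, a = ellip(4, rp=1, rs=40, Wn=0.3, btype='low', output='ba')
    w, h = freqz(b, a)
    print(20 * np.log10(np.maximum(np.abs(h), 1e-12)).max())  # ~0 dB peak passband gain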
def analyseEmotion(data): <NEW_LINE> <INDENT> matrix = {} <NEW_LINE> sid = SentimentIntensityAnalyzer() <NEW_LINE> previous_emotion = '' <NEW_LINE> previous_type = '' <NEW_LINE> for line in data: <NEW_LINE> <INDENT> if line['TURNTYPE'] in ['WYSIWYG', 'TBT']: <NEW_LINE> <INDENT> sentence = line['Translation'] <NEW_LINE> ss = sid.polarity_scores(sentence) <NEW_LINE> if ss['neg'] < ss['pos']: <NEW_LINE> <INDENT> current_emotion = 'pos' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_emotion = 'neg' <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> matrix[line['INTERFACEUSED']]['total'] += 1 <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> matrix[line['INTERFACEUSED']] = {} <NEW_LINE> matrix[line['INTERFACEUSED']]['total'] = 1 <NEW_LINE> <DEDENT> if current_emotion != previous_emotion: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> matrix[line['INTERFACEUSED']]['sum'] += 1 <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> matrix[line['INTERFACEUSED']]['sum'] = 1 <NEW_LINE> <DEDENT> <DEDENT> if previous_type != line['INTERFACEUSED'] and current_emotion != previous_emotion: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> matrix[line['INTERFACEUSED']]['sum'] -= 1 <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> previous_emotion = current_emotion <NEW_LINE> previous_type = line['INTERFACEUSED'] <NEW_LINE> <DEDENT> <DEDENT> for type in matrix: <NEW_LINE> <INDENT> average = matrix[type]['sum'] / matrix[type]['total'] <NEW_LINE> print( f'Total amount of emotional changes for {type}: {matrix[type]["sum"]}\nTotal amount of lines for {type}: {matrix[type]["total"]}\nAverage emotional change per line for {type}: {average}\n') | Average change of emotion per line per interface type | 625941b5fbf16365ca6f5faf |
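
The scoring inside relies on NLTK's VADER analyzer; a minimal sketch of the polarity call (requires the vader_lexicon resource to be downloaded once):

    import nltk
    from nltk.sentiment.vader import SentimentIntensityAnalyzer

    nltk.download('vader_lexicon')  # one-time resource fetch
    sid = SentimentIntensityAnalyzer()
    print(sid.polarity_scores('I love this interface'))
    # -> dict with 'neg', 'neu', 'pos', 'compound'; the code above only compares 'neg' vs 'pos'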
def softsign(features, name=None): <NEW_LINE> <INDENT> _ctx = _context.context() <NEW_LINE> if _ctx.in_graph_mode(): <NEW_LINE> <INDENT> _, _, _op = _op_def_lib._apply_op_helper( "Softsign", features=features, name=name) <NEW_LINE> _result = _op.outputs[:] <NEW_LINE> _inputs_flat = _op.inputs <NEW_LINE> _attrs = ("T", _op.get_attr("T")) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _attr_T, (features,) = _execute.args_to_matching_eager([features], _ctx) <NEW_LINE> _attr_T = _attr_T.as_datatype_enum <NEW_LINE> _inputs_flat = [features] <NEW_LINE> _attrs = ("T", _attr_T) <NEW_LINE> _result = _execute.execute(b"Softsign", 1, inputs=_inputs_flat, attrs=_attrs, ctx=_ctx, name=name) <NEW_LINE> <DEDENT> _execute.record_gradient( "Softsign", _inputs_flat, _attrs, _result, name) <NEW_LINE> _result, = _result <NEW_LINE> return _result | Computes softsign: `features / (abs(features) + 1)`.
Args:
features: A `Tensor`. Must be one of the following types: `float32`, `float64`, `int32`, `int64`, `uint8`, `int16`, `int8`, `uint16`, `half`.
name: A name for the operation (optional).
Returns:
A `Tensor`. Has the same type as `features`. | 625941b57b25080760e39250 |
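
A NumPy reference for the same activation, handy for checking values (a sketch, not the TensorFlow op itself):

    import numpy as np

    def softsign_ref(x):
        # elementwise x / (|x| + 1); output is bounded in (-1, 1)
        return x / (np.abs(x) + 1.0)

    print(softsign_ref(np.array([-2.0, 0.0, 2.0])))  # approx. [-0.667, 0., 0.667]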
def sanitize(name): <NEW_LINE> <INDENT> return name.replace( "&lt;", "<" ).replace( "&gt;", ">" ).replace( "&amp;", "&" ).replace( "< ", "<" ).replace( " >", ">" ).replace( " &", "&" ).replace( "& ", "&" ) | Sanitize the specified ``name`` for use with breathe directives.
**Parameters**
``name`` (:class:`python:str`)
The name to be sanitized.
**Return**
:class:`python:str`
The input ``name`` sanitized to use with breathe directives (primarily for use
with ``.. doxygenfunction::``). Replacements such as ``"&lt;" -> "<"`` are
performed, and spaces are removed (``"< " -> "<"``). Breathe is
particularly sensitive with respect to whitespace. | 625941b57047854f462a1202 |
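
A usage sketch on an entity-escaped template name (the input string is illustrative):

    print(sanitize('std::vector&lt; int &gt;'))  # -> 'std::vector<int>'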
def attach(self, ctx, cluster, data): <NEW_LINE> <INDENT> cluster.heathy_check_enable() <NEW_LINE> cluster.heathy_check_set_interval(self.interval) <NEW_LINE> return True | Hook for policy attach.
Initialize the health check mechanism for existing nodes in the cluster. | 625941b5d58c6744b4257a55
def find_common_prefix(strs): <NEW_LINE> <INDENT> common = [] <NEW_LINE> for cgroup in izip(*strs): <NEW_LINE> <INDENT> if all(x == cgroup[0] for x in cgroup[1:]): <NEW_LINE> <INDENT> common.append(cgroup[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return ''.join(common) | Given a list (iterable) of strings, return the longest common prefix.
>>> find_common_prefix(['abracadabra', 'abracadero', 'abranch'])
'abra'
>>> find_common_prefix(['abracadabra', 'abracadero', 'mt. fuji'])
'' | 625941b56fb2d068a760ee96 |
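
``izip`` is the Python 2 itertools spelling; on Python 3 the builtin ``zip`` behaves the same way, and the standard library already ships an equivalent — a sketch:

    import os.path

    def find_common_prefix_py3(strs):
        # commonprefix works character-wise, matching the behavior above
        return os.path.commonprefix(list(strs))

    assert find_common_prefix_py3(['abracadabra', 'abracadero', 'abranch']) == 'abra'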
def get_options(): <NEW_LINE> <INDENT> global opt <NEW_LINE> parser = argparse.ArgumentParser(description='Create PDF booklet') <NEW_LINE> parser.add_argument('file_in', help="Name of the input PDF file") <NEW_LINE> parser.add_argument('--debug', action="store_true", dest='debug', required=False, help="Additional features for debugging") <NEW_LINE> opt = parser.parse_args() | Parses the command line options | 625941b571ff763f4b549483 |
def getAction(self, gameState): <NEW_LINE> <INDENT> def expectimax(gameState): <NEW_LINE> <INDENT> actions = gameState.getLegalActions(0) <NEW_LINE> maxVal = float("-inf") <NEW_LINE> action = None <NEW_LINE> for each in actions: <NEW_LINE> <INDENT> succState = gameState.generateSuccessor(0, each) <NEW_LINE> v = self.minValue(succState, 0, 1) <NEW_LINE> if v > maxVal: <NEW_LINE> <INDENT> maxVal = v <NEW_LINE> action = each <NEW_LINE> <DEDENT> <DEDENT> return action <NEW_LINE> <DEDENT> return expectimax(gameState) | Returns the expectimax action using self.depth and self.evaluationFunction
All ghosts should be modeled as choosing uniformly at random from their
legal moves. | 625941b5f548e778e58cd370 |
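
For expectimax, the ghost (chance) layers average over uniformly random legal moves rather than minimizing; a sketch of that expectation step in isolation (the helper name and values are hypothetical):

    def chance_value(successor_values):
        # uniform chance node: expected value over the ghost's legal moves
        return sum(successor_values) / float(len(successor_values))

    print(chance_value([3.0, 5.0, 10.0]))  # 6.0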
def runTest(self): <NEW_LINE> <INDENT> find_q = worker.WorkQueue() <NEW_LINE> tag_q = worker.WorkQueue() <NEW_LINE> for rootdir in self.rootdirs: <NEW_LINE> <INDENT> find_q.put(rootdir) <NEW_LINE> <DEDENT> find_worker = find.Find(find_q, tag_q) <NEW_LINE> find_worker.start() <NEW_LINE> find_q.join() <NEW_LINE> find_worker.terminate() <NEW_LINE> self.assertEqual(tag_q.qsize(), (self.get_numroots() + self.get_numroots() * self.get_numdirs() + self.get_numroots() * self.get_numdirs() * self.get_numfiles()), "Failed to find all directories and files in the temp dir") <NEW_LINE> time.sleep(1) <NEW_LINE> self.assertFalse(find_worker.is_alive(), "Find has not terminated") | Simple test for the Find worker. | 625941b5462c4b4f79d1d4c4 |
def relu_backward(dout, cache): <NEW_LINE> <INDENT> x = cache <NEW_LINE> dx = (x > 0).astype(x.dtype) * dout <NEW_LINE> return dx | Computes the backward pass for a layer of rectified linear units (ReLUs).
Input:
- dout: Upstream derivatives, of any shape
- cache: Input x, of same shape as dout
Returns:
- dx: Gradient with respect to x | 625941b597e22403b379cd8d |
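
Pairing it with the matching forward pass makes the gradient rule concrete; a sketch with a quick elementwise check:

    import numpy as np

    def relu_forward(x):
        out = np.maximum(0, x)
        return out, x  # cache the input for the backward pass

    x = np.array([-1.0, 2.0, -3.0, 4.0])
    out, cache = relu_forward(x)
    dx = relu_backward(np.ones_like(x), cache)
    print(dx)  # [0. 1. 0. 1.] -- upstream gradient passes only where x > 0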
def to_file(self, fpath): <NEW_LINE> <INDENT> return utils.dict_to_json_file(self._dag_dict, fpath) | Save the DAG to the .dagpy file. | 625941b567a9b606de4a7cb1 |
@VALIDATOR <NEW_LINE> def state(row, index): <NEW_LINE> <INDENT> value = _read_value(row[index]) <NEW_LINE> state = us.states.lookup(value) <NEW_LINE> if state: <NEW_LINE> <INDENT> return state.abbr <NEW_LINE> <DEDENT> return value | Return the two-letter code for the given state.
| 625941b5009cb60464c631b2 |
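
The lookup comes from the third-party ``us`` package; a sketch of the underlying call (assuming the package is installed):

    import us

    print(us.states.lookup('California').abbr)  # 'CA'
    print(us.states.lookup('ca').abbr)          # 'CA' -- abbreviations resolve too
    print(us.states.lookup('not a state'))      # None, so the validator returns the raw value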
def get_simple_name(self): <NEW_LINE> <INDENT> return self.simple_name | :returns: StringType -- the file name without the file path included. | 625941b5bde94217f3682bf1 |
def setUp(self): <NEW_LINE> <INDENT> self.client = Client() <NEW_LINE> self.admin_user = get_user_model().objects.create_superuser( email='[email protected]', password='aim12345' ) <NEW_LINE> self.client.force_login(self.admin_user) <NEW_LINE> self.user = get_user_model().objects.create_user( email='[email protected]', password='aim56789', name='ahmad' ) | Creating test client, adding a new user,
make sure that user is logged into our test client,
and create a regular user that is not authenticated | 625941b53346ee7daa2b2b5d
def check_config(reporter, source_dir): <NEW_LINE> <INDENT> config_file = os.path.join(source_dir, '_config.yml') <NEW_LINE> config = load_yaml(config_file) <NEW_LINE> reporter.check_field(config_file, 'configuration', config, 'kind', 'lesson') <NEW_LINE> reporter.check_field(config_file, 'configuration', config, 'carpentry', ('swc', 'dc', 'lc', 'cp', 'incubator')) <NEW_LINE> reporter.check_field(config_file, 'configuration', config, 'title') <NEW_LINE> reporter.check_field(config_file, 'configuration', config, 'email') <NEW_LINE> for defaults in [ {'values': {'root': '.', 'layout': 'page'}}, {'values': {'root': '..', 'layout': 'episode'}, 'scope': {'type': 'episodes', 'path': ''}}, {'values': {'root': '..', 'layout': 'page'}, 'scope': {'type': 'extras', 'path': ''}} ]: <NEW_LINE> <INDENT> reporter.check(defaults in config.get('defaults', []), 'configuration', '"root" not set to "." in configuration') <NEW_LINE> <DEDENT> return config['life_cycle'] | Check configuration file. | 625941b53317a56b86939a5f |
def resnet50(pretrained=False, **kwargs): <NEW_LINE> <INDENT> model = ResNet_Mnist(Bottleneck, [3, 4, 6, 3], **kwargs) <NEW_LINE> if pretrained: <NEW_LINE> <INDENT> model.load_state_dict(model_zoo.load_url(model_urls['resnet50'])) <NEW_LINE> <DEDENT> return model | Constructs a ResNet-50 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet | 625941b599cbb53fe67929dc |