code: string (4–4.48k characters)
docstring: string (1–6.45k characters)
_id: string (24 characters)
@main.command() <NEW_LINE> @url_option <NEW_LINE> @click.option('--config', '-c', help='Module containing RQ settings.') <NEW_LINE> @click.option('--burst', '-b', is_flag=True, help='Run in burst mode (quit after all work is done)') <NEW_LINE> @click.option('--name', '-n', help='Specify a different name') <NEW_LINE> @click.option('--worker-class', '-w', default='rq.Worker', help='RQ Worker class to use') <NEW_LINE> @click.option('--job-class', '-j', default='rq.job.Job', help='RQ Job class to use') <NEW_LINE> @click.option('--queue-class', default='rq.Queue', help='RQ Queue class to use') <NEW_LINE> @click.option('--path', '-P', default='.', help='Specify the import path.') <NEW_LINE> @click.option('--results-ttl', help='Default results timeout to be used') <NEW_LINE> @click.option('--worker-ttl', type=int, help='Default worker timeout to be used') <NEW_LINE> @click.option('--verbose', '-v', is_flag=True, help='Show more output') <NEW_LINE> @click.option('--quiet', '-q', is_flag=True, help='Show less output') <NEW_LINE> @click.option('--sentry-dsn', envvar='SENTRY_DSN', help='Report exceptions to this Sentry DSN') <NEW_LINE> @click.option('--pid', help='Write the process ID number to a file at the specified path') <NEW_LINE> @click.argument('queues', nargs=-1) <NEW_LINE> def worker(url, config, burst, name, worker_class, job_class, queue_class, path, results_ttl, worker_ttl, verbose, quiet, sentry_dsn, pid, queues): <NEW_LINE> <INDENT> if path: <NEW_LINE> <INDENT> sys.path = path.split(':') + sys.path <NEW_LINE> <DEDENT> settings = read_config_file(config) if config else {} <NEW_LINE> url = url or settings.get('REDIS_URL') <NEW_LINE> queues = queues or settings.get('QUEUES', ['default']) <NEW_LINE> sentry_dsn = sentry_dsn or settings.get('SENTRY_DSN') <NEW_LINE> if pid: <NEW_LINE> <INDENT> with open(os.path.expanduser(pid), "w") as fp: <NEW_LINE> <INDENT> fp.write(str(os.getpid())) <NEW_LINE> <DEDENT> <DEDENT> setup_loghandlers_from_args(verbose, quiet) <NEW_LINE> conn = connect(url) <NEW_LINE> cleanup_ghosts(conn) <NEW_LINE> worker_class = import_attribute(worker_class) <NEW_LINE> queue_class = import_attribute(queue_class) <NEW_LINE> try: <NEW_LINE> <INDENT> queues = [queue_class(queue, connection=conn) for queue in queues] <NEW_LINE> w = worker_class(queues, name=name, connection=conn, default_worker_ttl=worker_ttl, default_result_ttl=results_ttl, job_class=job_class) <NEW_LINE> if sentry_dsn: <NEW_LINE> <INDENT> from raven import Client <NEW_LINE> from rq.contrib.sentry import register_sentry <NEW_LINE> client = Client(sentry_dsn) <NEW_LINE> register_sentry(client, w) <NEW_LINE> <DEDENT> w.work(burst=burst) <NEW_LINE> <DEDENT> except ConnectionError as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> sys.exit(1)
Starts an RQ worker.
625941ba5fcc89381b1e1563
def normalize_path(path): <NEW_LINE> <INDENT> return path.strip('\\').strip('/')
Normalizes a path, mainly for the purpose of stripping redundant path separators from it.
625941ba76e4537e8c351517
def unpack(self, msg): <NEW_LINE> <INDENT> msg._fields = {} <NEW_LINE> formats = self.msg_format.split(',') <NEW_LINE> buf = msg._buf[6:-2] <NEW_LINE> count = 0 <NEW_LINE> msg._recs = [] <NEW_LINE> fields = self.fields[:] <NEW_LINE> for fmt in formats: <NEW_LINE> <INDENT> size1 = struct.calcsize(fmt) <NEW_LINE> if size1 > len(buf): <NEW_LINE> <INDENT> raise UBloxError("%s INVALID_SIZE1=%u" % (self.name, len(buf))) <NEW_LINE> <DEDENT> f1 = list(struct.unpack(fmt, buf[:size1])) <NEW_LINE> i = 0 <NEW_LINE> while i < len(f1): <NEW_LINE> <INDENT> field = fields.pop(0) <NEW_LINE> (fieldname, alen) = ArrayParse(field) <NEW_LINE> if alen == -1: <NEW_LINE> <INDENT> msg._fields[fieldname] = f1[i] <NEW_LINE> if self.count_field == fieldname: <NEW_LINE> <INDENT> count = int(f1[i]) <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg._fields[fieldname] = [0] * alen <NEW_LINE> for a in range(alen): <NEW_LINE> <INDENT> msg._fields[fieldname][a] = f1[i] <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> buf = buf[size1:] <NEW_LINE> if len(buf) == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if self.count_field == '_remaining': <NEW_LINE> <INDENT> count = len(buf) / struct.calcsize(self.format2) <NEW_LINE> <DEDENT> if count == 0: <NEW_LINE> <INDENT> msg._unpacked = True <NEW_LINE> if len(buf) != 0: <NEW_LINE> <INDENT> raise UBloxError("EXTRA_BYTES=%u" % len(buf)) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> size2 = struct.calcsize(self.format2) <NEW_LINE> for c in range(count): <NEW_LINE> <INDENT> r = UBloxAttrDict() <NEW_LINE> if size2 > len(buf): <NEW_LINE> <INDENT> raise UBloxError("INVALID_SIZE=%u, " % len(buf)) <NEW_LINE> <DEDENT> f2 = list(struct.unpack(self.format2, buf[:size2])) <NEW_LINE> for i in range(len(self.fields2)): <NEW_LINE> <INDENT> r[self.fields2[i]] = f2[i] <NEW_LINE> <DEDENT> buf = buf[size2:] <NEW_LINE> msg._recs.append(r) <NEW_LINE> <DEDENT> if len(buf) != 0: <NEW_LINE> <INDENT> raise UBloxError("EXTRA_BYTES=%u" % len(buf)) <NEW_LINE> <DEDENT> msg._unpacked = True
unpack a UBloxMessage, creating the .fields and ._recs attributes in msg
625941ba3eb6a72ae02ec373
def test_0050_loopback_prepare_test(self): <NEW_LINE> <INDENT> self.create_loopback_interfaces(2) <NEW_LINE> for i in range(2): <NEW_LINE> <INDENT> intf = self.lo_interfaces[i] <NEW_LINE> intf.admin_up() <NEW_LINE> intf._local_ip4 = self.pg_interfaces[i].remote_ip4 <NEW_LINE> intf._local_ip4_prefix_len = 32 <NEW_LINE> intf.config_ip4() <NEW_LINE> intf._local_ip6 = self.pg_interfaces[i].remote_ip6 <NEW_LINE> intf._local_ip6_prefix_len = 128 <NEW_LINE> intf.config_ip6()
Create loopbacks overlapping with remote addresses
625941ba23e79379d52ee406
def _connection(self): <NEW_LINE> <INDENT> thread_id = threading.current_thread().ident <NEW_LINE> with self._shared_map_lock: <NEW_LINE> <INDENT> if thread_id in self._connections: <NEW_LINE> <INDENT> return self._connections[thread_id] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> conn = sqlite3.connect( self.path, timeout=beets.config['timeout'].as_number(), ) <NEW_LINE> conn.row_factory = sqlite3.Row <NEW_LINE> RegexpQuery.register(conn) <NEW_LINE> for prefix, query_class in plugins.queries().items(): <NEW_LINE> <INDENT> if issubclass(query_class, RegisteredFieldQuery): <NEW_LINE> <INDENT> query_class.register(conn) <NEW_LINE> <DEDENT> <DEDENT> self._connections[thread_id] = conn <NEW_LINE> return conn
Get a SQLite connection object to the underlying database. One connection object is created per thread.
625941ba091ae35668666e04
def statuscheck(self): <NEW_LINE> <INDENT> if self._checkout_record is None: <NEW_LINE> <INDENT> return self.make_return('nodb') <NEW_LINE> <DEDENT> status = self.db_state <NEW_LINE> return self.make_return(STATUS_OK if status=='available' else status)
Determine if the db exists, and if it does, can it be checked out. :return: 'available' if db is available for checkout, 'checkedout' if already checked out, 'nodb' if no db.
625941bad7e4931a7ee9ddbb
def version(self): <NEW_LINE> <INDENT> status, headers, data = self.resource.get_json() <NEW_LINE> return data['version']
The version string of the CouchDB server. Note that this results in a request being made, and can also be used to check for the availability of the server. :rtype: `unicode`
625941ba462c4b4f79d1d56f
def test_merge_overlaps_default_method(self): <NEW_LINE> <INDENT> tr1 = Trace(data=np.zeros(7)) <NEW_LINE> tr2 = Trace(data=np.ones(7)) <NEW_LINE> tr2.stats.starttime = tr1.stats.starttime + 5 <NEW_LINE> st = Stream([tr1, tr2]) <NEW_LINE> st.merge() <NEW_LINE> self.assertEqual(len(st), 1) <NEW_LINE> self.assertTrue(isinstance(st[0].data, np.ma.masked_array)) <NEW_LINE> self.assertEqual(st[0].data.tolist(), [0, 0, 0, 0, 0, None, None, 1, 1, 1, 1, 1]) <NEW_LINE> tr1 = Trace(data=np.arange(7)) <NEW_LINE> tr2 = Trace(data=np.arange(5, 10)) <NEW_LINE> tr2.stats.starttime = tr1.stats.starttime + 5 <NEW_LINE> st = Stream([tr1, tr2]) <NEW_LINE> st.merge() <NEW_LINE> self.assertEqual(len(st), 1) <NEW_LINE> self.assertTrue(isinstance(st[0].data, np.ndarray)) <NEW_LINE> np.testing.assert_array_equal(st[0].data, np.arange(10)) <NEW_LINE> tr1 = Trace(data=np.arange(10)) <NEW_LINE> tr2 = Trace(data=np.arange(5, 7)) <NEW_LINE> tr2.stats.starttime = tr1.stats.starttime + 5 <NEW_LINE> st = Stream([tr1, tr2]) <NEW_LINE> st.merge() <NEW_LINE> self.assertEqual(len(st), 1) <NEW_LINE> self.assertTrue(isinstance(st[0].data, np.ndarray)) <NEW_LINE> np.testing.assert_array_equal(st[0].data, np.arange(10)) <NEW_LINE> tr1 = Trace(data=np.zeros(10)) <NEW_LINE> tr2 = Trace(data=np.ones(2)) <NEW_LINE> tr2.stats.starttime = tr1.stats.starttime + 5 <NEW_LINE> st = Stream([tr1, tr2]) <NEW_LINE> st.merge() <NEW_LINE> self.assertEqual(len(st), 1) <NEW_LINE> self.assertTrue(isinstance(st[0].data, np.ma.masked_array)) <NEW_LINE> self.assertEqual(st[0].data.tolist(), [0, 0, 0, 0, 0, None, None, 0, 0, 0])
Test the merge method of the Stream object.
625941ba167d2b6e31218a37
def test_worker_tenant_no_force(self): <NEW_LINE> <INDENT> set_non_conv_tenant("tid", self) <NEW_LINE> self.addCleanup(set_config_data, {}) <NEW_LINE> d = controller.delete_group( "disp", self.log, 'transid', self.group, False) <NEW_LINE> self.assertIsNone(self.successResultOf(d)) <NEW_LINE> self.group.delete_group.assert_called_once_with() <NEW_LINE> self.assertFalse(self.mock_tcd.called)
Calls group.delete_group() for worker tenant when deleting normally
625941ba2eb69b55b151c74b
def popMessages(self): <NEW_LINE> <INDENT> self.logger.debug("Popping messages") <NEW_LINE> with self.lock: <NEW_LINE> <INDENT> messages = list(self.messages) <NEW_LINE> self.messages = [] <NEW_LINE> return messages
Pops all messages that are stored and returns them :return: list List of the stored messages that were removed
625941ba2ae34c7f2600cfd1
def register(model): <NEW_LINE> <INDENT> moderator.register(model, Akismet)
Just a wrapper around django.contrib.comments.moderation.register. Its only argument is the model for comment moderation.
625941ba9f2886367277a730
def generate_nonce(): <NEW_LINE> <INDENT> return ''.join([uuid.uuid1().hex, uuid.uuid4().hex])
Generate a nonce to be used with OAuth.
625941ba85dfad0860c3acf8
def getObject(self, ref): <NEW_LINE> <INDENT> return self.objects.getByReference(ref)
Gets an object based on a reference. @param ref: The reference for the object. @type ref: C{int} @return: The referenced object or C{None} if not found.
625941ba462c4b4f79d1d570
def corr(r, K, C, M, u, udot, uddot, dt, beta, gamma): <NEW_LINE> <INDENT> K_eff = K + (gamma*dt)/(beta*dt**2)*C + 1/(beta*dt**2)*M <NEW_LINE> du = solve(K_eff, r) <NEW_LINE> u = u + du <NEW_LINE> udot = udot + gamma*dt/(beta*dt**2)*du <NEW_LINE> uddot = uddot + 1/(beta*dt**2)*du <NEW_LINE> return u, udot, uddot, du
Corrector step in non-linear Newmark algorithm. Parameters: ----------- r : double Residual forces. K : double Next-step (tangent) stiffness matrix (time step k+1), ndofs-by-ndofs Numpy array. C : double Damping matrix, ndofs-by-ndofs Numpy array. M : double Mass matrix, ndofs-by-ndofs Numpy array. u : double Next-step displacement, time step k+1, iteration i, ndofs-by-1 Numpy array. udot : double Next-step velocity, time step k+1, iteration i, ndofs-by-1 Numpy array. uddot : double Next-step acceleration, time step k+1, iteration i, ndofs-by-1 Numpy array. dt : double Current time step, from k to k+1. beta : double Scalar value specifying the beta parameter. gamma : double Scalar value specifying the gamma parameter. Returns: ----------- u : double Predicted next-step displacement, time step k+1, iteration i+1, ndofs-by-1 Numpy array. udot : double Predicted next-step velocity, time step k+1, iteration i+1, ndofs-by-1 Numpy array. uddot : double Predicted next-step acceleration, time step k+1, iteration i+1, ndofs-by-1 Numpy array. norm_r : double Frobenius norm of the residual force vector. norm_u : double Frobenius norm of the added displacement.
625941ba627d3e7fe0d68cee
def macports(self, packager, **kwds): <NEW_LINE> <INDENT> package = 'vtk' <NEW_LINE> try: <NEW_LINE> <INDENT> self.version, _ = packager.info(package=package) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> msg = 'the package {!r} is not installed'.format(package) <NEW_LINE> raise self.ConfigurationError(configurable=self, errors=[msg]) <NEW_LINE> <DEDENT> self.version, _ = packager.info(package=package) <NEW_LINE> contents = tuple(packager.contents(package=package)) <NEW_LINE> header = 'vtkVersion.h' <NEW_LINE> incdir = packager.findfirst(target=header, contents=contents) <NEW_LINE> self.incdir = [ incdir ] if incdir else [] <NEW_LINE> stem = self.libgen('CommonCore') <NEW_LINE> libvtk = self.pyre_host.dynamicLibrary(stem) <NEW_LINE> libdir = packager.findfirst(target=libvtk, contents=contents) <NEW_LINE> self.libdir = [ libdir ] if libdir else [] <NEW_LINE> self.libraries = stem <NEW_LINE> self.prefix = self.commonpath(folders=self.incdir+self.libdir) <NEW_LINE> return
Attempt to repair my configuration
625941bad99f1b3c44c67436
def get_load_times(self, asset_type): <NEW_LINE> <INDENT> load_times = [] <NEW_LINE> search_str = "{0}_load_time".format(asset_type) <NEW_LINE> for har_page in self.pages: <NEW_LINE> <INDENT> val = getattr(har_page, search_str, None) <NEW_LINE> if val is not None: <NEW_LINE> <INDENT> load_times.append(val) <NEW_LINE> <DEDENT> <DEDENT> return load_times
Just a ``list`` of the load times of a certain asset type for each page :param asset_type: ``str`` of the asset type to return load times for
625941bad486a94d0b98dfec
def getWordScore(word, n): <NEW_LINE> <INDENT> if len(word) <= 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> points = 0 <NEW_LINE> for i in word: <NEW_LINE> <INDENT> if i in SCRABBLE_LETTER_VALUES: <NEW_LINE> <INDENT> points += SCRABBLE_LETTER_VALUES[i] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> points *= len(word) <NEW_LINE> if len(word) == n: <NEW_LINE> <INDENT> points += 50 <NEW_LINE> <DEDENT> return points
Returns the score for a word. Assumes the word is a valid word. The score for a word is the sum of the points for letters in the word, multiplied by the length of the word, PLUS 50 points if all n letters are used on the first turn. Letters are scored as in Scrabble; A is worth 1, B is worth 3, C is worth 3, D is worth 2, E is worth 1, and so on (see SCRABBLE_LETTER_VALUES) word: string (lowercase letters) n: integer (HAND_SIZE; i.e., hand size required for additional points) returns: int >= 0
625941babde94217f3682c9b
def parseEndTime(s): <NEW_LINE> <INDENT> if len(s) > 1 and s[0:2].isdigit(): <NEW_LINE> <INDENT> time = int(s[0:2]) <NEW_LINE> rest = s[2:] <NEW_LINE> <DEDENT> elif len(s) > 0 and s[0].isdigit(): <NEW_LINE> <INDENT> time = int(s[0]) <NEW_LINE> rest = s[1:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Error: could not parse time on string ' + s) <NEW_LINE> return <NEW_LINE> <DEDENT> if time < 11: <NEW_LINE> <INDENT> time += 12 <NEW_LINE> <DEDENT> if rest.startswith(':'): <NEW_LINE> <INDENT> if not rest.startswith(':00'): <NEW_LINE> <INDENT> time += 1 <NEW_LINE> <DEDENT> rest = rest[3:] <NEW_LINE> <DEDENT> return time, rest
Parse a single hour in range 11am-10pm. Converts to 24h time
625941bacdde0d52a9e52ecf
def base_poset(self): <NEW_LINE> <INDENT> return self._base_poset
Return the base poset from which everything of ``self`` was constructed. Elements of the base poset correspond to the basic ``analytic properties``. EXAMPLES:: sage: from sage.modular.modform_hecketriangle.analytic_type import AnalyticType sage: from sage.combinat.posets.posets import FinitePoset sage: AT = AnalyticType() sage: P = AT.base_poset() sage: P Finite poset containing 5 elements sage: isinstance(P, FinitePoset) True sage: P.is_lattice() False sage: P.is_finite() True sage: P.cardinality() 5 sage: P.is_bounded() False sage: P.list() [quasi, cusp, holo, weak, mero] sage: len(P.relations()) 11 sage: P.cover_relations() [[cusp, holo], [holo, weak], [weak, mero]] sage: P.has_top() False sage: P.has_bottom() False
625941ba66673b3332b91f31
def get_label(row): <NEW_LINE> <INDENT> return row[6]
Extract the active speaker label from the annotations.
625941ba21a7993f00bc7b8a
def SavePyPlotToFile(self, extender:str = None, orientation:str = 'landscape', image_type:str = 'png'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if extender is None: <NEW_LINE> <INDENT> self._pyplot_figure.savefig((self._model_description+'plot.'+image_type), orientation=orientation) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._pyplot_figure.savefig((self._model_description+extender+'.'+image_type), orientation=orientation) <NEW_LINE> <DEDENT> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> template = "An exception of type {0} occurred in [PlotSaver.SavePyPlotToFile]. Arguments:\n{1!r}" <NEW_LINE> message = template.format(type(ex).__name__, ex.args) <NEW_LINE> print(message) <NEW_LINE> print(ex)
This function is a simple wrapper for the PyPlot savefig function with default values. :param extender:str: extender for the filename [Default None] :param orientation:str: print orientation [Default 'landscape'] :param image_type:str: image file type [Default 'png']
625941ba8a43f66fc4b53f09
def check_input(self): <NEW_LINE> <INDENT> param1 = self.input['param1'] <NEW_LINE> param2 = self.input['param2'] <NEW_LINE> if not isinstance(param1, (int, float, complex)): <NEW_LINE> <INDENT> raise Exception("Error: %s is not a valid number!" % str(param1)) <NEW_LINE> <DEDENT> if not isinstance(param2, (int, float, complex)): <NEW_LINE> <INDENT> raise Exception("Error: %s is not a valid number!" % str(param2))
Checks if inputs are numbers Raises an Exception if inputs are invalid
625941ba9b70327d1c4e0c73
def __syntax(self, text): <NEW_LINE> <INDENT> s = pattern.en.parsetree(text, relations = True, lemmata = True) <NEW_LINE> text_chunks = [] <NEW_LINE> for sentence in s: <NEW_LINE> <INDENT> out = "" <NEW_LINE> for chunk in sentence.chunks: <NEW_LINE> <INDENT> out += str(chunk.type) <NEW_LINE> <DEDENT> text_chunks.append(out) <NEW_LINE> <DEDENT> text_chunks_out = [" ".join(text_chunks)] <NEW_LINE> return (self.chunk_vectorizer.transform(text_chunks_out),)
Use the pattern sentence tree parsing tool to split up the sentence into its chunk permutation :param title: :param text: :return: (chunk permutations of each type of the entire text)
625941baf7d966606f6a9ea8
def GetFixedImage(self): <NEW_LINE> <INDENT> return _itkPDEDeformableRegistrationFilterPython.itkPDEDeformableRegistrationFilterIF3IF3IVF33_GetFixedImage(self)
GetFixedImage(self) -> itkImageF3
625941badd821e528d63b04a
def copy_Z(Z): <NEW_LINE> <INDENT> N = copy_N([Z[1], Z[2]]) <NEW_LINE> return [Z[0], N[0], N[1]]
Copy an integer number.
625941ba85dfad0860c3acf9
@api_view(['GET']) <NEW_LINE> def get_users_by_email(request, email): <NEW_LINE> <INDENT> users = User.objects.filter(is_staff=False, is_superuser=False, username__startswith=email)[:5] <NEW_LINE> for user in users: <NEW_LINE> <INDENT> if not hasattr(user, 'profile'): <NEW_LINE> <INDENT> request.user.save() <NEW_LINE> <DEDENT> <DEDENT> serializer = UserSerializer(users, many=True) <NEW_LINE> return Response(serializer.data)
Returns user object using user email address :param request: :param email: :return: 200 successful
625941ba31939e2706e4cd0f
def test_message_send_later_time_sent_is_curr_time(user_1, user_2, public_channel_1): <NEW_LINE> <INDENT> curr_time = int(datetime.now(tz=timezone.utc).timestamp()) <NEW_LINE> channel.channel_join(user_2['token'], public_channel_1['channel_id']) <NEW_LINE> message.message_sendlater(user_1['token'], public_channel_1['channel_id'], "Hi", curr_time) <NEW_LINE> message_list = channel.channel_messages(user_1['token'], public_channel_1['channel_id'], 0) <NEW_LINE> message_count = 0 <NEW_LINE> for msg in message_list['messages']: <NEW_LINE> <INDENT> message_count += 1 <NEW_LINE> assert msg['time_created'] == curr_time <NEW_LINE> <DEDENT> assert message_count == 1 <NEW_LINE> clear()
Testing a case where time sent is the current time
625941ba16aa5153ce362318
def search(self, word): <NEW_LINE> <INDENT> curNode = self.root <NEW_LINE> for i in word: <NEW_LINE> <INDENT> if i not in curNode: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> curNode = curNode[i] <NEW_LINE> <DEDENT> if curNode.get("#", -1) != -1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
Returns if the word is in the trie. :type word: str :rtype: bool
625941ba090684286d50eb81
def print_data(data_dict): <NEW_LINE> <INDENT> print("id, age, karma, reciprocal, result") <NEW_LINE> for i in range(len(data_dict["id"])): <NEW_LINE> <INDENT> print(( data_dict["id"][i], data_dict["age"][i], data_dict["karma"][i], will_reciprocate(data_dict, i), data_dict["result"][i] ))
Prints the data for examining or debugging purposes.
625941bade87d2750b85fc2e
def testRemovingWeightsFromLinkedPoints(self): <NEW_LINE> <INDENT> self._runTest(starts=[1,2], ids=['1','2'], edges=['2','1'], weights=[[4],[5]], removeWeights=True, expStarts=[1,2], expIds=['1','2'], expEdges=['2','1'], expTrackFormatType="Linked points")
Removing both links and values :return:
625941bacc40096d615957f3
def schedule(time, func, args): <NEW_LINE> <INDENT> return ((time, func, args) for i in cycle([1]))
Utility function which creates a schedule appropriate for initializing a SchedulerPlayer with. @param time: Time ugen @param func: A function to call @param args: Keyword arguments to C{func}
625941ba6fb2d068a760ef3b
def add_user(username, password, email, role_type): <NEW_LINE> <INDENT> Session = sessionmaker(bind=engine) <NEW_LINE> s = Session() <NEW_LINE> user = User(username, password, email, role_type) <NEW_LINE> s.add(user) <NEW_LINE> s.commit() <NEW_LINE> s.close()
adds user to the database :param username: username to add :param password: password to attach to username :param email: user email :param role_type: user role :return:
625941ba44b2445a33931f40
def lock_all (self, *args, **kw): <NEW_LINE> <INDENT> self.lock(*self.tid.keys())
event handler: locks all registered callbacks;
625941bad53ae8145f87a116
def __init__(self, shieldsheet, projsheet): <NEW_LINE> <INDENT> varis = pd.DataFrame(pd.read_excel( dirs.in_dir+'ShieldProperties.xlsx', sheet_name=shieldsheet, index_col=0, header=0, dtype={'denotion': str, 'Value': float}, engine='openpyxl')) <NEW_LINE> varip = pd.DataFrame(pd.read_excel( dirs.in_dir+'ProjectileProperties.xlsx', sheet_name=projsheet, index_col=0, header=0, dtype={'Value': float}, engine='openpyxl')) <NEW_LINE> varo = varis.values <NEW_LINE> vara = varip.values <NEW_LINE> self.S1 = varo[0, 1] <NEW_LINE> self.t_ob = varo[1, 1] <NEW_LINE> self.rho_b = varo[2, 1] <NEW_LINE> self.Arho_b = varo[3, 1] <NEW_LINE> self.Sigma = varo[4, 1] <NEW_LINE> self.t_b = varo[5, 1] <NEW_LINE> self.S2 = varo[6, 1] <NEW_LINE> self.t_wall = varo[7, 1] <NEW_LINE> self.di = vara[0, 0] <NEW_LINE> self.rho_p = vara[1, 0] <NEW_LINE> self.impact_angle = 0 <NEW_LINE> self.vel = 1 <NEW_LINE> self.rngdcrit = [] <NEW_LINE> self.Arho_max = 3 <NEW_LINE> self.succ_rate = 95 <NEW_LINE> self.col = random.randint(0,255)/255 <NEW_LINE> self.lbl = shieldsheet <NEW_LINE> self.maxvel = 16 <NEW_LINE> self.minvel = 0.001 <NEW_LINE> self.v_shat = 3 <NEW_LINE> self.v_vap = 7
This function defines the variables from a given shield sheet and projectile sheet
625941ba9f2886367277a731
def Round_robin(self,task): <NEW_LINE> <INDENT> length = len(self.servers) <NEW_LINE> Success = False <NEW_LINE> for _ in range(length): <NEW_LINE> <INDENT> server = self.servers[self.RR_index] <NEW_LINE> if SAL(task,server): <NEW_LINE> <INDENT> Success = True <NEW_LINE> break <NEW_LINE> <DEDENT> self.RR_index = (self.RR_index + 1) % length <NEW_LINE> <DEDENT> if Success: <NEW_LINE> <INDENT> return self.servers[self.RR_index] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
Round robin selection Args: task: pointer of the task Returns: Pointer of the server, otherwise None
625941baa17c0f6771cbdef4
def itkMaskImageFilterID3IUL3ID3_cast(*args): <NEW_LINE> <INDENT> return _itkMaskImageFilterPython.itkMaskImageFilterID3IUL3ID3_cast(*args)
itkMaskImageFilterID3IUL3ID3_cast(itkLightObject obj) -> itkMaskImageFilterID3IUL3ID3
625941ba1f5feb6acb0c49f5
def beam_search(self, source_tensor, beam_size): <NEW_LINE> <INDENT> batch_size = source_tensor.size(1) <NEW_LINE> assert batch_size == 1 <NEW_LINE> encoder_init_hidden = torch.zeros(1, batch_size, self.params.hidden_layer_units, device=device) <NEW_LINE> encoder_word_embeddings = self.embedding(source_tensor) <NEW_LINE> encoder_outputs, encoder_hidden = self.encoder(encoder_word_embeddings, encoder_init_hidden) <NEW_LINE> encoder_outputs = encoder_outputs.expand(-1, beam_size, -1).contiguous() <NEW_LINE> decoder_hidden_cur_step = encoder_hidden.expand(-1, beam_size, -1).contiguous() <NEW_LINE> be = Beam(beam_size, self.special_tokens) <NEW_LINE> step = 0 <NEW_LINE> while step <= self.params.max_dec_steps: <NEW_LINE> <INDENT> decoder_input_cur_step = be.states[-1] <NEW_LINE> decoder_cur_word_embedding = self.embedding(decoder_input_cur_step) <NEW_LINE> decoder_output_cur_step, decoder_hidden_cur_step = self.decoder( decoder_cur_word_embedding, decoder_hidden_cur_step, encoder_outputs ) <NEW_LINE> if be.advance(decoder_output_cur_step): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> step += 1 <NEW_LINE> <DEDENT> result_tokens = be.trace(0) <NEW_LINE> output_token_idx = torch.tensor(result_tokens, device=device, dtype=torch.long).unsqueeze(1).expand(-1, batch_size) <NEW_LINE> return output_token_idx, torch.tensor(0., device=device)
Use beam search to generate summaries one by one. :param source_tensor: (src seq len, batch size), batch size need to be 1. :param beam_size: beam search size :return: same as forward
625941ba4e696a04525c92ed
def SetOutputMinimum(self, *args): <NEW_LINE> <INDENT> return _itkIntensityWindowingImageFilterPython.itkIntensityWindowingImageFilterIUC2IUC2_SetOutputMinimum(self, *args)
SetOutputMinimum(self, unsigned char _arg)
625941ba8a349b6b435e8015
def _addcode(self, code): <NEW_LINE> <INDENT> self.shell.set_next_input(code)
Shorthand for self.shell.set_next_input
625941ba15fb5d323cde09ab
def parent(self, p): <NEW_LINE> <INDENT> node = self._validate(p) <NEW_LINE> return self._make_position(node._parent)
Return the Position of p's parent (or None if p is the root).
625941ba377c676e9127204b
@app.route('/index', methods = ['GET','POST']) <NEW_LINE> @login_required <NEW_LINE> def index(): <NEW_LINE> <INDENT> users = DatabaseManager.getAllUsers() <NEW_LINE> topics = DatabaseManager.getAllPublicTopics() <NEW_LINE> comments = DatabaseManager.getAllComments() <NEW_LINE> numNots = DatabaseManager.countNotifications(current_user) <NEW_LINE> form = FindUserForm() <NEW_LINE> if form.validate_on_submit(): <NEW_LINE> <INDENT> user = DatabaseManager.getUserByUsername(form.username.data) <NEW_LINE> if user is None: <NEW_LINE> <INDENT> flash('No such user exists','error') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return redirect(url_for('generalProfile',id=user.id)) <NEW_LINE> <DEDENT> <DEDENT> return render_template('index.html', title='Home', users=users, topics=topics, comments=comments, form=form, numNots=numNots)
loads the index page
625941ba99fddb7c1c9de233
def removeDuplicates(self, nums): <NEW_LINE> <INDENT> if not nums: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> j = 1 <NEW_LINE> for i in range(1, len(nums)): <NEW_LINE> <INDENT> if nums[i] != nums[i-1]: <NEW_LINE> <INDENT> nums[j] = nums[i] <NEW_LINE> j += 1 <NEW_LINE> <DEDENT> <DEDENT> return j
:type nums: List[int] :rtype: int
625941ba8c3a87329515825f
@pytest.fixture(scope="module", params=test_data.values(), ids=list(test_data.keys())) <NEW_LINE> def option_data(request): <NEW_LINE> <INDENT> yield request.param
Return the test data.
625941ba009cb60464c6325c
def __init__(self, name=None, zigbee_channel=None, bridge_id=None, mac=None, dhcp=None, ip_address=None, netmask=None, gateway=None, proxy_address=None, proxy_port=None, utc=None, localtime=None, timezone=None, model_id=None, datastore_version=None, sw_version=None, api_version=None, sw_update=None, sw_update_2=None, link_button=None, portal_services=None, portal_connection=None, portal_state=None, internet_services=None, factory_new=None, replaces_bridge_id=None, backup=None, starterkit_id=None, whitelist=None): <NEW_LINE> <INDENT> super().__init__(name=name, zigbee_channel=zigbee_channel, dhcp=dhcp, ip_address=ip_address, netmask=netmask, gateway=gateway, proxy_address=proxy_address, proxy_port=proxy_port, utc=utc, timezone=timezone, sw_update=sw_update, link_button=link_button) <NEW_LINE> self.__bridge_id = bridge_id <NEW_LINE> self.__mac = mac <NEW_LINE> self.__localtime = localtime <NEW_LINE> self.__model_id = model_id <NEW_LINE> self.__datastore_version = datastore_version <NEW_LINE> self.__sw_version = sw_version <NEW_LINE> self.__api_version = api_version <NEW_LINE> self.__sw_update_2 = sw_update_2 <NEW_LINE> self.__portal_services = Datum.bool(portal_services) <NEW_LINE> self.__portal_connection = portal_connection <NEW_LINE> self.__portal_state = portal_state <NEW_LINE> self.__internet_services = internet_services <NEW_LINE> self.__factory_new = Datum.bool(factory_new) <NEW_LINE> self.__replaces_bridge_id = replaces_bridge_id <NEW_LINE> self.__backup = backup <NEW_LINE> self.__starterkit_id = starterkit_id <NEW_LINE> self.__whitelist = whitelist
Constructor
625941ba0a366e3fb873e6b8
def get_items(cata_name): <NEW_LINE> <INDENT> items = session.query(Item).join(Catalog).filter( Catalog.name == cata_name).all() <NEW_LINE> return items
Return all items of the selected category
625941bafff4ab517eb2f2da
def delete(self, name, project=None): <NEW_LINE> <INDENT> raise NotImplementedError
Base method for deleting a queue. :param name: The queue name :param project: Project id
625941badc8b845886cb53d5
def get_rss_feed_parts(content, overtime=False): <NEW_LINE> <INDENT> tree = etree.fromstring(content, parser=etree.XMLParser(recover=overtime)) <NEW_LINE> if tree is None: <NEW_LINE> <INDENT> raise NotAnRSSFeedError('Tree cannot be parsed.') <NEW_LINE> <DEDENT> if not _is_probably_an_rss_feed(tree): <NEW_LINE> <INDENT> raise NotAnRSSFeedError('This content doesn\'t seem to be an RSS feed.') <NEW_LINE> <DEDENT> channel = tree.find('{*}channel') <NEW_LINE> channel = channel if channel is not None else tree <NEW_LINE> nsmatch = re.match('\{.*\}', channel.tag) <NEW_LINE> defaultns = nsmatch.group(0) if nsmatch else '' <NEW_LINE> return (channel, defaultns)
Given the byte content of what is supposed to be an RSS feed, return the channel element tree and the default namespace. :throws:
625941ba1f037a2d8b94609f
def has_child(self, guide): <NEW_LINE> <INDENT> guide = self.validate(guide) <NEW_LINE> has_child = False <NEW_LINE> if self.exists(): <NEW_LINE> <INDENT> has_child = guide in self.children <NEW_LINE> <DEDENT> return has_child
Is input guide an immediate child of this guide? :param guide: Guide :type guide: Guide, str :returns: If the guide is a child :rtype: boolean **Example:** >>> root.has_child("L_hip_0_gde") # Result: True #
625941baf8510a7c17cf95a4
def check_option(self, option, label, value): <NEW_LINE> <INDENT> assert isinstance(option, Option) <NEW_LINE> assert option.label == label <NEW_LINE> assert option.value == value
Perform common tests on an ``Option``. I check the option's class, label, and value.
625941ba3539df3088e2e1ec
def is_queued(self, subject_id): <NEW_LINE> <INDENT> path = self.get_subject_filepath(subject_id, 'queue') <NEW_LINE> return os.path.exists(path)
Returns True if the subject identifier is in our cache queue. :param subject_id: Subject ID
625941bad6c5a10208143ee8
def __init__(self, title,objects,attributes,rels): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.objects = objects <NEW_LINE> self.attributes = attributes <NEW_LINE> self.rels = rels
Initialize a concept with given extent and intent
625941baeab8aa0e5d26d9ff
def py_27_fix(config): <NEW_LINE> <INDENT> if not sys.version_info > (3, 0): <NEW_LINE> <INDENT> path_list = [] <NEW_LINE> if config.get("ADDITIONAL_SCRIPTS"): <NEW_LINE> <INDENT> path_to_directory = os.path.dirname(config.get("ADDITIONAL_SCRIPTS")) <NEW_LINE> path = "" <NEW_LINE> for directories in path_to_directory.split(os.sep): <NEW_LINE> <INDENT> path += directories + os.sep <NEW_LINE> init_file = path + os.sep + "__init__.py" <NEW_LINE> if not os.path.isfile(init_file): <NEW_LINE> <INDENT> open(init_file, 'w').close() <NEW_LINE> path_list.append(init_file) <NEW_LINE> <DEDENT> <DEDENT> config["DYNAMIC_BUILD_INIT_FILES"] = ",".join(path_list) <NEW_LINE> <DEDENT> <DEDENT> return config
Prepares the build for Python 2.7. :param config: The environment variables used in the build process :type config: Dictionary :returns: config dictionary :rtype: Dictionary
625941ba6e29344779a624b6
def classSummary(self, data): <NEW_LINE> <INDENT> class_dictionary = self.classSeparate(data) <NEW_LINE> summary = {} <NEW_LINE> for class_value, class_feature in class_dictionary.items(): <NEW_LINE> <INDENT> summary[class_value] = self.dataSummary(class_feature) <NEW_LINE> <DEDENT> return summary
Creates a dictionary summary of the mean, standard deviation, and length of 'n' features. Support function. Returns: var_classSummary (dictionary) dictionary summary with class values as keys.
625941ba5f7d997b8717493c
def learning_rule(self, e, x): <NEW_LINE> <INDENT> return np.zeros(len(x))
This functions computes the increment of adaptive weights. **Args:** * `e` : error of the adaptive filter (1d array) * `x` : input matrix (2d array) **Returns** * increments of adaptive weights - result of adaptation
625941bad58c6744b4257b01
def azel2radec( az_deg: float, el_deg: float, lat_deg: float, lon_deg: float, time: datetime, *, use_astropy: bool = True ) -> tuple[float, float]: <NEW_LINE> <INDENT> if use_astropy and Time is not None: <NEW_LINE> <INDENT> obs = EarthLocation(lat=lat_deg * u.deg, lon=lon_deg * u.deg) <NEW_LINE> direc = AltAz( location=obs, obstime=Time(str2dt(time)), az=az_deg * u.deg, alt=el_deg * u.deg ) <NEW_LINE> sky = SkyCoord(direc.transform_to(ICRS())) <NEW_LINE> return sky.ra.deg, sky.dec.deg <NEW_LINE> <DEDENT> return vazel2radec(az_deg, el_deg, lat_deg, lon_deg, time)
viewing angle (az, el) to sky coordinates (ra, dec) Parameters ---------- az_deg : float azimuth [degrees clockwise from North] el_deg : float elevation [degrees above horizon (neglecting aberration)] lat_deg : float observer latitude [-90, 90] lon_deg : float observer longitude [-180, 180] (degrees) time : datetime.datetime or str time of observation use_astropy : bool, optional default use astropy. Returns ------- ra_deg : float ecliptic right ascension (degrees) dec_deg : float ecliptic declination (degrees)
625941ba7b180e01f3dc46a5
def image_folders_id_replace_post(self, id, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('callback'): <NEW_LINE> <INDENT> return self.image_folders_id_replace_post_with_http_info(id, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.image_folders_id_replace_post_with_http_info(id, **kwargs) <NEW_LINE> return data
Replace attributes for a model instance and persist it into the data source. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.image_folders_id_replace_post(id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str id: Model id (required) :param ImageFolder data: Model instance data :return: ImageFolder If the method is called asynchronously, returns the request thread.
625941ba851cf427c661a3b3
def draw_screen(self): <NEW_LINE> <INDENT> if not self.screen_size: <NEW_LINE> <INDENT> self.screen_size = self.screen.get_cols_rows() <NEW_LINE> <DEDENT> canvas = self.widget.render(self.screen_size, focus=True) <NEW_LINE> self.screen.draw_screen(self.screen_size, canvas)
Render the widgets and paint the screen. This function is called automatically from run() but may be called additional times if repainting is required without also processing input.
625941ba21a7993f00bc7b8b
def testModifiedSetUncover( self ): <NEW_LINE> <INDENT> game = minesweeper.Game() <NEW_LINE> game.Uncover( 4, 6 ) <NEW_LINE> self.assertEqual( True, game.IsModified() )
The modified flag has to be set automatically when uncovering a cell.
625941bafff4ab517eb2f2db
def convertPDF(self, infile): <NEW_LINE> <INDENT> self.logger.info("Converting %s to xml", infile) <NEW_LINE> p = PDFXConverter() <NEW_LINE> outfile = os.path.join(self.outdir, self.name + ".xml") <NEW_LINE> p.convert(infile, outfile) <NEW_LINE> return outfile
Small routine for starting the PDF conversion call
625941ba16aa5153ce362319
def _onResize(self, event): <NEW_LINE> <INDENT> self._canvas.delete(tkinter.ALL) <NEW_LINE> self._createBlankBoard() <NEW_LINE> self._placeDiscs()
Resize window event handling.
625941bae8904600ed9f1dca
def day_finalize(self): <NEW_LINE> <INDENT> pass
Hook point provided for subclasses to attach to.
625941ba8e71fb1e9831d64e
def parse_arguments(input_arguments: list) -> argparse.Namespace: <NEW_LINE> <INDENT> program_name = os.path.basename(input_arguments[0]) <NEW_LINE> parser = argparse.ArgumentParser(description="Tools to access CHADO databases", epilog="For detailed usage information type '" + program_name + " <command> -h'", prog=program_name, allow_abbrev=False) <NEW_LINE> parser.add_argument("-v", "--version", help="show the version of the software and exit", action='version', version=str(pkg_resources.get_distribution("chado-tools").version)) <NEW_LINE> subparsers = parser.add_subparsers() <NEW_LINE> for command, description in general_commands().items(): <NEW_LINE> <INDENT> sub = subparsers.add_parser(command, description=description, help=description) <NEW_LINE> add_general_arguments(sub) <NEW_LINE> add_arguments_by_command(command, sub) <NEW_LINE> <DEDENT> for command, description in wrapper_commands().items(): <NEW_LINE> <INDENT> sub = subparsers.add_parser(command, description=description, help=description) <NEW_LINE> add_arguments_by_command(command, sub) <NEW_LINE> <DEDENT> return parser.parse_args(input_arguments[1:])
Defines the formal arguments of the 'chado' command and parses the actual arguments accordingly
625941bad18da76e23532373
@subcommand <NEW_LINE> def populate(): <NEW_LINE> <INDENT> os.chdir("Misc") <NEW_LINE> safe_mkdir("NEWS.d/next") <NEW_LINE> for section in sections: <NEW_LINE> <INDENT> dir_name = sanitize_section(section) <NEW_LINE> dir_path = "NEWS.d/next/{}".format(dir_name) <NEW_LINE> safe_mkdir(dir_path) <NEW_LINE> readme_path = "NEWS.d/next/{}/README.rst".format(dir_name) <NEW_LINE> with open(readme_path, "wt", encoding="utf-8") as f: <NEW_LINE> <INDENT> f.write("Put news entry ``blurb`` files for the *{}* section in this directory.\n".format(section)) <NEW_LINE> <DEDENT> git_add_files.append(dir_path) <NEW_LINE> git_add_files.append(readme_path) <NEW_LINE> <DEDENT> flush_git_add_files()
Creates and populates the Misc/NEWS.d directory tree.
625941ba091ae35668666e06
def calc_temps(start_date, end_date): <NEW_LINE> <INDENT> calc_all_temps = session.query( func.min(Measurement.tobs), func.avg(Measurement.tobs), func.max(Measurement.tobs)). filter(Measurement.date >= start_date). filter(Measurement.date <= end_date).all() <NEW_LINE> return calc_all_temps
TMIN, TAVG, and TMAX for a list of dates. Args: start_date (string): A date string in the format %Y-%m-%d end_date (string): A date string in the format %Y-%m-%d Returns: TMIN, TAVG, and TMAX
625941ba0c0af96317bb808a
def augment_translate(image, max_chance=0.5, max_range=0.1, borderValue=128): <NEW_LINE> <INDENT> chance_x = np.random.rand() <NEW_LINE> chance_y = np.random.rand() <NEW_LINE> if not isinstance(max_range, list): <NEW_LINE> <INDENT> max_range = [-max_range, max_range] <NEW_LINE> <DEDENT> if image.ndim > 2 and not isinstance(borderValue, list): <NEW_LINE> <INDENT> borderValue = tuple([borderValue] * image.shape[-1]) <NEW_LINE> <DEDENT> h,w,ch = image.shape[:2] + (image.shape[-1] if image.ndim > 2 else 0,) <NEW_LINE> x_tr = np.random.uniform(max_range[0], max_range[1]) if chance_x < max_chance else 0 <NEW_LINE> y_tr = np.random.uniform(max_range[0], max_range[1]) if chance_y < max_chance else 0 <NEW_LINE> image[:] = cv2.warpAffine(image, np.float32([[1,0,w*x_tr],[0,1,h*y_tr]]), (w,h), borderMode=cv2.BORDER_CONSTANT, borderValue=borderValue)
Performs an affine translation of the image. Args: image (numpy.ndarray): Input image in the Numpy format max_chance (float, optional): Occurrence probability threshold. Defaults to 0.5. max_range (float, optional): Maximum range, as a fraction of the image size, e.g. value '0.2' means a translation of up to 20% of the image size. Can be given as one value (0.2) or as a range (-0.2, 0.2). Defaults to 0.1. borderValue (int, optional): Border value for padding. Defaults to 128.
625941ba379a373c97cfa9eb
def render_to_string(self, request, context): <NEW_LINE> <INDENT> context_data = { 'extension': self.extension, } <NEW_LINE> context_data.update(self.get_extra_context(request, context)) <NEW_LINE> context_data.update(self.extra_context) <NEW_LINE> context.update(context_data) <NEW_LINE> s = render_to_string(template_name=self.template_name, context=context.flatten(), request=request) <NEW_LINE> context.pop() <NEW_LINE> return s
Renders the content for the hook. By default, this renders the provided template name to a string and returns it.
625941bad58c6744b4257b02
def __getName(self): <NEW_LINE> <INDENT> return self._name
Get the model name.
625941ba2ae34c7f2600cfd3
def _update_contribs(self, name, change): <NEW_LINE> <INDENT> setattr(self, name, list(getattr(self, '_'+name).contributions))
Update the list of available contributions (editors, engines, tools) when they change.
625941ba85dfad0860c3acfa
def pulled_up(self, function): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> header, number = self.physical_pin(function) <NEW_LINE> <DEDENT> except PinNoPins: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.headers[header].pins[number].pull_up
Returns a bool indicating whether a physical pull-up is attached to the pin supporting the specified *function*. Either :exc:`PinNoPins` or :exc:`PinMultiplePins` may be raised if the function is not associated with a single pin. :param str function: The pin function you wish to determine pull-up for. Usually this is something like "GPIO9" for Broadcom GPIO pin 9.
625941ba0383005118ecf486
def add(self, cls, name=None): <NEW_LINE> <INDENT> if not name: <NEW_LINE> <INDENT> name = cls.__name__ <NEW_LINE> <DEDENT> self[name] = cls
Stores a local class :param cls: A class :param name: Custom name used in the __jsonclass__ attribute
625941badc8b845886cb53d6
def _user_choices(msg, q, choices, target, attempts=5): <NEW_LINE> <INDENT> choicesstr = "\n" + ', '.join(choices[:-1]) + ' or ' + choices[-1] + ' ' <NEW_LINE> for _ in range(attempts): <NEW_LINE> <INDENT> print('\a') <NEW_LINE> ret_val = input(reformat_text(msg + choicesstr)) <NEW_LINE> ret_val = target(ret_val, choices) <NEW_LINE> if ret_val: <NEW_LINE> <INDENT> q.put(('Result', ret_val)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> q.put(('Exception', UserInputError("Maximum number of attempts {} reached".format(attempts))))
This can be replaced anywhere in the project that needs to implement the user driver. Temporarily a simple input function. The result needs to be put in the queue with the first part of the tuple as 'Exception' or 'Result' and the second part is the exception object or response object. This needs to be compatible with forced exit. Look to user action for how it handles a forced exit. :param msg: Message for the user to understand what to input :param q: The result queue of type queue.Queue :param target: Optional validation function to check if the user response is valid :param attempts: :return:
625941ba9f2886367277a732
def add_edge(self, label, label_value1, label_value2, default_property, property=None): <NEW_LINE> <INDENT> n1 = None <NEW_LINE> n2 = None <NEW_LINE> for n in self.g._nodes: <NEW_LINE> <INDENT> if n[label] == label_value1: <NEW_LINE> <INDENT> n1 = n <NEW_LINE> <DEDENT> if n[label] == label_value2: <NEW_LINE> <INDENT> n2 = n <NEW_LINE> <DEDENT> <DEDENT> if n1 == None or n2 ==None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> edge = self.g.add_edge(n1, n2, directed=True) <NEW_LINE> edge['property'] = default_property <NEW_LINE> edge['id'] = str(self.edge_id) <NEW_LINE> if property != None: <NEW_LINE> <INDENT> edge[property[0]] = edge[property[1]] <NEW_LINE> <DEDENT> self.edge_id += 1
Adds edge between two vertices. @param label: label or key value by which vertex will be searched e.g. "name" or "URI" @param label_value1 : value of above label or key of source vertex @param label_value2 : value of above label or key of target vertex @param default_property : default_property of edge (predicate in RDF terms) @param property : additional property, list of length 2 containing key name of property as first element and value of property as second element
625941ba0c0af96317bb808b
def _get_type_interface(self, res_type): <NEW_LINE> <INDENT> res_interface = dict(params={}, commands={}) <NEW_LINE> base_types = IonObject(res_type)._get_extends() <NEW_LINE> base_types.insert(0, res_type) <NEW_LINE> for rt in reversed(base_types): <NEW_LINE> <INDENT> type_interface = self.resource_interface.get(rt, None) <NEW_LINE> if not type_interface: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for tpar, tval in type_interface.iteritems(): <NEW_LINE> <INDENT> if tpar in res_interface: <NEW_LINE> <INDENT> rval = res_interface[tpar] <NEW_LINE> if isinstance(rval, dict): <NEW_LINE> <INDENT> rval.update(tval) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res_interface[tpar] = tval <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> res_interface[tpar] = dict(tval) if isinstance(tval, dict) else tval <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return res_interface
Creates a merge of params and commands up the type inheritance chain. Note: Entire param and command entries if subtypes replace their super types definition.
625941baaad79263cf3908dd
def update_site_forward(apps, schema_editor): <NEW_LINE> <INDENT> Site = apps.get_model("sites", "Site") <NEW_LINE> Site.objects.update_or_create( id=settings.SITE_ID, defaults={ "domain": "batallon15.com.ar", "name": "cookie", }, )
Set site domain and name.
625941ba627d3e7fe0d68cf0
def copy(self): <NEW_LINE> <INDENT> target = List() <NEW_LINE> for x in range(len(self._values)): <NEW_LINE> <INDENT> target._values.append(self._values[x]) <NEW_LINE> <DEDENT> return target
------------------------------------------------------- Duplicates the current list to a new list in the same order. Use: target = source.copy() ------------------------------------------------------- Returns: target - a copy of self (List) -------------------------------------------------------
625941ba099cdd3c635f0afe
def disable_screen_lock(self): <NEW_LINE> <INDENT> lockdb = '/data/system/locksettings.db' <NEW_LINE> sqlcommand = "update locksettings set value=\\'0\\' where name=\\'screenlock.disabled\\';" <NEW_LINE> self.execute('sqlite3 {} "{}"'.format(lockdb, sqlcommand), as_root=True)
Attempts to disable the screen lock on the device. .. note:: This does not always work... Added in version 2.1.4
625941bad99f1b3c44c67437
def _handle_download_result( self, resource: resource_lib.Resource, tmp_dir_path: str, url_path: str, url_info: checksums.UrlInfo, ) -> str: <NEW_LINE> <INDENT> fnames = tf.io.gfile.listdir(tmp_dir_path) <NEW_LINE> if len(fnames) != 1: <NEW_LINE> <INDENT> raise ValueError( 'Download not found for url {} in: {}. Found {} files, but expected ' '1.'.format(resource.url, tmp_dir_path, len(fnames))) <NEW_LINE> <DEDENT> original_fname, = fnames <NEW_LINE> tmp_path = os.path.join(tmp_dir_path, original_fname) <NEW_LINE> resource_lib.write_info_file( resource=resource, path=url_path, dataset_name=self._dataset_name, original_fname=original_fname, url_info=url_info, ) <NEW_LINE> tf.io.gfile.rename(tmp_path, url_path, overwrite=True) <NEW_LINE> tf.io.gfile.rmtree(tmp_dir_path) <NEW_LINE> self._recorded_url_infos[resource.url] = url_info <NEW_LINE> dst_path = url_path <NEW_LINE> if self._register_checksums: <NEW_LINE> <INDENT> dst_path = self._save_url_info_and_rename( url=resource.url, url_path=url_path, url_info=url_info) <NEW_LINE> <DEDENT> elif resource.url not in self._url_infos: <NEW_LINE> <INDENT> if self._force_checksums_validation: <NEW_LINE> <INDENT> raise ValueError( 'Missing checksums url: {}, yet `force_checksums_validation=True`. ' 'Did you forgot to register checksums ?') <NEW_LINE> <DEDENT> <DEDENT> elif url_info != self._url_infos.get(resource.url, None): <NEW_LINE> <INDENT> raise NonMatchingChecksumError(resource.url, tmp_path) <NEW_LINE> <DEDENT> return dst_path
Post-processing of the downloaded file. * Write `.INFO` file * Rename `tmp_dir/file.xyz` -> `url_path` * Validate/record checksums * Eventually rename `url_path` -> `file_path` when `record_checksums=True` Args: resource: The url to download. tmp_dir_path: Temporary dir where the file was downloaded. url_path: Destination path. url_info: File checksums, size, computed during download. Returns: dst_path: `url_path` (or `file_path` when `register_checksums=True`) Raises: NonMatchingChecksumError:
625941ba460517430c39402f
def _handle_system_settings(self) -> Optional[float]: <NEW_LINE> <INDENT> request = typing.cast( Optional[frontend_pb.SystemSettingsRequest], self.store[states.StateSegment.SYSTEM_SETTINGS_REQUEST] ) <NEW_LINE> if request is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> response = typing.cast( Optional[frontend_pb.SystemSettings], self.store[states.StateSegment.SYSTEM_SETTINGS] ) <NEW_LINE> if response is None: <NEW_LINE> <INDENT> self._logger.error( 'SystemSettings was not initialized in the store!' ) <NEW_LINE> return None <NEW_LINE> <DEDENT> if request.seq_num != (response.seq_num + 1) % (2 ** 32): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> response.display_brightness = request.display_brightness <NEW_LINE> if int(request.date) != int(self.wall_time): <NEW_LINE> <INDENT> self._local_alarms.input(log.LocalLogInputEvent( wall_time=self.wall_time, new_event=mcu_pb.LogEvent( code=mcu_pb.LogEventCode.sysclock_changed, type=mcu_pb.LogEventType.system, old_float=self.wall_time, new_float=request.date ) )) <NEW_LINE> self._event_log_receiver.input(self._local_alarms.output()) <NEW_LINE> <DEDENT> response.date = request.date <NEW_LINE> response.seq_num = request.seq_num <NEW_LINE> return response.date
Run the SystemSettings request/response service. Returns the new system (wall) clock time to set on the system, if it should be changed.
625941bad486a94d0b98dfee
def debugPrint(self): <NEW_LINE> <INDENT> logging.debug("%s,i[%s],f[%s]", self._fcstTime, self._issueHour, self._forecastHour)
logging debug of content
625941ba3eb6a72ae02ec376
def getMat(self,MID): <NEW_LINE> <INDENT> if not MID in self.matDict.keys(): <NEW_LINE> <INDENT> raise KeyError('The MID provided is not linked with any materials'+ 'within the supplied material library.') <NEW_LINE> <DEDENT> return self.matDict[MID]
Method that returns a material from the material library :Args: - `MID (int)`: The ID of the material which is desired :Returns: - `(obj)`: A material object associated with the key MID
625941ba56b00c62f0f144f8
def input_fn(data_file, num_epochs, shuffle, batch_size): <NEW_LINE> <INDENT> assert tf.gfile.Exists(data_file), ( '%s not found. Please make sure you have either run data_download.py or ' 'set both arguments --train_data and --test_data.' % data_file) <NEW_LINE> def parse_csv(value): <NEW_LINE> <INDENT> print('Parsing', data_file) <NEW_LINE> columns = tf.decode_csv(value, record_defaults=_CSV_COLUMN_DEFAULTS) <NEW_LINE> features = dict(zip(_CSV_COLUMNS, columns)) <NEW_LINE> labels = features.pop('price') <NEW_LINE> return features, tf.greater(labels, 540088.1) <NEW_LINE> <DEDENT> dataset = tf.data.TextLineDataset(data_file) <NEW_LINE> if shuffle: <NEW_LINE> <INDENT> dataset = dataset.shuffle(buffer_size=_NUM_EXAMPLES['train']) <NEW_LINE> <DEDENT> dataset = dataset.map(parse_csv, num_parallel_calls=5) <NEW_LINE> dataset = dataset.repeat(num_epochs) <NEW_LINE> dataset = dataset.batch(batch_size) <NEW_LINE> iterator = dataset.make_one_shot_iterator() <NEW_LINE> features, labels = iterator.get_next() <NEW_LINE> return features, labels
Generate an input function for the Estimator.
625941ba1d351010ab8559bf
def test_autonomous(control, fake_time, robot): <NEW_LINE> <INDENT> control.set_autonomous(enabled=True) <NEW_LINE> control.run_test(lambda tm: tm < 15) <NEW_LINE> assert int(fake_time.get()) == 15
Runs autonomous mode by itself
625941ba97e22403b379ce3a
def play(sound): <NEW_LINE> <INDENT> flags = winsound.SND_FILENAME | winsound.SND_NODEFAULT | winsound.SND_ASYNC <NEW_LINE> if not os.path.isfile(sound): <NEW_LINE> <INDENT> print("* Sound error: File not found '%s'. *" % sound) <NEW_LINE> <DEDENT> winsound.PlaySound(sound, flags)
Plays the specified sound file, asynchronously. Use the predefined SND-parameters in this module to specify what sound to play. Any working path can however be specified. If the sound file is not found, no exception is raised.
625941ba66673b3332b91f32
def get_mode(self) -> Optional[int]: <NEW_LINE> <INDENT> if len(self.modes) == 1: <NEW_LINE> <INDENT> return self.modes[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
Return mode (32/64) of this platform if unique, otherwise None
625941ba66673b3332b91f33
def findDuplicate(self, paths): <NEW_LINE> <INDENT> M = collections.defaultdict(list) <NEW_LINE> for line in paths: <NEW_LINE> <INDENT> data = line.split() <NEW_LINE> root = data[0] <NEW_LINE> for file in data[1:]: <NEW_LINE> <INDENT> name, _, content = file.partition('(') <NEW_LINE> M[content[:-1]].append(root + '/' + name) <NEW_LINE> <DEDENT> <DEDENT> return [x for x in M.values() if len(x) > 1]
:type paths: List[str] :rtype: List[List[str]]
625941bae5267d203edcdb42
@app.route('/canaries') <NEW_LINE> def canaries(): <NEW_LINE> <INDENT> return flask.render_template('canaries.tpl')
Renders canaries page
625941ba507cdc57c6306b76
def saveFileDialog(self, event): <NEW_LINE> <INDENT> dialog = QtGui.QFileDialog() <NEW_LINE> filename, fileType = dialog.getSaveFileName(self, "Save File", self.saveFilePath, options= QtGui.QFileDialog.DontUseNativeDialog) <NEW_LINE> if not str(filename).endswith('.mov'): <NEW_LINE> <INDENT> filename = str(filename) + '.mov' <NEW_LINE> <DEDENT> self.fileEdit.setText(str(filename))
Opens a file browser when the text box is clicked. :param event: Event triggered when the text box is clicked. :return:
625941ba236d856c2ad4467f
def display_menu(): <NEW_LINE> <INDENT> print("1-Insert; 2-Remove; 3-Info; 4-Evaluate; 5-Scaling; 6-Derive; 7-Integrate") <NEW_LINE> print("8-Summation; 9-Subtract; 10-Multiply; 11-Divide")
Display the list of available polynomial tools
625941bad4950a0f3b08c1f3
def forward(self, input, hidden1, add_logit=None, logit_mode=False, schedule=None): <NEW_LINE> <INDENT> if len(input.shape)==2: <NEW_LINE> <INDENT> input=input.view(1,input.shape[0],input.shape[1]) <NEW_LINE> <DEDENT> hout, hn = self.gru(input,hidden1) <NEW_LINE> if self.bidirectional: <NEW_LINE> <INDENT> hout_leftshf=torch.cat((hout[2:,:,self.hidden_size:],self.pad),dim=0) <NEW_LINE> hout=torch.cat((hout[:,:,:self.hidden_size],hout_leftshf),dim=-1) <NEW_LINE> <DEDENT> output = self.h2o(hout) <NEW_LINE> self.hout=hout <NEW_LINE> if add_logit is not None: <NEW_LINE> <INDENT> output=output+add_logit <NEW_LINE> <DEDENT> if not logit_mode: <NEW_LINE> <INDENT> output=self.softmax(output) <NEW_LINE> <DEDENT> return output,hn
Forward :param input: :param hidden: :return:
625941ba5e10d32532c5edd0
def run_pre_send(self, message): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pre_send.send(self.__class__, message=message, esp_name=self.esp_name) <NEW_LINE> return True <NEW_LINE> <DEDENT> except AnymailCancelSend: <NEW_LINE> <INDENT> return False
Send pre_send signal, and return True if message should still be sent
625941ba796e427e537b0464
def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkImageDuplicatorIUC3.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj
New() -> itkImageDuplicatorIUC3 Create a new object of the class itkImageDuplicatorIUC3 and set the input and the parameters if some named or non-named arguments are passed to that method. New() tries to assign all the non named parameters to the input of the new objects - the first non named parameter in the first input, etc. The named parameters are used by calling the method with the same name prefixed by 'Set'. Ex: itkImageDuplicatorIUC3.New( reader, Threshold=10 ) is (most of the time) equivalent to: obj = itkImageDuplicatorIUC3.New() obj.SetInput( 0, reader.GetOutput() ) obj.SetThreshold( 10 )
625941ba21bff66bcd6847f7
def get_string( self ): <NEW_LINE> <INDENT> return self.message.as_string()
Returns the string representation of the supplied message.
625941ba7c178a314d6ef2fb
def status(self): <NEW_LINE> <INDENT> oids = [p.get_oid for p in self.parameters] <NEW_LINE> args = ['/usr/bin/snmpget', '-v', '1', '-c', 'public', self._ip] + oids <NEW_LINE> try: <NEW_LINE> <INDENT> output = subprocess.check_output(args, universal_newlines=True, timeout=self._query_timeout) <NEW_LINE> lines = output.strip().split('\n') <NEW_LINE> if self._last_command_failed: <NEW_LINE> <INDENT> log.info(self._log_name, 'Restored contact with ' + self._ip) <NEW_LINE> self._last_command_failed = False <NEW_LINE> <DEDENT> return {k.name: k.parse_snmpget_output(v) for k, v in zip(self.parameters, lines)} <NEW_LINE> <DEDENT> except Exception as exception: <NEW_LINE> <INDENT> print('{} ERROR: failed to query {}: {}' .format(datetime.datetime.utcnow(), self._ip, str(exception))) <NEW_LINE> if not self._last_command_failed: <NEW_LINE> <INDENT> log.error(self._log_name, 'Lost contact with ' + self._ip) <NEW_LINE> self._last_command_failed = True <NEW_LINE> <DEDENT> return {k.name: k.error_value for k in self.parameters}
Return a dictionary of parameter values for this device
625941baf7d966606f6a9eaa
def __call__(self, x): <NEW_LINE> <INDENT> x = self.features(x) <NEW_LINE> x = self.avgpool(x) <NEW_LINE> x = self.classifier(x) <NEW_LINE> return x
Forward compute of MobileNetV3 for classification.
625941ba15baa723493c3e14
@mock.patch.object(Heroku, 'api_request') <NEW_LINE> def test_heroku_create_build_app_name(api_request): <NEW_LINE> <INDENT> heroku = Heroku() <NEW_LINE> result = heroku.create_build('tarball-url', app_name='app-name-123') <NEW_LINE> api_request.assert_called_with( 'POST', '/app-setups', data={ 'source_blob': {'url': 'tarball-url'}, 'app': {'name': 'app-name-123'}, }, )
Heroku.create_build should send the app name.
625941ba16aa5153ce36231a
def isNull(self, *__args): <NEW_LINE> <INDENT> return False
isNull(self, int) -> bool isNull(self, str) -> bool
625941baa8370b7717052742
def find_element(self, strategy, locator, root=None): <NEW_LINE> <INDENT> elements = self.find_elements(strategy, locator, root=root) <NEW_LINE> return elements and elements.first or None
Finds an element on the page. :param strategy: Location strategy to use. See pypom.splinter_driver.ALLOWED_STRATEGIES for valid values. :param locator: Location of target element. :type strategy: str :type locator: str :return: :py:class:`~splinter.driver.webdriver.WebDriverElement`. :rtype: splinter.driver.webdriver.WebDriverElement
625941ba498bea3a759b9952
def twoSum(self, nums, target): <NEW_LINE> <INDENT> res = [] <NEW_LINE> dict = {} <NEW_LINE> for i in range(len(nums)): <NEW_LINE> <INDENT> if nums[i] in dict: <NEW_LINE> <INDENT> res.append([nums[dict[nums[i]]], nums[i]]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dict[target-nums[i]] = i <NEW_LINE> <DEDENT> <DEDENT> return(res)
:type nums: List[int] :type target: int :rtype: List[int]
625941bab830903b967e97b8
def makePNG(self, counter, path=None, title=None): <NEW_LINE> <INDENT> if self.filename is None: <NEW_LINE> <INDENT> raise ValueError('No image selected!') <NEW_LINE> <DEDENT> if not qt.QApplication.instance(): <NEW_LINE> <INDENT> app = qt.QApplication(sys.argv) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> app = qt.QApplication.instance() <NEW_LINE> <DEDENT> win = qt.QWidget() <NEW_LINE> grid = qt.QGridLayout() <NEW_LINE> win.setLayout(grid) <NEW_LINE> win.imageCanvas = qt.QLabel() <NEW_LINE> grid.addWidget(win.imageCanvas) <NEW_LINE> win.imageCanvas, pixmap = MakeDeviceImage._renderImage(self._data, win.imageCanvas, self.filename, title) <NEW_LINE> filename = '{:03d}_deviceimage.png'.format(counter) <NEW_LINE> if path: <NEW_LINE> <INDENT> filename = os.path.join(path, filename) <NEW_LINE> <DEDENT> pixmap.save(filename, 'png')
Render the image with new voltage values and save it to disk Args: counter (int): A counter for the experimental run number
625941bad10714528d5ffb82
def is_sortable(self, name): <NEW_LINE> <INDENT> return name in self._sortable_columns
Verify if column is sortable. :param name: Column name.
625941ba4e4d5625662d427e