code: string (length 4 – 4.48k)
docstring: string (length 1 – 6.45k)
_id: string (length 24)
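Each record below stores its code field as a single whitespace-tokenized line, where <NEW_LINE> marks a line break and <INDENT>/<DEDENT> raise or lower the indentation level. As a rough sketch (the helper name and the four-space indent are assumptions, and whitespace inside string literals is not preserved exactly), the field can be turned back into readable Python like this:

def detokenize(code: str) -> str:
    # Rebuild indented source from the <NEW_LINE>/<INDENT>/<DEDENT> token stream.
    lines, current, indent = [], [], 0
    for token in code.split():
        if token == "<NEW_LINE>":
            lines.append("    " * indent + " ".join(current))
            current = []
        elif token == "<INDENT>":
            indent += 1
        elif token == "<DEDENT>":
            indent = max(indent - 1, 0)
        else:
            current.append(token)
    if current:
        lines.append("    " * indent + " ".join(current))
    return "\n".join(lines)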
def shoppingOffers(self, price, special, needs): <NEW_LINE> <INDENT> valid_offers = [] <NEW_LINE> for s in special: <NEW_LINE> <INDENT> if all(a <= b for a,b in zip(s[:-1],needs)): <NEW_LINE> <INDENT> valid_offers.append(s) <NEW_LINE> <DEDENT> <DEDENT> dp = collections.defaultdict(int) <NEW_LINE> def dfs(offers, needs): <NEW_LINE> <INDENT> if str(needs) in dp: <NEW_LINE> <INDENT> return dp[str(needs)] <NEW_LINE> <DEDENT> for i, count in enumerate(needs): <NEW_LINE> <INDENT> dp[str(needs)] += price[i]*count <NEW_LINE> <DEDENT> for sp in offers: <NEW_LINE> <INDENT> amount, cost = sp[:-1],sp[-1] <NEW_LINE> remain = [] <NEW_LINE> for a,b in zip(amount, needs): <NEW_LINE> <INDENT> remain.append(b-a) <NEW_LINE> <DEDENT> if any(x < 0 for x in remain): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> dp[str(needs)] = min(dp[str(needs)],cost + dfs(offers, remain)) <NEW_LINE> <DEDENT> return dp[str(needs)] <NEW_LINE> <DEDENT> dfs(valid_offers, needs) <NEW_LINE> return dp[str(needs)]
:type price: List[int] :type special: List[List[int]] :type needs: List[int] :rtype: int
625941b6d8ef3951e3243354
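For illustration, here is a self-contained sketch of the same memoized search (rewritten with functools.lru_cache; the function and variable names are ours, not the dataset's), run on the classic example input whose optimal cost is 14:

from functools import lru_cache

def shopping_offers(price, special, needs):
    # Cheapest way to satisfy `needs`, buying items singly or via special bundles.
    @lru_cache(maxsize=None)
    def dfs(remaining):
        best = sum(p * n for p, n in zip(price, remaining))  # buy everything singly
        for *amount, cost in special:
            after = tuple(r - a for r, a in zip(remaining, amount))
            if all(x >= 0 for x in after):                    # offer does not overbuy
                best = min(best, cost + dfs(after))
        return best
    return dfs(tuple(needs))

print(shopping_offers([2, 5], [[3, 0, 5], [1, 2, 10]], [3, 2]))  # 14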
@app.before_request <NEW_LINE> def before_request(): <NEW_LINE> <INDENT> g.db = connect_db()
Make sure we are connected to the database before each request.
625941b6cb5e8a47e48b78c7
def index(): <NEW_LINE> <INDENT> import datetime <NEW_LINE> now=datetime.datetime.now() <NEW_LINE> x=now+datetime.timedelta(-60) <NEW_LINE> rows=db(db.company_posting.posted_on<x).update(display=False) <NEW_LINE> rows=db(db.company_posting.posted_on<x).update(status=False) <NEW_LINE> if auth.user: <NEW_LINE> <INDENT> if auth.has_membership(5): <NEW_LINE> <INDENT> redirect(URL('s_controller','student_home')) <NEW_LINE> <DEDENT> if auth.has_membership(4): <NEW_LINE> <INDENT> redirect(URL('c_controller','company_home')) <NEW_LINE> <DEDENT> if auth.has_membership(27): <NEW_LINE> <INDENT> redirect(URL('tpo','home')) <NEW_LINE> <DEDENT> <DEDENT> return dict(message=T('Welcome Placement App'))
example action using the internationalization operator T and flash, rendered by views/default/index.html or views/generic.html. If you need a simple wiki, simply replace the two lines below with: return auth.wiki()
625941b6e5267d203edcdab8
def test_strict_traits(self): <NEW_LINE> <INDENT> p = Prop() <NEW_LINE> self.assertRaises(traits.TraitError, setattr, p, 'foo', 1) <NEW_LINE> self.assertRaises(traits.TraitError, setattr, p, '_foo', 1) <NEW_LINE> self.assertRaises(traits.TraitError, setattr, p, '__foo', 1)
Test if TVTK objects use strict traits.
625941b65e10d32532c5ed46
def fibonacci_series(n): <NEW_LINE> <INDENT> a, b = 0, 1 <NEW_LINE> while a < n: <NEW_LINE> <INDENT> print(a, end=' ') <NEW_LINE> a, b = b, a + b <NEW_LINE> <DEDENT> print()
generate the Fibonacci sequence to that number or to the Nth number.
625941b65fcc89381b1e14db
def cl_change_config(self, console_format=FMT_FULL, resultsdir=None): <NEW_LINE> <INDENT> self.cl_resultsdir = resultsdir <NEW_LINE> self.cl_console_format = console_format <NEW_LINE> self.cl_config()
Change the config of the log
625941b62ae34c7f2600cf49
def check_trailing_whitespace(df, csv): <NEW_LINE> <INDENT> status = True <NEW_LINE> for column in df: <NEW_LINE> <INDENT> if is_string(df[column]): <NEW_LINE> <INDENT> has_trailing_ws = df[column].str.endswith(' ', na=False).any() <NEW_LINE> if has_trailing_ws: <NEW_LINE> <INDENT> status = False <NEW_LINE> print(csv, ': Trailing whitespace in column: ', column) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return status
Loop over string columns and check for any trailing whitespace
625941b63317a56b86939a81
def handleFinishSessionRequest(intent, session): <NEW_LINE> <INDENT> card_title = "Goodbye" <NEW_LINE> speech_output = "There you go" <NEW_LINE> reprompt_text = "There you go" <NEW_LINE> should_end_session = True <NEW_LINE> session_attributes = {} <NEW_LINE> speechlet_response = build_speechlet_response(card_title, speech_output, reprompt_text, should_end_session) <NEW_LINE> return build_response(session_attributes, speechlet_response)
Called before ending the session
625941b68e7ae83300e4ade2
def change_share(self, user_id_to_change): <NEW_LINE> <INDENT> for user_id in self.shareViewAndEdit: <NEW_LINE> <INDENT> if(user_id == user_id_to_change): <NEW_LINE> <INDENT> self.shareViewAndEdit.remove(user_id_to_change) <NEW_LINE> self.shareView.append(user_id_to_change) <NEW_LINE> <DEDENT> <DEDENT> for user_id in self.shareView: <NEW_LINE> <INDENT> if(user_id == user_id_to_change): <NEW_LINE> <INDENT> self.shareView.remove(user_id_to_change) <NEW_LINE> self.shareViewAndEdit.append(user_id_to_change)
This method moves user_id_to_change between the shareViewAndEdit and shareView lists.
625941b6d18da76e235322e8
def test_add_two_data_different(self): <NEW_LINE> <INDENT> self.mf.add("foo", a=1) <NEW_LINE> self.mf.add("foo", b=1) <NEW_LINE> self.assertEqual(self.mf._cnt, dict(foo=[{'a':1}, {'b':1}]))
Adds two elements, different data.
625941b6046cf37aa974cb62
def all_rule(self, rule): <NEW_LINE> <INDENT> operator_name, operator_value = self.get_operator(rule.get("operator")) <NEW_LINE> all_rules = [ self.eval_operator( operator_name, self.workflow.key_index.get(key, {}).get("value"), operator_value ) for key in rule["key"] ] <NEW_LINE> if all(all_rules): <NEW_LINE> <INDENT> return rule["value"] <NEW_LINE> <DEDENT> return None
All rule dictates all values must be evaluated truthfully.
625941b6097d151d1a222c73
def __init__(self, url, gis, initialize=False): <NEW_LINE> <INDENT> if url.lower().find("uploads") == -1: <NEW_LINE> <INDENT> self._url = url + "/uploads" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._url = url <NEW_LINE> <DEDENT> self._con = gis <NEW_LINE> self._json_dict = {} <NEW_LINE> self._json = ""
Constructor
625941b6fff4ab517eb2f250
@singledispatch <NEW_LINE> def translate(node): <NEW_LINE> <INDENT> raise NotImplementedError('Python AST Generation Error: translate(%r)' % node)
Recursively transform a Romnomnom AST node into a Python AST node.
625941b660cbc95b062c6360
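The entry above is only the singledispatch fallback; handlers for concrete node types would be registered separately. A minimal, self-contained illustration of that pattern (the types and handler bodies here are made up for the example):

from functools import singledispatch

@singledispatch
def describe(node):
    # Fallback: no handler registered for this type.
    raise NotImplementedError('no handler for %r' % (node,))

@describe.register(int)
def _(node):
    return 'int(%d)' % node

@describe.register(str)
def _(node):
    return 'str(%r)' % node

print(describe(42), describe('abc'))  # int(42) str('abc')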
@node.commandWrap <NEW_LINE> def PaintEffectsMeshQuality(*args, **kwargs): <NEW_LINE> <INDENT> return cmds.PaintEffectsMeshQuality(*args, **kwargs)
:rtype: list|str|basestring|DagNode|AttrObject|ArrayAttrObject|Components1Base
625941b68e05c05ec3eea188
def runParamsFromJSON(self, jsonObj): <NEW_LINE> <INDENT> self.__params = jsonObj['__values__']
Populates the values from the json object
625941b6e1aae11d1e749acb
def test_syntax_exceptions(self): <NEW_LINE> <INDENT> test_list = [ "[a-z]{a}", "[a-]", "[[1-9]", "((foo)(bar)))", "|foo", "[1-10]{6:}", ] <NEW_LINE> for test in test_list: <NEW_LINE> <INDENT> self.assertRaises( StringGenerator.SyntaxError, lambda: StringGenerator(test).render() )
Make sure syntax errors in template are caught.
625941b6099cdd3c635f0a74
def __init__(self, path, fmt=None, size=None, exists=False, parents=None): <NEW_LINE> <INDENT> if not os.path.isabs(path): <NEW_LINE> <INDENT> raise ValueError("FileDevice requires an absolute path") <NEW_LINE> <DEDENT> StorageDevice.__init__(self, path, fmt=fmt, size=size, exists=exists, parents=parents)
:param path: full path to the file :type path: str :keyword exists: does this device exist? :type exists: bool :keyword size: the device's size :type size: :class:`~.size.Size` :keyword parents: a list of parent devices :type parents: list of :class:`StorageDevice` :keyword fmt: this device's formatting :type fmt: :class:`~.formats.DeviceFormat` or a subclass of it
625941b6c4546d3d9de72848
def __eq__(self, other): <NEW_LINE> <INDENT> for row in range( 3 ): <NEW_LINE> <INDENT> if self.cells[row] != other.cells[row]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
Overloads '==' such that two eightPuzzles with the same configuration are equal. >>> EightPuzzleState([0, 1, 2, 3, 4, 5, 6, 7, 8]) == EightPuzzleState([1, 0, 2, 3, 4, 5, 6, 7, 8]).result('left') True
625941b65510c4643540f210
@predicate('descendants(set)', safe=True) <NEW_LINE> def descendants(repo, subset, x): <NEW_LINE> <INDENT> return _descendants(repo, subset, x)
Changesets which are descendants of changesets in set.
625941b6d99f1b3c44c673af
def update(self, x, y): <NEW_LINE> <INDENT> self.all_x = x <NEW_LINE> self.all_y = y <NEW_LINE> self.n_pixel_per_frame.append(len(self.all_x)) <NEW_LINE> self.recent_x_fitted.extend(self.all_x) <NEW_LINE> if len(self.n_pixel_per_frame) > self.n_frames: <NEW_LINE> <INDENT> n_x_to_remove = self.n_pixel_per_frame.pop(0) <NEW_LINE> self.recent_x_fitted = self.recent_x_fitted[n_x_to_remove:] <NEW_LINE> <DEDENT> self.best_x = np.mean(self.recent_x_fitted) <NEW_LINE> self.current_fit = np.polyfit(self.all_x, self.all_y, 2) <NEW_LINE> if self.best_fit is None: <NEW_LINE> <INDENT> self.best_fit = self.current_fit <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.best_fit = (self.best_fit * (self.n_frames - 1) + self.current_fit) / self.n_frames <NEW_LINE> <DEDENT> self.current_fit_poly = np.poly1d(self.current_fit) <NEW_LINE> self.best_fit_poly = np.poly1d(self.best_fit)
Update the lane parameters. :param x: A list of x coordinates :param y: A list of y coordinates
625941b6460517430c393fa7
def use_logger(): <NEW_LINE> <INDENT> data_logger = logging.getLogger(DATA_LOGGER_NAME) <NEW_LINE> data_logger.debug(f"my-measurement,host=host1 temperature=25.3 {int(time.time() * 1e9)}") <NEW_LINE> data_logger.debug( Point('my-measurement') .tag('host', 'host1') .field('temperature', 25.3) .time(datetime.datetime.utcnow(), WritePrecision.MS) )
Use the logger. This can happen in any submodule.
625941b6ec188e330fd5a5be
def test_bucket_only_single_bucket(self): <NEW_LINE> <INDENT> bucket_name = self.MakeTempName('bucket', prefix='aaa-') <NEW_LINE> self.CreateBucket(bucket_name) <NEW_LINE> request = '%s://%s' % (self.default_provider, bucket_name[:-2]) <NEW_LINE> expected_result = '//%s ' % bucket_name <NEW_LINE> self.RunGsUtilTabCompletion(['rb', request], expected_results=[expected_result])
Tests bucket-only tab completion matching a single bucket.
625941b6498bea3a759b98c9
def commit(self): <NEW_LINE> <INDENT> self.execute_unnamed(self._cursor, "commit")
Commits the current database transaction. This function is part of the `DBAPI 2.0 specification <http://www.python.org/dev/peps/pep-0249/>`_.
625941b66fece00bbac2d552
def get_command_processes(command): <NEW_LINE> <INDENT> if "|" in command: <NEW_LINE> <INDENT> command_parts = command.split("|") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> command_parts = [command] <NEW_LINE> <DEDENT> processes = [] <NEW_LINE> for command_part in command_parts: <NEW_LINE> <INDENT> if processes: <NEW_LINE> <INDENT> processes.append(Popen(shlex.split(command_part), stdin=processes[-1].stdout, stdout=PIPE, stderr=PIPE)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> processes.append(Popen(shlex.split(command_part), stdin=None, stdout=PIPE, stderr=PIPE)) <NEW_LINE> <DEDENT> <DEDENT> return processes
:param string command: a string of command(s) to run that may include pipes :return: a list of Popen objects
625941b67c178a314d6ef26f
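The chaining above wires each stage's stdout into the next stage's stdin. A small stand-alone example of the same idea (assumes a POSIX environment where echo and tr are available):

import shlex
from subprocess import Popen, PIPE

# "echo hello world | tr a-z A-Z" built as two chained Popen objects.
first = Popen(shlex.split("echo hello world"), stdout=PIPE, stderr=PIPE)
second = Popen(shlex.split("tr a-z A-Z"), stdin=first.stdout, stdout=PIPE, stderr=PIPE)
out, _ = second.communicate()
print(out.decode().strip())  # HELLO WORLD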
def get_info( self, comment_id: str, fields: Optional[Union[str, list, tuple]] = None, return_json: bool = False, ) -> Union[IgBusComment, dict]: <NEW_LINE> <INDENT> if fields is None: <NEW_LINE> <INDENT> fields = const.IG_BUSINESS_MEDIA_PUBLIC_FIELDS <NEW_LINE> <DEDENT> data = self.client.get_object( object_id=comment_id, fields=enf_comma_separated(field="fields", value=fields), ) <NEW_LINE> if return_json: <NEW_LINE> <INDENT> return data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return IgBusComment.new_from_json_dict(data=data)
Get information about a Business comment. :param comment_id: ID for Comment. :param fields: Comma-separated id string for data fields which you want. You can also pass this with an id list, tuple. :param return_json: Set to false will return a dataclass for IgBusComment. Or return json data. Default is false. :return: Business comment information.
625941b6097d151d1a222c74
def get_exchange_instance(self): <NEW_LINE> <INDENT> instrument = self.saver.instrument(kind = "cryptocurrency") <NEW_LINE> m30 = self.saver.timeframe(id = "30M", description = "thirty minutes") <NEW_LINE> h1 = self.saver.timeframe(id = "1H", description = "one hour") <NEW_LINE> d1 = self.saver.timeframe(id = "1D", description = "one day") <NEW_LINE> broker = self.saver.broker(name = "bittrex", site = "www.bittrex.com") <NEW_LINE> datasource = self.saver.data_source(name = "bittrex", site = "www.bittrex.com", timeframes = [m30, h1, d1]) <NEW_LINE> timetable = None <NEW_LINE> exchange = self.saver.exchange(name = "bittrex", website = "www.bittrex.com", data_source = datasource, timetable = timetable, brokers = [broker]) <NEW_LINE> return exchange
Save all information about Bittrex and return an Exchange instance. Return: exchange (alchemist_lib.database.exchange.Exchange): Exchange instance.
625941b60383005118ecf3fc
def _is_label_reference(self, key, axis=0) -> bool_t: <NEW_LINE> <INDENT> axis = self._get_axis_number(axis) <NEW_LINE> other_axes = (ax for ax in range(self._AXIS_LEN) if ax != axis) <NEW_LINE> return ( key is not None and is_hashable(key) and any(key in self.axes[ax] for ax in other_axes) )
Test whether a key is a label reference for a given axis. To be considered a label reference, `key` must be a string that: - (axis=0): Matches a column label - (axis=1): Matches an index label Parameters ---------- key: str Potential label name axis: int, default 0 Axis perpendicular to the axis that labels are associated with (0 means search for column labels, 1 means search for index labels) Returns ------- is_label: bool
625941b68c0ade5d55d3e7d7
def get_unfolded_with_no_errors(self): <NEW_LINE> <INDENT> if getattr(self, 'unfolded_no_err', None) is None: <NEW_LINE> <INDENT> self.unfolded_no_err = self.get_output().Clone("unfolded_no_err") <NEW_LINE> cu.remove_th1_errors(self.unfolded_no_err) <NEW_LINE> <DEDENT> return self.unfolded_no_err
Make copy of unfolded, but with no error bars
625941b6d10714528d5ffaf7
def search(self, item): <NEW_LINE> <INDENT> cursor = self.__head <NEW_LINE> while cursor: <NEW_LINE> <INDENT> if cursor.element == item: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cursor = cursor.next <NEW_LINE> <DEDENT> <DEDENT> return False
Search for the given element in the list, O(n).
625941b6a8ecb033257d2eee
def check_multiple(couples, desc=None): <NEW_LINE> <INDENT> check('list[>0](tuple(str, *))', couples, 'I expect a non-empty list of (object, string) tuples.') <NEW_LINE> contracts = [x[0] for x in couples] <NEW_LINE> values = [x[1] for x in couples] <NEW_LINE> try: <NEW_LINE> <INDENT> return check_contracts(contracts, values) <NEW_LINE> <DEDENT> except ContractNotRespected as e: <NEW_LINE> <INDENT> if desc is not None: <NEW_LINE> <INDENT> e.error = '%s\n%s' % (desc, e.error) <NEW_LINE> <DEDENT> raise e
Checks multiple couples of (contract, value) in the same context. This means that the variables in each contract are shared with the others. :param couples: A list of tuple (contract, value) to check. :type couples: ``list[>0](tuple(str, *))`` :param desc: An optional description of the error. If given, it is included in the error message. :type desc: ``None|str``
625941b68a349b6b435e7f8c
def test_little_endian_bytes_to_int_5(self): <NEW_LINE> <INDENT> byte_seq = (0xff, 0xff) <NEW_LINE> integer = utils.little_endian_bytes_to_int(byte_seq) <NEW_LINE> expected_integer = 0xffff <NEW_LINE> self.assertEqual(integer, expected_integer)
Check the returned value of utils.little_endian_bytes_to_int().
625941b62eb69b55b151c6c2
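For reference, the standard library performs the same little-endian conversion; the snippet below only illustrates the values the test expects, not the utils implementation itself:

# int.from_bytes with byteorder="little" mirrors little_endian_bytes_to_int.
assert int.from_bytes(bytes((0xff, 0xff)), byteorder="little") == 0xffff
assert int.from_bytes(bytes((0x01, 0x02)), byteorder="little") == 0x0201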
def test_usernameAvailability(self): <NEW_LINE> <INDENT> signup = self.createFreeSignup(free_signup.userInfo.itemClass) <NEW_LINE> self.failUnless(signup.usernameAvailable(u'alice', u'localhost')[0]) <NEW_LINE> self.failIf(signup.usernameAvailable(u'alice', u'example.com')[0]) <NEW_LINE> self.failIf(signup.usernameAvailable(u'admin', u'localhost')[0]) <NEW_LINE> self.assertEquals(signup.usernameAvailable(u'fjones', u'localhost'), [True, u'Username already taken']) <NEW_LINE> signup.createUser( realName=u"Frank Jones", username=u'fjones', domain=u'localhost', password=u'asdf', emailAddress=u'[email protected]') <NEW_LINE> self.assertEquals(signup.usernameAvailable(u'fjones', u'localhost'), [False, u'Username already taken']) <NEW_LINE> ss = self.ls.accountByAddress(u"fjones", u"localhost").avatars.open() <NEW_LINE> self.assertEquals(ss.query(Installation).count(), 1)
Test that the usernames which ought to be available are and that those which aren't are not: Only syntactically valid localparts are allowed. Localparts which are already assigned are not allowed. Only domains which are actually served by this mantissa instance are allowed.
625941b6627d3e7fe0d68c66
def _test_m2m_create(self, M2MFieldClass): <NEW_LINE> <INDENT> class LocalBookWithM2M(Model): <NEW_LINE> <INDENT> author = ForeignKey(Author) <NEW_LINE> title = CharField(max_length=100, db_index=True) <NEW_LINE> pub_date = DateTimeField() <NEW_LINE> tags = M2MFieldClass("TagM2MTest", related_name="books") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'schema' <NEW_LINE> apps = new_apps <NEW_LINE> <DEDENT> <DEDENT> self.local_models = [LocalBookWithM2M] <NEW_LINE> with connection.schema_editor() as editor: <NEW_LINE> <INDENT> editor.create_model(Author) <NEW_LINE> editor.create_model(TagM2MTest) <NEW_LINE> editor.create_model(LocalBookWithM2M) <NEW_LINE> <DEDENT> columns = self.column_classes(LocalBookWithM2M._meta.get_field("tags").rel.through) <NEW_LINE> self.assertEqual(columns['tagm2mtest_id'][0], "IntegerField")
Tests M2M fields on models during creation
625941b663d6d428bbe44307
def ExecuteFunction(self, request_iterator, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
Requests a function to be executed as specified in the header.
625941b6baa26c4b54cb0f3b
def test_parse_reproduce(self): <NEW_LINE> <INDENT> main.execute(['reproduce', '1234']) <NEW_LINE> main.execute( ['reproduce', '1234', '--disable-xvfb', '-j', '25', '--current', '--disable-goma', '-i', '500', '--target-args', '--test --test2', '--edit-mode', '--disable-gclient', '--enable-debug', '-l', '20']) <NEW_LINE> self.mock.start_loggers.assert_has_calls([mock.call()]) <NEW_LINE> self.mock.execute.assert_has_calls([ mock.call(build='chromium', current=False, disable_goma=False, goma_threads=None, testcase_id='1234', iterations=3, disable_xvfb=False, target_args='', edit_mode=False, disable_gclient=False, enable_debug=False, goma_load=None), mock.call(build='chromium', current=True, disable_goma=True, goma_threads=25, testcase_id='1234', iterations=500, disable_xvfb=True, target_args='--test --test2', edit_mode=True, disable_gclient=True, enable_debug=True, goma_load=20), ])
Test parse reproduce command.
625941b623849d37ff7b2eaa
def _run(fn, args, kwargs, queue): <NEW_LINE> <INDENT> queue.put_nowait(_run_and_catch(fn, args, kwargs))
Run a function and place the result in a queue for later processing.
625941b69c8ee82313fbb58c
def test_mhs_set_amplitude(): <NEW_LINE> <INDENT> with expected_protocol(ik.minghe.MHS5200, [], [], sep="\r\n") as mhs: <NEW_LINE> <INDENT> with pytest.raises(NotImplementedError): <NEW_LINE> <INDENT> mhs._set_amplitude_(1, 2)
Raise NotImplementedError when trying to set amplitude
625941b6a79ad161976cbf5d
def defectEigenstates(depth,width,center,left,right,nMin,nMax,dx,dx_solve): <NEW_LINE> <INDENT> U = defectPotential(depth,width,center,left,right,dx_solve) <NEW_LINE> return potentialEigenstates(U,nMin,nMax,left,right,dx,dx_solve)
returns energy levels and eigenstates of a SHO+defect potential depth: depth of the defect width: width of the defect center: center position of the defect [left, right]: region where the potential is defined dx_solve: mesh size to use when solving schrodinger's eqn dx: mesh size to use for returned wavefunctions
625941b6091ae35668666d7e
@assert_auth <NEW_LINE> def alpha_128(code, end_date=None): <NEW_LINE> <INDENT> func_name = sys._getframe().f_code.co_name <NEW_LINE> return JQDataClient.instance().get_alpha_191(**locals())
Formula: 100-(100/(1+SUM(((HIGH+LOW+CLOSE)/3>DELAY((HIGH+LOW+CLOSE)/3,1)?(HIGH+LOW+CLOSE)/3*VOLUME:0),14)/SUM(((HIGH+LOW+CLOSE)/3<DELAY((HIGH+LOW+CLOSE)/3,1)?(HIGH+LOW+CLOSE)/3*VOLUME:0),14))) Inputs: code: stock pool end_date: query date Outputs: value of the factor
625941b6fb3f5b602dac34a6
def test_write_file_ipv4(self): <NEW_LINE> <INDENT> name = "test_write_file_v4.txt" <NEW_LINE> content = "write test file content ipv4" <NEW_LINE> share_path = os.path.join(self.public_dir, name) <NEW_LINE> assert not os.path.exists(share_path) <NEW_LINE> local_path = tempfile.mktemp() <NEW_LINE> with salt.utils.files.fopen(local_path, "w") as fp: <NEW_LINE> <INDENT> fp.write(content) <NEW_LINE> <DEDENT> conn = salt.utils.smb.get_conn("127.0.0.1", self.username, "foo", port=1445) <NEW_LINE> salt.utils.smb.put_file(local_path, name, "public", conn=conn) <NEW_LINE> conn.close() <NEW_LINE> assert os.path.exists(share_path) <NEW_LINE> with salt.utils.files.fopen(share_path, "r") as fp: <NEW_LINE> <INDENT> result = fp.read() <NEW_LINE> <DEDENT> assert result == content
Transfer a file over SMB
625941b61b99ca400220a8c8
def __init__( self, attributes: List[str], object_pool: List[dict], command: str, stop_command="!stop", ): <NEW_LINE> <INDENT> self.responses = {} <NEW_LINE> self.active = False <NEW_LINE> self.cluetime = 10 <NEW_LINE> self.callID = None <NEW_LINE> self.statToSet = {} <NEW_LINE> self.stop_command = stop_command <NEW_LINE> self.points = 30 <NEW_LINE> self._attributes = attributes <NEW_LINE> self.attributes = attributes <NEW_LINE> self.object_pool = object_pool <NEW_LINE> self.object_to_guess = None <NEW_LINE> self.command = command
Initialize variables. Objects need a "name" field. When implementing this, you need to implement a method for every hint, called like this: '_<stat>_hint(obj)' which returns the string hint to send in the channel.
625941b6e5267d203edcdab9
def test_humantime(): <NEW_LINE> <INDENT> assert humantime(1e6) == u'1 weeks, 4 days, 13 hours, 46 min., 40 s' <NEW_LINE> assert humantime(2e5) == u'2 days, 7 hours, 33 min., 20 s' <NEW_LINE> assert humantime(5e3) == u'1 hours, 23 min., 20 s' <NEW_LINE> assert humantime(60) == u'1 min., 0 s' <NEW_LINE> assert humantime(1) == u'1 s' <NEW_LINE> assert humantime(0) == u'0 s' <NEW_LINE> assert humantime(0.1) == u'100 ms' <NEW_LINE> assert humantime(0.005) == u'5 ms' <NEW_LINE> assert humantime(1e-5) == u'10 μs' <NEW_LINE> assert humantime(5.25e-4) == u'525 μs' <NEW_LINE> assert humantime(5e-7) == u'500 ns' <NEW_LINE> assert humantime(1e-12) == u'0.001 ns' <NEW_LINE> for val in ('abc', [], {'x': 5}): <NEW_LINE> <INDENT> with pytest.raises(ValueError) as context: <NEW_LINE> <INDENT> humantime(val) <NEW_LINE> <DEDENT> assert 'Input must be numeric' in str(context.value)
Tests the humantime utility
625941b69f2886367277a6a9
def user_data(self, access_token, *args, **kwargs): <NEW_LINE> <INDENT> data = {'method': 'users.getInfo', 'session_key': access_token} <NEW_LINE> return mailru_api(self, data)[0]
Return user data from Mail.ru REST API
625941b6287bf620b61d3888
def errMsg(msg): <NEW_LINE> <INDENT> print("ftclient > received following message from ftserver: \n\t"+msg)
Print error message received.
625941b6be383301e01b52a5
def _load_turkeys(self,**kwargs): <NEW_LINE> <INDENT> columns_rn = kwargs.pop('column_renamer',None) <NEW_LINE> df = pd.read_csv(self.fname,**kwargs) <NEW_LINE> if columns_rn is not None: <NEW_LINE> <INDENT> df = df.rename(columns = columns_rn) <NEW_LINE> <DEDENT> self.df = self._validate_columns(df,columns_rn) <NEW_LINE> self._process()
loads and does some initial cleaning
625941b6cad5886f8bd26dfa
def main(): <NEW_LINE> <INDENT> state_map = dict( update='fabric-local-modify' ) <NEW_LINE> argument_spec = dict( pn_cliswitch=dict(required=True, type='str'), state=dict(required=False, type='str', choices=state_map.keys(), default='update'), pn_fabric_network=dict(required=False, type='str', choices=['mgmt', 'in-band', 'vmgmt'], default='mgmt'), pn_vlan=dict(required=False, type='str'), pn_control_network=dict(required=False, type='str', choices=['in-band', 'mgmt', 'vmgmt']), pn_fabric_advertisement_network=dict(required=False, type='str', choices=['inband-mgmt', 'inband-only', 'inband-vmgmt', 'mgmt-only']), ) <NEW_LINE> module = AnsibleModule( argument_spec=argument_spec, required_one_of=[['pn_fabric_network', 'pn_vlan', 'pn_control_network', 'pn_fabric_advertisement_network']], ) <NEW_LINE> cliswitch = module.params['pn_cliswitch'] <NEW_LINE> state = module.params['state'] <NEW_LINE> fabric_network = module.params['pn_fabric_network'] <NEW_LINE> vlan = module.params['pn_vlan'] <NEW_LINE> control_network = module.params['pn_control_network'] <NEW_LINE> fabric_adv_network = module.params['pn_fabric_advertisement_network'] <NEW_LINE> command = state_map[state] <NEW_LINE> if vlan: <NEW_LINE> <INDENT> if int(vlan) < 1 or int(vlan) > 4092: <NEW_LINE> <INDENT> module.fail_json( failed=True, msg='Valid vlan range is 1 to 4092' ) <NEW_LINE> <DEDENT> cli = pn_cli(module, cliswitch) <NEW_LINE> cli += ' vlan-show format id no-show-headers' <NEW_LINE> out = run_commands(module, cli)[1].split() <NEW_LINE> if vlan in out and vlan != '1': <NEW_LINE> <INDENT> module.fail_json( failed=True, msg='vlan %s is already in used. Specify unused vlan' % vlan ) <NEW_LINE> <DEDENT> <DEDENT> cli = pn_cli(module, cliswitch) <NEW_LINE> if command == 'fabric-local-modify': <NEW_LINE> <INDENT> cli += ' %s ' % command <NEW_LINE> if fabric_network: <NEW_LINE> <INDENT> cli += ' fabric-network ' + fabric_network <NEW_LINE> <DEDENT> if vlan: <NEW_LINE> <INDENT> cli += ' vlan ' + vlan <NEW_LINE> <DEDENT> if control_network: <NEW_LINE> <INDENT> cli += ' control-network ' + control_network <NEW_LINE> <DEDENT> if fabric_adv_network: <NEW_LINE> <INDENT> cli += ' fabric-advertisement-network ' + fabric_adv_network <NEW_LINE> <DEDENT> <DEDENT> run_cli(module, cli, state_map)
This section handles argument parsing.
625941b6dd821e528d63afc3
def __init__(self, port='/dev/ttyS0', speed=19200): <NEW_LINE> <INDENT> self.serial = serial.Serial(port, speed, timeout=2.0) <NEW_LINE> self.reverse_range = False <NEW_LINE> self.communication_mode(computer_control=True) <NEW_LINE> self.type = '422' <NEW_LINE> if self.comm('SQA') == '1': <NEW_LINE> <INDENT> self.series = '400' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.series = '125' <NEW_LINE> <DEDENT> self.state = {'emission': 'Unknown', 'sem': 'Unknown'}
Initialize the module
625941b6b5575c28eb68de15
def _get_attrs(self, model, path): <NEW_LINE> <INDENT> for attr, details in model._attribute_map.items(): <NEW_LINE> <INDENT> conditions = [] <NEW_LINE> full_path = '.'.join([self.parser._request_param['name'], path, attr]) <NEW_LINE> conditions.append( model._validation.get(attr, {}).get('readonly')) <NEW_LINE> conditions.append( model._validation.get(attr, {}).get('constant')) <NEW_LINE> conditions.append(any([i for i in pformat.IGNORE_PARAMETERS if i in full_path])) <NEW_LINE> conditions.append(details['type'][0] in ['{']) <NEW_LINE> if not any(conditions): <NEW_LINE> <INDENT> yield attr, details
Get all the attributes from the complex parameter model that should be exposed as command line arguments. :param class model: The parameter model class. :param str path: Request parameter namespace.
625941b6167d2b6e312189b6
def register_results_in_gis(self): <NEW_LINE> <INDENT> for mkey, lst in self.output_maplist.iteritems(): <NEW_LINE> <INDENT> strds_name = self.out_map_names[mkey] <NEW_LINE> if strds_name is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.gis.register_maps_in_stds(mkey, strds_name, lst, 'strds', self.temporal_type) <NEW_LINE> <DEDENT> if self.drainage_sim and self.drainage_out: <NEW_LINE> <INDENT> self.gis.register_maps_in_stds("Itzï drainage results", self.drainage_out, self.vector_drainage_maplist, 'stvds', self.temporal_type) <NEW_LINE> <DEDENT> return self
Register the generated maps in the temporal database. Loop through the output names: if no output name is provided, don't do anything; if the name is populated, create a strds of the right temporal type and register the corresponding listed maps
625941b663b5f9789fde6efd
def __process_template(self, input, pm, p): <NEW_LINE> <INDENT> with open(input, "r", encoding=DEFAULT_ENCODING) as t: <NEW_LINE> <INDENT> repl = partial(pre_repl, self, p) <NEW_LINE> self.__process_template_loop(pm, repl, t, p)
Template reading and transformation initialization.
625941b650485f2cf553cbb1
def postfix_eval(postfix_expr): <NEW_LINE> <INDENT> operand_stack = Stack() <NEW_LINE> token_list = postfix_expr.split() <NEW_LINE> for token in token_list: <NEW_LINE> <INDENT> if token in '0123456789': <NEW_LINE> <INDENT> operand_stack.push(int(token)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> op2 = operand_stack.pop() <NEW_LINE> op1 = operand_stack.pop() <NEW_LINE> result = do_math(token, op1, op2) <NEW_LINE> operand_stack.push(result) <NEW_LINE> <DEDENT> <DEDENT> return operand_stack.pop()
Evaluate a postfix expression. If the token is an operand, convert it from a string to an integer and push the value onto operandStack. If the token is one of the operators *, /, + or -, it needs two operands: pop operandStack twice; the first pop is the second operand and the second pop is the first operand. After performing the arithmetic, push the result onto the operand stack. :param postfix_expr: the postfix expression, a string of tokens separated by spaces
625941b64527f215b584c273
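A self-contained sketch of the same stack-based evaluation, using a plain list as the stack and the operator module in place of the Stack/do_math helpers that are not shown in the entry:

import operator

OPS = {"+": operator.add, "-": operator.sub, "*": operator.mul, "/": operator.truediv}

def postfix_eval_sketch(expr):
    stack = []
    for token in expr.split():
        if token in OPS:
            op2, op1 = stack.pop(), stack.pop()   # second operand is popped first
            stack.append(OPS[token](op1, op2))
        else:
            stack.append(int(token))
    return stack.pop()

print(postfix_eval_sketch("7 8 + 3 2 + /"))  # 3.0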
def with_sufficient_decrease_coefficient(self, sufficient_decrease_coefficient): <NEW_LINE> <INDENT> if sufficient_decrease_coefficient <= 0.0 or sufficient_decrease_coefficient >= 1.0: <NEW_LINE> <INDENT> raise Exception("sufficient decrease coeff must be in (0,1)") <NEW_LINE> <DEDENT> self.__sufficient_decrease_coefficient = sufficient_decrease_coefficient <NEW_LINE> return self
Specify the sufficient decrease coefficient of the algorithm :param sufficient_decrease_coefficient: sufficient decrease coefficient :returns: The current object
625941b61f5feb6acb0c496e
def display(self, option): <NEW_LINE> <INDENT> options = ['auto', 'all'] <NEW_LINE> if option not in options: <NEW_LINE> <INDENT> raise Exception("Display option must be one of %s" % ','.join(repr(el) for el in options)) <NEW_LINE> <DEDENT> self._display = option <NEW_LINE> return self
Sets the display policy of the Layout before returning self
625941b63539df3088e2e163
def get_ssl_certificates(self): <NEW_LINE> <INDENT> INVALID_SSL_CERTIFICATES = "Invalid ssl_certificates for {network_location}!" <NEW_LINE> INVALID_PARENT_SSL_CERTIFICATES_DIRECTORY = "Invalid parent_ssl_certificates_directory for {network_location}!" <NEW_LINE> ssl_certificates = self.configuration.get("ssl_certificates") <NEW_LINE> if ssl_certificates is not None: <NEW_LINE> <INDENT> if self.parent_ssl_certificates_directory is not None: <NEW_LINE> <INDENT> parent_ssl_certificates_directory = os.path.abspath(self.parent_ssl_certificates_directory) <NEW_LINE> if os.path.isdir(parent_ssl_certificates_directory): <NEW_LINE> <INDENT> ssl_certificates = os.path.join(parent_ssl_certificates_directory, ssl_certificates) <NEW_LINE> if not os.path.isfile(ssl_certificates): <NEW_LINE> <INDENT> raise tuf.InvalidConfigurationError(INVALID_SSL_CERTIFICATES.format( network_location=self.network_location)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise tuf.InvalidConfigurationError( INVALID_PARENT_SSL_CERTIFICATES_DIRECTORY.format( network_location=self.network_location)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ssl_certificates
<Purpose> Get any PEM certificate bundle. <Arguments> None. <Exceptions> <Side Effects> <Returns>
625941b63346ee7daa2b2b81
def __getitem__(self, key): <NEW_LINE> <INDENT> return self._squares[key]
TESTS:: sage: from sage.combinat.knutson_tao_puzzles import KnutsonTaoPuzzleSolver sage: ps = KnutsonTaoPuzzleSolver("H") sage: puzzle = ps('0101','1001')[0] sage: puzzle {(1, 1): 0/1\10, (1, 2): 1/\1 10\/0, (1, 3): 0/\10 1\/1, (1, 4): 1/\1 10\/0, (2, 2): 0/0\0, (2, 3): 1/\0 0\/1, (2, 4): 0/\0 0\/0, (3, 3): 1/1\1, (3, 4): 0/\0 1\/10, (4, 4): 10/0\1} sage: puzzle[(1,2)] # indirect doctest 1/\1 10\/0
625941b6bf627c535bc12fef
def _exons_and_cds_from_enst(self): <NEW_LINE> <INDENT> if self.version == 'grch37': <NEW_LINE> <INDENT> server = "https://grch37.rest.ensembl.org" <NEW_LINE> <DEDENT> elif self.version == 'grch38': <NEW_LINE> <INDENT> server = "https://rest.ensembl.org" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Please check ENSEMBL version, only grch37 or grch38 are allowed') <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> ext = "/overlap/id/%s?feature=transcript;feature=exon;feature=CDS" % self.enst <NEW_LINE> r = requests.get(server + ext, headers={"Content-Type": "application/json"}) <NEW_LINE> if not r.ok: <NEW_LINE> <INDENT> r.raise_for_status() <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> decoded = r.json() <NEW_LINE> trasc = list(filter(lambda x: x['feature_type'] == 'transcript' and x['transcript_id'] == self.enst, decoded)) <NEW_LINE> self.transcript_obj = Transcript(trasc[0]) <NEW_LINE> self.common_name = trasc[0]['external_name'].split('-')[0] <NEW_LINE> self.protein_coding = trasc[0]['biotype'] <NEW_LINE> exons = list(filter(lambda x: x['feature_type'] == 'exon' and x['Parent'] == self.enst, decoded)) <NEW_LINE> for i in exons: <NEW_LINE> <INDENT> self.transcript_obj.add_exon(i) <NEW_LINE> <DEDENT> cds = list(filter(lambda x: x['feature_type'] == 'cds' and x['Parent'] == self.enst, decoded)) <NEW_LINE> for i in cds: <NEW_LINE> <INDENT> self.transcript_obj.add_cds(i) <NEW_LINE> <DEDENT> return
This method retrieves all EXONS and CDS regions from ENST transcript name creating a Transcript object
625941b6004d5f362079a150
@before(Swimming(actor)) <NEW_LINE> def before_swimming_default(actor, ctxt) : <NEW_LINE> <INDENT> raise AbortAction("There's no place to swim.", actor=actor)
By default, you can't swim.
625941b6566aa707497f4393
def propagate(parent): <NEW_LINE> <INDENT> query = (table.parent == parent) & (table.inherited == True) <NEW_LINE> rows = db(query).select(*fields) <NEW_LINE> for row in rows: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> update_location_tree(row, propagating=True) <NEW_LINE> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> current.log.error("Cannot propagate inherited latlon to child %s of location ID %s: too much recursion" % (row.id, parent))
Propagate Lat/Lon down to any Features which inherit from this one @param parent: gis_location id of parent
625941b63539df3088e2e164
def authn_response(self, context, binding): <NEW_LINE> <INDENT> if not context.request["SAMLResponse"]: <NEW_LINE> <INDENT> satosa_logging(logger, logging.DEBUG, "Missing Response for state", context.state) <NEW_LINE> raise SATOSAAuthenticationError(context.state, "Missing Response") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> authn_response = self.sp.parse_authn_request_response( context.request["SAMLResponse"], binding, outstanding=self.outstanding_queries) <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> satosa_logging(logger, logging.DEBUG, "Failed to parse authn request for state", context.state, exc_info=True) <NEW_LINE> raise SATOSAAuthenticationError(context.state, "Failed to parse authn request") from err <NEW_LINE> <DEDENT> if self.sp.config.getattr('allow_unsolicited', 'sp') is False: <NEW_LINE> <INDENT> req_id = authn_response.in_response_to <NEW_LINE> if req_id not in self.outstanding_queries: <NEW_LINE> <INDENT> errmsg = "No request with id: {}".format(req_id), <NEW_LINE> satosa_logging(logger, logging.DEBUG, errmsg, context.state) <NEW_LINE> raise SATOSAAuthenticationError(context.state, errmsg) <NEW_LINE> <DEDENT> del self.outstanding_queries[req_id] <NEW_LINE> <DEDENT> if context.state[self.name]["relay_state"] != context.request["RelayState"]: <NEW_LINE> <INDENT> satosa_logging(logger, logging.DEBUG, "State did not match relay state for state", context.state) <NEW_LINE> raise SATOSAAuthenticationError(context.state, "State did not match relay state") <NEW_LINE> <DEDENT> context.decorate(Context.KEY_BACKEND_METADATA_STORE, self.sp.metadata) <NEW_LINE> del context.state[self.name] <NEW_LINE> return self.auth_callback_func(context, self._translate_response(authn_response, context.state))
Endpoint for the idp response :type context: satosa.context,Context :type binding: str :rtype: satosa.response.Response :param context: The current context :param binding: The saml binding type :return: response
625941b6d164cc6175782b66
def retry(max_retries: int =5, delay: (int, float) =0, step: (int, float) =0, exceptions: (BaseException, tuple, list) =BaseException, sleep=time.sleep, callback=None, validate=None): <NEW_LINE> <INDENT> def wrapper(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def _wrapper(*args, **kwargs): <NEW_LINE> <INDENT> nonlocal delay, step, max_retries <NEW_LINE> func_ex = StopRetry <NEW_LINE> while max_retries > 0: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = func(*args, **kwargs) <NEW_LINE> if callable(validate) and validate(result) is False: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> <DEDENT> except exceptions as ex: <NEW_LINE> <INDENT> func_ex = ex <NEW_LINE> if callable(callback) and callback(ex) is True: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> max_retries -= 1 <NEW_LINE> if delay > 0 or step > 0: <NEW_LINE> <INDENT> sleep(delay) <NEW_LINE> delay += step <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise func_ex <NEW_LINE> <DEDENT> <DEDENT> return _wrapper <NEW_LINE> <DEDENT> return wrapper
A simple decorator that automatically retries a function when it raises an exception. :param max_retries: maximum number of retries. :param delay: delay before each retry, in seconds. :param step: amount added to the delay after each retry, in seconds. :param exceptions: exception types that trigger a retry; pass a single exception class directly, or several as a tuple or list. :param sleep: function used to implement the delay, time.sleep by default. In some asynchronous frameworks, such as tornado, time.sleep blocks, so a custom function can be passed in; it should have the same signature as time.sleep and take a single argument, the time to wait. :param callback: callback function taking one argument; whenever an exception occurs, the exception object is passed to it. It can be used to log the exception, abort the retries, etc. If the callback runs normally and returns True, the decorator treats the exception as handled, stops retrying, and raises nothing. If it runs normally and returns nothing, or anything other than True, retrying continues. If the callback itself raises, retrying stops and the callback's exception is raised. :param validate: validation function used to check the result and decide whether to keep retrying. It takes one argument; whenever the decorated function completes without raising, the validator is called with the result. If it runs normally and returns False, retrying continues even though the decorated function succeeded. If it runs normally and returns nothing, or anything other than False, retrying stops and the result is returned. If the validator raises an exception of a type caught by the decorator, retrying continues; if it raises any other exception, that exception is raised. :return: the result of the decorated function.
625941b6b545ff76a8913c38
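Hypothetical usage of the decorator above (assumes retry is in scope from the module that defines it; the flaky function is invented for the example): the first two calls raise and are retried, the third succeeds and its result is returned.

attempts = {"n": 0}

@retry(max_retries=5, delay=0.01, step=0.01, exceptions=(ValueError,))
def flaky():
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise ValueError("transient failure")
    return "ok"

print(flaky())        # "ok" after two failed attempts
print(attempts["n"])  # 3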
def sparsify(self, modulo, compress_ids=False): <NEW_LINE> <INDENT> compartments = self.compartment_index <NEW_LINE> root = self.root <NEW_LINE> keep = {} <NEW_LINE> ct = 0 <NEW_LINE> for i, c in six.iteritems(compartments): <NEW_LINE> <INDENT> pid = c[NODE_PN] <NEW_LINE> cid = c[NODE_ID] <NEW_LINE> ctype = c[NODE_TYPE] <NEW_LINE> if pid < 0 or len(c[NODE_CHILDREN]) != 1 or pid == root[NODE_ID] or ctype == Morphology.SOMA: <NEW_LINE> <INDENT> keep[cid] = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> keep[cid] = (ct % modulo) == 0 <NEW_LINE> <DEDENT> ct += 1 <NEW_LINE> <DEDENT> for i, c in six.iteritems(compartments): <NEW_LINE> <INDENT> comp_id = c[NODE_ID] <NEW_LINE> if keep[comp_id] is False: <NEW_LINE> <INDENT> parent_id = c[NODE_PN] <NEW_LINE> while keep[parent_id] is False: <NEW_LINE> <INDENT> parent_id = compartments[parent_id][NODE_PN] <NEW_LINE> <DEDENT> for child_id in c[NODE_CHILDREN]: <NEW_LINE> <INDENT> compartments[child_id][NODE_PN] = parent_id <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> sparsified_compartments = {k: v for k, v in six.iteritems(compartments) if keep[k]} <NEW_LINE> if compress_ids: <NEW_LINE> <INDENT> ids = sorted(sparsified_compartments.keys(), key=lambda x: int(x)) <NEW_LINE> id_hash = {fid: str(i + 1) for i, fid in enumerate(ids)} <NEW_LINE> id_hash[-1] = -1 <NEW_LINE> out_compartments = {} <NEW_LINE> for cid, compartment in six.iteritems(sparsified_compartments): <NEW_LINE> <INDENT> compartment[NODE_ID] = id_hash[cid] <NEW_LINE> compartment[NODE_PN] = id_hash[compartment[NODE_PN]] <NEW_LINE> out_compartments[compartment[NODE_ID]] = compartment <NEW_LINE> <DEDENT> return Morphology(compartment_index=out_compartments) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Morphology(compartment_index=sparsified_compartments)
Return a new Morphology object that has a given number of non-leaf, non-root nodes removed. IDs can be reassigned so as to be continuous. Parameters ---------- modulo: int keep 1 out of every modulo nodes. compress_ids: boolean Reassign ids so that ids are continuous (no missing id numbers). Returns ------- Morphology A new morphology instance
625941b6ab23a570cc24ff99
def writeInteger(hcomm, variable, val_to_write, nbuff=NONE, from1=NONE, to1=NONE, from2=NONE, to2=NONE, wait=SYNCHRONOUS): <NEW_LINE> <INDENT> val = ctypes.c_int(val_to_write) <NEW_LINE> acs.acsc_WriteInteger(hcomm, nbuff, variable.encode(), from1, to1, from2, to2, p(val), wait)
Writes an integer variable to the controller.
625941b667a9b606de4a7cd6
def getenvirodata(): <NEW_LINE> <INDENT> ht=sense.get_temperature() <NEW_LINE> pt=sense.get_temperature_from_pressure() <NEW_LINE> hu=sense.get_humidity() <NEW_LINE> pr=sense.get_pressure() <NEW_LINE> if pr<1: <NEW_LINE> <INDENT> pr=sense.get_pressure() <NEW_LINE> pt=sense.get_temperature_from_pressure() <NEW_LINE> <DEDENT> return ht,pt,hu,pr
Gets temperature from both the humidity and pressure sensors, plus humidity and pressure.
625941b6627d3e7fe0d68c67
def _scrub_json(self, content): <NEW_LINE> <INDENT> regex = re.compile(r'("%s":\s*"[^"]*").*?("%s":\s*"[^"]*")' % (_QUESTION_FIELD, _QUERY_FIELD), re.DOTALL) <NEW_LINE> return "[" + ",".join(["{" + m.group(1) + "," + m.group(2) + "}" for m in regex.finditer(content)]) + "]"
Reduce JSON by filtering out only the fields of interest.
625941b615fb5d323cde0922
def LML(self,hyperparams): <NEW_LINE> <INDENT> LML = self._LML_covar(hyperparams) <NEW_LINE> if self.prior!=None: <NEW_LINE> <INDENT> LML += self.prior.LML(hyperparams) <NEW_LINE> <DEDENT> return LML
calculate the log marginal likelihood for the given logtheta Input: hyperparams: dictionary
625941b6a17c0f6771cbde6d
def load(self, save_path): <NEW_LINE> <INDENT> self.obs_t = np.load(save_path + '.obs_t.npy') <NEW_LINE> self.action_t = np.load(save_path + '.action_t.npy') <NEW_LINE> self.reward = np.load(save_path + '.reward.npy') <NEW_LINE> self.obs_tp1 = np.load(save_path + '.obs_tp1.npy') <NEW_LINE> self.done = np.load(save_path + '.done.npy') <NEW_LINE> (self._maxsize, self._size, self._current_idx, self._next_idx, self._batch_size) = np.load(save_path + '.config.npy')
Load parameters for the replay buffer.
625941b638b623060ff0ac08
@app.route('/') <NEW_LINE> def main_page(): <NEW_LINE> <INDENT> return render_template('home.html')
renders main page
625941b6b5575c28eb68de16
def store(request): <NEW_LINE> <INDENT> ordering = "" <NEW_LINE> game_list = None <NEW_LINE> if not GameList.objects.all().filter(active=True): <NEW_LINE> <INDENT> store_empty = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> store_empty = False <NEW_LINE> if request.GET.get("search"): <NEW_LINE> <INDENT> game_list = GameList.objects.filter(game_name__icontains=request.GET.get("search"), active=True) <NEW_LINE> <DEDENT> elif request.GET.get("order"): <NEW_LINE> <INDENT> order = request.GET.get("order") <NEW_LINE> if order == "title": <NEW_LINE> <INDENT> ordering = "game_name" <NEW_LINE> <DEDENT> elif order == "genre": <NEW_LINE> <INDENT> ordering = "category" <NEW_LINE> <DEDENT> elif order == "popularity": <NEW_LINE> <INDENT> ordering = "-num_of_purchases" <NEW_LINE> <DEDENT> elif order == "release": <NEW_LINE> <INDENT> ordering = "-time_added" <NEW_LINE> <DEDENT> elif order == "price": <NEW_LINE> <INDENT> ordering = "price" <NEW_LINE> <DEDENT> if ordering: <NEW_LINE> <INDENT> game_list = GameList.objects.order_by(ordering).filter(active=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> game_list = GameList.objects.all().filter(active=True) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> game_list = GameList.objects.all().filter(active=True) <NEW_LINE> <DEDENT> <DEDENT> return render(request, "game_server/store.html", {'games': game_list, 'store_empty': store_empty})
Main store view which lists all games available in the store and provides search and sorting functionalities
625941b63346ee7daa2b2b82
def skeleton(img, kernel=None): <NEW_LINE> <INDENT> if kernel is None: <NEW_LINE> <INDENT> kernel = np.ones((3, 3)) <NEW_LINE> <DEDENT> img = img.astype(np.int) <NEW_LINE> erode_ks = [] <NEW_LINE> while np.any(img): <NEW_LINE> <INDENT> erode_ks.append(img) <NEW_LINE> img = morph_binary(img, kernel, MorphMethod.Erode) <NEW_LINE> <DEDENT> s_ks = [] <NEW_LINE> for k, eroded in enumerate(erode_ks): <NEW_LINE> <INDENT> s_k = eroded - morph_binary(eroded, kernel, MorphMethod.Open) <NEW_LINE> s_ks.append((s_k, k)) <NEW_LINE> img = np.bitwise_or(img, s_k) <NEW_LINE> <DEDENT> return img, s_ks
Perform skeleton algorithm :param img: binary image :param kernel: kernel :return: skeleton of image, and list of (s_k, k) pair for reconstructing
625941b68e7ae83300e4ade4
def llh2ecef(llh): <NEW_LINE> <INDENT> lat = llh[0] * DTOR <NEW_LINE> lng = llh[1] * DTOR <NEW_LINE> alt = llh[2] <NEW_LINE> slat = math.sin(lat) <NEW_LINE> slng = math.sin(lng) <NEW_LINE> clat = math.cos(lat) <NEW_LINE> clng = math.cos(lng) <NEW_LINE> d = math.sqrt(1 - (slat * slat * WGS84_ECC_SQ)) <NEW_LINE> rn = WGS84_A / d <NEW_LINE> x = (rn + alt) * clat * clng <NEW_LINE> y = (rn + alt) * clat * slng <NEW_LINE> z = (rn * (1 - WGS84_ECC_SQ) + alt) * slat <NEW_LINE> return (x, y, z)
Converts from WGS84 lat/lon/height to ellipsoid-earth ECEF
625941b676e4537e8c351490
def ingest_api_get(): <NEW_LINE> <INDENT> funcs = { "lev0":update_lev0_parameters, "lev1":update_lev1_parameters, "lev2":update_lev2_parameters } <NEW_LINE> reqDict = request.args.to_dict() <NEW_LINE> parsedParams = parse_params(reqDict) <NEW_LINE> reingest = parsedParams.get('reingest', 'False') <NEW_LINE> testonly = parsedParams.get('testonly', 'False') <NEW_LINE> log.info(f'ingest_api_get: input parameters - {reqDict}') <NEW_LINE> log.info(f'ingest_api_get: parsed parameters - {parsedParams}') <NEW_LINE> if parsedParams['apiStatus'] == 'ERROR': <NEW_LINE> <INDENT> notify_error("API_STATUS_ERROR", json.dumps(parsedParams, indent=4), parsedParams.get('instrument')) <NEW_LINE> <DEDENT> elif parsedParams['apiStatus'] != 'ERROR' and testonly.lower != 'true': <NEW_LINE> <INDENT> if 'status' in parsedParams.keys() and parsedParams['status'] == 'ERROR': <NEW_LINE> <INDENT> notify_error("IPAC_STATUS_ERROR", json.dumps(parsedParams, indent=4), parsedParams.get('instrument')) <NEW_LINE> <DEDENT> dbname = 'koa' <NEW_LINE> log.info(f'ingest_api_get: using database {dbname}') <NEW_LINE> conn = db_conn('./config.live.ini') <NEW_LINE> parsedParams = funcs[parsedParams['ingesttype']](parsedParams, reingest, CONFIG, conn, dbUser=dbname) <NEW_LINE> log.info(f'ingest_api_get: returned parameters - {parsedParams}') <NEW_LINE> if 'status' in parsedParams.keys() and parsedParams['apiStatus'] == 'ERROR': <NEW_LINE> <INDENT> notify_error("DATABASE_ERROR", json.dumps(parsedParams, indent=4), parsedParams.get('instrument')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if parsedParams['ingesttype'] == 'lev0': <NEW_LINE> <INDENT> notify_pi(parsedParams) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return jsonify(parsedParams)
API entry point from koa_rti_main.ingest_api route.
625941b6a05bb46b383ec646
def izip(*iterables): <NEW_LINE> <INDENT> return imap(None, *iterables)
zip(iter1 [,iter2 [...]]) --> zip object Return a zip object whose .__next__() method returns a tuple where the i-th element comes from the i-th iterable argument. The .__next__() method continues until the shortest iterable in the argument sequence is exhausted and then it raises StopIteration.
625941b63317a56b86939a83
def user_joined(self, user, channel): <NEW_LINE> <INDENT> pass
JOIN event :param user: Joining users nickname :param channel: Channel the user parted from :return:
625941b68e71fb1e9831d5c6
def _get_new_urls(self, page_url, soup): <NEW_LINE> <INDENT> new_urls = set() <NEW_LINE> links = soup.find_all('a',href=re.compile(r'/item/\S+/\d+')) <NEW_LINE> for link in links: <NEW_LINE> <INDENT> new_url = link['href'] <NEW_LINE> new_full_url = parse.urljoin(page_url, new_url) <NEW_LINE> new_urls.add(new_full_url) <NEW_LINE> <DEDENT> return new_urls
Extract the set of new URLs. :param page_url: URL of the downloaded page :param soup: soup :return: the set of new URLs
625941b6fff4ab517eb2f252
def test_SQLAlchemy_price_changed(): <NEW_LINE> <INDENT> conn = SQLAlchemyAdapter('sqlite', ':memory:') <NEW_LINE> conn.session.add(EbayItem( itemId='id', url='url', price_amount=10.99, price_currency='EUR', title='title', expire=datetime(2000, 1, 1), category='category' )) <NEW_LINE> assert conn.price_changed(dict(id='id', price_amount=11.99), EbayItem)
Tests that SQLAlchemyAdapter.price_changed() properly returns True when passed a different price
625941b6099cdd3c635f0a76
def reduce(v, axis, dtype_fn, local_reduce_fn, accumulate_fn, fn_kw=None, tile_hint=None): <NEW_LINE> <INDENT> if fn_kw is None: fn_kw = {} <NEW_LINE> varname = make_var() <NEW_LINE> assert not 'axis' in fn_kw, '"axis" argument is reserved.' <NEW_LINE> fn_kw['axis'] = axis <NEW_LINE> reduce_op = LocalReduceExpr(fn=local_reduce_fn, deps=[ LocalInput(idx='extent'), LocalInput(idx=varname), ], kw=fn_kw) <NEW_LINE> return ReduceExpr(children=ListExpr(vals=[v]), child_to_var=[varname], axis=axis, dtype_fn=dtype_fn, op=reduce_op, accumulate_fn=accumulate_fn, tile_hint=tile_hint)
Reduce ``v`` over axis ``axis``. The resulting array should have a datatype given by ``dtype_fn(input).`` For each tile of the input ``local_reduce_fn`` is called with arguments: (tiledata, axis, extent). The output is combined using ``accumulate_fn``. :param v: `Expr` :param axis: int or None :param dtype_fn: Callable: fn(array) -> `numpy.dtype` :param local_reduce_fn: Callable: fn(extent, data, axis) :param accumulate_fn: Callable: fn(old_v, update_v) -> new_v :rtype: `Expr`
625941b696565a6dacc8f4ee
def __init__(self, config, hub_addr): <NEW_LINE> <INDENT> super(NetClient, self).__init__() <NEW_LINE> self._port = config['port'] <NEW_LINE> self._hub_addr = hub_addr <NEW_LINE> self._conn_timeout = config['connect_timeout'] <NEW_LINE> self._conn_max_attempts = config['connect_max_attempts'] <NEW_LINE> self._num_clients = 0 <NEW_LINE> self._client_sessions = {} <NEW_LINE> self._session_id = 1 <NEW_LINE> self.protocols = emews.base.baseserv.BaseServ.protocols <NEW_LINE> self.hub_query = self._hub_query
Constructor.
625941b607d97122c41786a4
def find_root(tower): <NEW_LINE> <INDENT> deps = { dep for (prog, (weight, holding)) in tower.items() for dep in holding } <NEW_LINE> for (prog, (weight, holding)) in tower.items(): <NEW_LINE> <INDENT> if prog not in deps: <NEW_LINE> <INDENT> return prog
Every program in the tower lists its dependents. Look at the list and determine which program wasn't listed as dependent. That's the root.
625941b65166f23b2e1a4f72
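A tiny self-contained illustration of the idea (the tower data is invented): the root is the only program that never appears in anyone's holding list.

tower = {
    "tknk": (41, ["ugml", "padx"]),
    "ugml": (68, []),
    "padx": (45, []),
}
deps = {dep for (_weight, holding) in tower.values() for dep in holding}
root = next(prog for prog in tower if prog not in deps)
print(root)  # tknk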
def __init__(self): <NEW_LINE> <INDENT> super(Storage, self).__init__() <NEW_LINE> self._os = None
On top of the base component class, adds an os attribute, i.e. the OS stored in flash.
625941b629b78933be1e54d3
def andCriterion(self, criterion): <NEW_LINE> <INDENT> assert isinstance(criterion, self.__class__) <NEW_LINE> self.__clauses.append(criterion) <NEW_LINE> self.__conjunctions.append(SQLConstants.AND) <NEW_LINE> return self
Append an AND Criterion onto this Criterion's list.
625941b607f4c71912b112a0
def simple_closed_path(listPts): <NEW_LINE> <INDENT> p0 = get_min_index(listPts) <NEW_LINE> sorted_list = sorted(listPts, key=lambda pair: theta(listPts[p0],pair)) <NEW_LINE> p0 = get_min_index(sorted_list) <NEW_LINE> return (sorted_list, p0)
Find simple closed path to obtain sequential ordering of the list of input points such that p0 has the minimum y-coord.
625941b632920d7e50b27fe5
def save(self): <NEW_LINE> <INDENT> with open(self.augmentation_file, 'w') as f: <NEW_LINE> <INDENT> json.dump(self.dump(), f)
Save augmentation dict to JSON file at augmentation_file
625941b6796e427e537b03db
def test_reparent_root(): <NEW_LINE> <INDENT> newparent = '/tmp/fuzzy' <NEW_LINE> oldpath = '/pub' <NEW_LINE> assert reparent(newparent, oldpath) == os.path.join(newparent, '.' + oldpath)
test the reparent to a root path
625941b66fece00bbac2d554
def __dabrirfichero(self): <NEW_LINE> <INDENT> filterlist = "" <NEW_LINE> for fmt in ["dro"]: <NEW_LINE> <INDENT> filterlist = filterlist + "%s files (*.%s);;" % (fmt, fmt.lower()) <NEW_LINE> <DEDENT> nombrefichero = QtGui.QFileDialog.getOpenFileName(filter=filterlist, parent=self, caption="Dialogo abrir fichero") <NEW_LINE> filename = str(nombrefichero) <NEW_LINE> if filename: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.__contenido = self.__gestorsalida.cargar(filename) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> QErrorMessage(self,"Error").message(u"Error en la carga de fichero, Probablemente versión anterior") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__myUpdate()
Asks the user which file to load.
625941b6004d5f362079a151
def mutation(self): <NEW_LINE> <INDENT> for i in range(self.POP_SIZE): <NEW_LINE> <INDENT> for j in range(self.G_LENGTH): <NEW_LINE> <INDENT> if random.random() <= self.M_RATE: <NEW_LINE> <INDENT> if self.gene[i][j] == 1: <NEW_LINE> <INDENT> self.gene[i][j] = 0 <NEW_LINE> <DEDENT> elif self.gene[i][j] == 0: <NEW_LINE> <INDENT> self.gene[i][j] = 1
Mutation: generate a random number between 0 and 1, and if it is less than or equal to M_RATE, change the gene value (0 becomes 1, 1 becomes 0). :return: None
625941b626068e7796caeaf2
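A minimal stand-alone sketch of the same bit-flip mutation on a flat gene list (names and rate are illustrative):

import random

def mutate(genes, m_rate=0.05):
    # Flip each gene independently with probability m_rate.
    return [1 - g if random.random() <= m_rate else g for g in genes]

random.seed(0)
print(mutate([0, 1, 0, 1, 1, 0, 0, 1], m_rate=0.5))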
def __init__(self,make,model,year): <NEW_LINE> <INDENT> super().__init__(make,model,year) <NEW_LINE> self.battery_size = 60
Initialize the parent class's attributes.
625941b6e8904600ed9f1d42
def splitInt16(int16): <NEW_LINE> <INDENT> int16 = np.uint16(int16) <NEW_LINE> bits = np.binary_repr(int16,16) <NEW_LINE> top = int(bits[:8],2) <NEW_LINE> bot = int(bits[8:],2) <NEW_LINE> return np.uint8(top),np.uint8(bot)
Take in a 16-bit integer and return the top and bottom 8-bit integers. Maybe not the most efficient? My best attempt based on my knowledge of Python.
625941b62c8b7c6e89b355dd
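The same split can be done with shift and mask operations instead of a binary-string round trip; a small sketch for comparison:

import numpy as np

value = np.uint16(0xABCD)
top, bot = np.uint8(value >> 8), np.uint8(value & 0xFF)
print(hex(top), hex(bot))  # 0xab 0xcd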
def __init__(self, temboo_session): <NEW_LINE> <INDENT> super(ListNotifications, self).__init__(temboo_session, '/Library/Twilio/Calls/ListNotifications')
Create a new instance of the ListNotifications Choreo. A TembooSession object, containing a valid set of Temboo credentials, must be supplied.
625941b6d18da76e235322eb
def c2s_get_active_open_state(self): <NEW_LINE> <INDENT> c2s = active_pb2.c2s_get_active_open_state() <NEW_LINE> return c2s
Get the currently open activities. :param: no parameters required
625941b676d4e153a657e949
def mySqrt(self, x): <NEW_LINE> <INDENT> if x == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> l, r = 1, x <NEW_LINE> while l +1 < r: <NEW_LINE> <INDENT> mid = l + (r-l)/2 <NEW_LINE> if mid*mid == x: <NEW_LINE> <INDENT> return mid <NEW_LINE> <DEDENT> elif mid < x / mid: <NEW_LINE> <INDENT> l = mid <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r = mid <NEW_LINE> <DEDENT> <DEDENT> return r if r*r <= x else l
:type x: int :rtype: int
625941b676d4e153a657e94a
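A stand-alone sketch of the same integer-square-root binary search (rewritten with floor division so it also behaves correctly on Python 3; the function name is ours):

def int_sqrt(x):
    if x < 2:
        return x
    lo, hi = 1, x
    while lo + 1 < hi:
        mid = (lo + hi) // 2
        if mid <= x // mid:   # equivalent to mid*mid <= x
            lo = mid
        else:
            hi = mid
    return hi if hi * hi <= x else lo

print(int_sqrt(8), int_sqrt(16))  # 2 4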
def get_matching_demo(time_to_match, timeout=10, acceptable_margin=10): <NEW_LINE> <INDENT> os.chdir(config.csgo_demos_path) <NEW_LINE> latest_demo = None <NEW_LINE> found = False <NEW_LINE> startTime = time.time() <NEW_LINE> time_is_out = False <NEW_LINE> while not found and not time_is_out: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> latest_demo = max( glob('*.dem'.format(config.video_format)), key=os.path.getctime ) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print("no demos found.") <NEW_LINE> <DEDENT> if latest_demo: <NEW_LINE> <INDENT> latest_demo = os.path.join(config.csgo_demos_path, latest_demo) <NEW_LINE> if time_to_match - acceptable_margin <= os.path.getctime(latest_demo) <= time_to_match + acceptable_margin : <NEW_LINE> <INDENT> found = True <NEW_LINE> <DEDENT> <DEDENT> currentTime = time.time() <NEW_LINE> if currentTime > startTime + timeout: <NEW_LINE> <INDENT> time_is_out = True <NEW_LINE> <DEDENT> <DEDENT> return latest_demo
Return the closest demo to the time_to_match within an acceptable_margin.
625941b6adb09d7d5db6c5ad
def check_rule(self, myrule, mypar='Common'): <NEW_LINE> <INDENT> return self.mgmt.tm.ltm.rules.rule.exists(name=myrule, partition=mypar)
Simple function to check if certain iRule already exists
625941b6a219f33f3462878f
def compose(self, contract_list, new_name=None, composition_mapping=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> contracts = set(contract_list) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> contracts = set() <NEW_LINE> contracts.add(contract_list) <NEW_LINE> <DEDENT> if not contracts: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> contracts.add(self) <NEW_LINE> if composition_mapping is None: <NEW_LINE> <INDENT> composition_mapping = CompositionMapping(contracts, self.context) <NEW_LINE> <DEDENT> if new_name is None: <NEW_LINE> <INDENT> new_name = '-x-'.join([contract.name_attribute.base_name for contract in contracts]) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> (new_inputs, new_outputs) = composition_mapping.define_composed_contract_ports() <NEW_LINE> <DEDENT> except PortMappingError: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> all_pairs = [(contract.assume_formula, contract.guarantee_formula) for contract in contracts] <NEW_LINE> (part_assumptions, new_guarantees) = reduce(self._reduce_composition_formulae, all_pairs) <NEW_LINE> new_assumptions = Disjunction( part_assumptions, Negation(new_guarantees), merge_literals=False) <NEW_LINE> new_contract = type(self)(new_name, new_inputs, new_outputs, new_assumptions, new_guarantees, self.symbol_set_cls, self.context, saturated=True, infer_ports=False) <NEW_LINE> new_contract.origin_contracts = {contract.name_attribute.unique_name: contract for contract in contracts} <NEW_LINE> return new_contract
Compose the current contract with the one passed as a parameter. The operations to be done are: merge the literals, and merge the formulae. Given a contract C = (A, G) and a contract C1 = (A1, G1), the composition of the two will be a contract C2 = ((A & A1) | !(G & G1) , G & G1) :param other_contract: contract to be used for composition :type other_contract: Contract :param connection_list: optional list of pairs of base_names specifying the ports to be connected :type connection_list: list of tuples (pairs)
625941b663d6d428bbe44309
def __init__(self, name = "Cartesian Axes", scale = 1.0, linewidth = 1.0): <NEW_LINE> <INDENT> super(Axes, self).__init__( name ) <NEW_LINE> self.scale = scale <NEW_LINE> self.linewidth = linewidth <NEW_LINE> self.vertices = self.scale * np.array( [ [0.0, 0.0, 0.0],[1.0, 0.0, 0.0], [0.0, 0.0, 0.0],[0.0, 1.0, 0.0], [0.0, 0.0, 0.0],[0.0, 0.0, 1.0]], dtype = np.float32 ) <NEW_LINE> self.x_cone = Cylinder( "XCone", np.array([0.8,0,0]), np.array([1.1,0,0]), 0.15, 0, 10, color = (1.0, 0, 0, 1.0) ) <NEW_LINE> self.y_cone = Cylinder( "YCone", np.array([0,0.8,0]), np.array([0,1.1,0]), 0.15, 0, 10, color = (0, 1.0, 0, 1.0) ) <NEW_LINE> self.z_cone = Cylinder( "ZCone", np.array([0,0,0.8]), np.array([0,0,1.1]), 0.15, 0, 10, color = (0, 0, 1.0, 1.0) ) <NEW_LINE> self.light_state = GLboolean(0) <NEW_LINE> glGetBooleanv(GL_LIGHTING, self.light_state)
Draw three axes
625941b65fdd1c0f98dc004b
def do_POST(self): <NEW_LINE> <INDENT> pass
post
625941b650812a4eaa59c140
def score_sentiment(self, input_texts): <NEW_LINE> <INDENT> sa = SentimentAnalyzer() <NEW_LINE> if input_texts: <NEW_LINE> <INDENT> sent_scores = self.pool.map(func=sa.score, iterable=input_texts) <NEW_LINE> self.sent_scores = sent_scores <NEW_LINE> return sent_scores <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return input_texts
Invokes the score method on the SimpleSentimentAnalyzer :param input_texts: list<string> text to sentiment score
625941b6796e427e537b03dc
def button_end_action(): <NEW_LINE> <INDENT> number = entry.get() <NEW_LINE> number = int(number) <NEW_LINE> entry.delete(0, tkinter.END) <NEW_LINE> entry.insert(0, f"Your password is: {enter_password(number)}")
End action. This function checks if the player pressed the minus button. If so, it will delete everything in entry and will show the password.
625941b64e4d5625662d41f7
def cwd(pid): <NEW_LINE> <INDENT> if stem.util.proc.is_available(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return stem.util.proc.cwd(pid) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> logging_prefix = 'cwd(%s):' % pid <NEW_LINE> if is_available('pwdx'): <NEW_LINE> <INDENT> results = call(GET_CWD_PWDX % pid, None) <NEW_LINE> if not results: <NEW_LINE> <INDENT> log.debug("%s pwdx didn't return any results" % logging_prefix) <NEW_LINE> <DEDENT> elif results[0].endswith('No such process'): <NEW_LINE> <INDENT> log.debug('%s pwdx processes reported for this pid' % logging_prefix) <NEW_LINE> <DEDENT> elif len(results) != 1 or results[0].count(' ') != 1 or not results[0].startswith('%s: ' % pid): <NEW_LINE> <INDENT> log.debug('%s we got unexpected output from pwdx: %s' % (logging_prefix, results)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return results[0].split(' ', 1)[1].strip() <NEW_LINE> <DEDENT> <DEDENT> if is_available('lsof'): <NEW_LINE> <INDENT> results = call(GET_CWD_LSOF % pid, []) <NEW_LINE> if len(results) >= 2 and results[-1].startswith('n/'): <NEW_LINE> <INDENT> lsof_result = results[-1][1:].strip() <NEW_LINE> if ' ' not in lsof_result: <NEW_LINE> <INDENT> return lsof_result <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log.debug('%s we got unexpected output from lsof: %s' % (logging_prefix, results)) <NEW_LINE> <DEDENT> <DEDENT> return None
Provides the working directory of the given process. :param int pid: process id of the process to be queried :returns: **str** with the absolute path for the process' present working directory, **None** if it can't be determined
625941b63617ad0b5ed67d19
def follow_yaw(self, desired_yaw: float, state: State): <NEW_LINE> <INDENT> yaw = state.eta_2[2] <NEW_LINE> yaw_rate = state.v_2[2] <NEW_LINE> feed_forward = self._compute_yaw_feed_forward(yaw_rate) <NEW_LINE> self._yaw_pid.reference = float(desired_yaw) <NEW_LINE> feed_back = float(self._yaw_pid(sys_output=yaw, sys_output_derivative=yaw_rate)) <NEW_LINE> return feed_forward + (self._mr * feed_back)
Compute the torque to apply about the z-axis using a PD controller :param desired_yaw: The reference for the yaw angle [rad] :param state: The current state of the vehicle :return: The torque to apply about the z-axis
625941b6e5267d203edcdabb
def set33SubMatrix(M, i, j, m): <NEW_LINE> <INDENT> for row in range(0, 3): <NEW_LINE> <INDENT> for col in range(0, 3): <NEW_LINE> <INDENT> M[i*3+row, j*3+col] = m[row, col]
Set submatrix of large matrix M at i-th row and j-th column with small 3x3 matrix m
625941b671ff763f4b5494a8
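With NumPy arrays the same 3x3 block assignment can be written as a single slice; a small sketch (array shapes chosen for the example):

import numpy as np

M = np.zeros((9, 9))
m = np.arange(9).reshape(3, 3)
i, j = 1, 2
M[i*3:(i+1)*3, j*3:(j+1)*3] = m   # same effect as the element-wise loops above
print(M[3:6, 6:9])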