Columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24)
def GetHeight(cli): <NEW_LINE> <INDENT> if cli.config.completion_menu_lines and not cli.is_done: <NEW_LINE> <INDENT> buf = cli.current_buffer <NEW_LINE> if UserTypingFilter(cli) or buf.complete_state: <NEW_LINE> <INDENT> return dimension.LayoutDimension( min=cli.config.completion_menu_lines + 1) <NEW_LINE> <DEDENT> <DEDENT> return dimension.LayoutDimension()
Determine the height for the input buffer.
625941b9e1aae11d1e749b31
def get_db_query_4_columns(self, m2o_db, table_name, schema, database): <NEW_LINE> <INDENT> sgdb = m2o_db.m2o_dbtype.name <NEW_LINE> query = """ SELECT * FROM {t_from} WHERE """.format( t_from="information_schema.columns" ) <NEW_LINE> if sgdb != "SQLServer": <NEW_LINE> <INDENT> query += ( " table_schema =" f" '{schema if sgdb == 'PostgreSQL' else database}' AND " ) <NEW_LINE> <DEDENT> return query + f" table_name = '{table_name}' "
Function to obtain the SELECT query for a table's columns :param m2o_db: :param table_name: :param schema: :param database: :return:
625941b9956e5f7376d70cf6
def __repr__(self): <NEW_LINE> <INDENT> context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items()) <NEW_LINE> return '<Twilio.Api.V2010.MediaContext {}>'.format(context)
Provide a friendly representation :returns: Machine friendly representation :rtype: str
625941b9a05bb46b383ec6aa
def _create_placeholders(self): <NEW_LINE> <INDENT> logger.info("Creating model placeholders.") <NEW_LINE> self.inputs_placeholder = tf.placeholder(dtype=tf.float32, shape=[None, self.data_features], name="inputs") <NEW_LINE> logger.info(self.inputs_placeholder) <NEW_LINE> self.labels_placeholder = tf.placeholder(dtype=tf.float32, shape=[None, 1], name="labels") <NEW_LINE> logger.info(self.labels_placeholder) <NEW_LINE> self.learning_rate_input = tf.placeholder(dtype=tf.float32, shape=[], name="learning_rate") <NEW_LINE> logger.info(self.learning_rate_input)
Create the placeholders of the model.
625941b9507cdc57c6306b51
def dis_resnet34(pretrained=False, **kwargs): <NEW_LINE> <INDENT> model = ResNet(BasicBlock, [3, 4, 6, 3], **kwargs) <NEW_LINE> return model
Constructs a ResNet-34 model. Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
625941b90c0af96317bb8067
def __enter__(self) -> Event: <NEW_LINE> <INDENT> if self._cuda: <NEW_LINE> <INDENT> self._event_start = torch.cuda.Event(enable_timing=True) <NEW_LINE> self._event_start.record() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._event_start = datetime.now() <NEW_LINE> <DEDENT> return self
Mark a time. Mimics torch.cuda.Event.
625941b9097d151d1a222cd9
def SearchBarContext(request): <NEW_LINE> <INDENT> return {'searchbar':SearchBarForm()}
Produces a context variable for the searchbar that's available across all pages. :param request: :return:
625941b921bff66bcd6847d2
def test_changelog(self): <NEW_LINE> <INDENT> version_str = "v" + self.init_version <NEW_LINE> hruler = '-' * len(version_str) <NEW_LINE> expect_str = version_str + "\n" + hruler <NEW_LINE> filepath = os.path.join( self.repodir, "cmakelang/doc/changelog.rst") <NEW_LINE> with open(filepath) as infile: <NEW_LINE> <INDENT> content = infile.read() <NEW_LINE> <DEDENT> self.assertIn(expect_str, content)
Ensure that the changelog includes a section for this version
625941b9004d5f362079a1b4
def test_double_add_mac_address(self): <NEW_LINE> <INDENT> self.network.host_add("test1", "00:11:22:33:44:55", "127.0.0.1") <NEW_LINE> self.assertRaises(cinv.netipv4.Error, self.network.host_add, "test2", "00:11:22:33:44:55", "127.0.0.2")
Add two hosts with the same mac address
625941b9d6c5a10208143ec5
def deserialize_numpy(self, str, numpy): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> end = 0 <NEW_LINE> start = end <NEW_LINE> end += 1 <NEW_LINE> (self.type,) = _struct_B.unpack(str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += length <NEW_LINE> self.id_members = str[start:end] <NEW_LINE> _x = self <NEW_LINE> start = end <NEW_LINE> end += 20 <NEW_LINE> (_x.media_x, _x.media_y, _x.sd_x, _x.sd_y, _x.angle,) = _struct_5f.unpack(str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> pattern = '<%sf'%length <NEW_LINE> start = end <NEW_LINE> end += struct.calcsize(pattern) <NEW_LINE> self.meet_points = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=length) <NEW_LINE> return self <NEW_LINE> <DEDENT> except struct.error as e: <NEW_LINE> <INDENT> raise genpy.DeserializationError(e)
unpack serialized message in str into this message instance using numpy for array types :param str: byte array of serialized message, ``str`` :param numpy: numpy python module
625941b90383005118ecf462
def generate_proto(source, require = True): <NEW_LINE> <INDENT> protroot = os.environ.get("PROTROOT") <NEW_LINE> if protroot is not None: <NEW_LINE> <INDENT> source = source.replace("../src/", protroot) <NEW_LINE> <DEDENT> if not require and not os.path.exists(source): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> output = source.replace(".proto", "_pb2.py").replace("../src/", "") <NEW_LINE> if (not os.path.exists(output) or (os.path.exists(source) and os.path.getmtime(source) > os.path.getmtime(output))): <NEW_LINE> <INDENT> print("Generating %s..." % output) <NEW_LINE> if not os.path.exists(source): <NEW_LINE> <INDENT> sys.stderr.write("Can't find required file: %s\n" % source) <NEW_LINE> sys.exit(-1) <NEW_LINE> <DEDENT> if protoc is None: <NEW_LINE> <INDENT> sys.stderr.write( "protoc is not installed nor found in ../src. Please compile it " "or install the binary package.\n") <NEW_LINE> sys.exit(-1) <NEW_LINE> <DEDENT> protoc_command = [ protoc, "-I../src", "-I.", "--python_out=.", source ] <NEW_LINE> if protroot is not None: <NEW_LINE> <INDENT> protoc_command.append("-I" + protroot) <NEW_LINE> <DEDENT> if subprocess.call(protoc_command) != 0: <NEW_LINE> <INDENT> sys.exit(-1)
Invokes the Protocol Compiler to generate a _pb2.py from the given .proto file. Does nothing if the output already exists and is newer than the input.
625941b98e7ae83300e4ae49
def getGoal(self): <NEW_LINE> <INDENT> return self.goal
Returns the current Goal object.
625941b9a219f33f346287f2
def isPowerOfTwo(self, n): <NEW_LINE> <INDENT> if n == 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif n == 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif n % 2 == 0: <NEW_LINE> <INDENT> return self.isPowerOfTwo(n // 2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
:type n: int :rtype: bool
625941b9b545ff76a8913c9c
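For comparison, a minimal non-recursive sketch of the same check using the standard bit trick (a positive power of two has exactly one set bit, so clearing the lowest set bit yields zero); this is an illustrative alternative, not part of the dataset entry:

    def is_power_of_two(n: int) -> bool:
        # a power of two has a single set bit, so n & (n - 1) clears it to 0
        return n > 0 and (n & (n - 1)) == 0

    assert is_power_of_two(16) and not is_power_of_two(12)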
def db_psql_delete_database(dbname): <NEW_LINE> <INDENT> sudo('echo "DROP DATABASE {};" | sudo -u postgres psql'.format(dbname))
Delete (drop) a database - Ex: (cmd:<dbname>)
625941b924f1403a926009e7
def resolve_conflicts(self): <NEW_LINE> <INDENT> neighbours = self.nodes <NEW_LINE> new_chain = None <NEW_LINE> max_length = len(self.chain) <NEW_LINE> for node in neighbours: <NEW_LINE> <INDENT> response = requests.get(f'http://{node}/chain') <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> length = response.json()['length'] <NEW_LINE> chain = response.json()['chain'] <NEW_LINE> if length > max_length and self.valid_chain(chain): <NEW_LINE> <INDENT> max_length = length <NEW_LINE> new_chain = chain <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if new_chain: <NEW_LINE> <INDENT> self.chain = new_chain <NEW_LINE> return True <NEW_LINE> <DEDENT> return False
This is our consensus algorithm; it resolves conflicts by replacing our chain with the longest one in the network. :return: True if our chain was replaced, False if not
625941b973bcbd0ca4b2befb
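A hedged usage sketch for the entry above, assuming a hypothetical Blockchain class that exposes the attributes the method relies on (nodes, chain, valid_chain); the class name and peer address are illustrative only:

    # assumed: chain = Blockchain(); chain.nodes is a set of peer addresses
    chain.nodes.add('192.168.0.5:5000')
    if chain.resolve_conflicts():
        print('our chain was replaced by a longer valid chain')
    else:
        print('our chain is authoritative')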
def add_observer_bot(self, new_bot): <NEW_LINE> <INDENT> if not any (isinstance(bot, new_bot.__class__) for bot in self._observer_bots): <NEW_LINE> <INDENT> print("Adding observer: {}".format(new_bot.__class__.__name__)) <NEW_LINE> self._observer_bots.append(new_bot)
We assume that since every specific bot type represents a certain responsibility, only one instance of each type will be allowed to observe the forum. Therefore, only one instance of any child of AbstractObserverBot will be added to the list of observers.
625941b9cc0a2c11143dcd16
def pack_to(self, writer): <NEW_LINE> <INDENT> writer.write(self._objectID, 'I') <NEW_LINE> writer.write(self._objectFamily, 'i') <NEW_LINE> writer.write(self._objectType, 'i')
Writes the current ConnectedObjectState to the given BinaryWriter.
625941b963d6d428bbe4436d
def _readheader(self, infile): <NEW_LINE> <INDENT> self.header = {} <NEW_LINE> hstr = infile.read(4096) <NEW_LINE> if (hstr.find('# ') == -1): <NEW_LINE> <INDENT> return self.header <NEW_LINE> <DEDENT> hstr = hstr[hstr.index('# '):] <NEW_LINE> hstr = hstr[:hstr.index('\x00')] <NEW_LINE> hstr = hstr.split('#') <NEW_LINE> go_on = True <NEW_LINE> while go_on: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> hstr.remove('') <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> go_on = False <NEW_LINE> <DEDENT> <DEDENT> for line in hstr: <NEW_LINE> <INDENT> line = line[1:line.index('\r\n')] <NEW_LINE> if line.find(':') > -1: <NEW_LINE> <INDENT> dump = line.split(':') <NEW_LINE> self.header[dump[0]] = dump[1] <NEW_LINE> <DEDENT> elif line.find('=') > -1: <NEW_LINE> <INDENT> dump = line.split('=') <NEW_LINE> self.header[dump[0]] = dump[1] <NEW_LINE> <DEDENT> elif line.find(' ') > -1: <NEW_LINE> <INDENT> i = line.find(' ') <NEW_LINE> self.header[line[:i]] = line[i:] <NEW_LINE> <DEDENT> elif line.find(',') > -1: <NEW_LINE> <INDENT> dump = line.split(',') <NEW_LINE> self.header[dump[0]] = dump[1] <NEW_LINE> <DEDENT> <DEDENT> return self.header
Parser-based approach. Gets all entries.
625941b991af0d3eaac9b892
def chgat(self, x_pos, y_pos, length, color): <NEW_LINE> <INDENT> if self.lines > y_pos >= 0: <NEW_LINE> <INDENT> if x_pos >= 0 and (x_pos + length) <= self.columns: <NEW_LINE> <INDENT> self.window.chgat(y_pos, x_pos, length, color)
Wraps call around curses.window.chgat.
625941b95166f23b2e1a4fd7
def raw_request(self, url, method, params, data=None, headers=None): <NEW_LINE> <INDENT> if method == 'GET': <NEW_LINE> <INDENT> resp = requests.get(url, params=params, headers=headers) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resp = requests.post(url, params=params, data=data, headers=headers) <NEW_LINE> <DEDENT> resp_dict = { 'status_code': resp.status_code, 'headers': resp.headers, 'content': resp.content } <NEW_LINE> return resp_dict
Performs the actual http requests to OptimoRoute's service by using the ``requests`` library. If for some reason someone wants to use a different library, this is the function they should override. The only requirement is to always return a dictionary of the form: { 'status_code': $INT_STATUS_CODE, 'headers': {...}, 'content': '$NON-PROCESSED_STRING_CONTENT_RETURNED_BY_SERVER' } :param url: the full url for the specific operation :param method: the HTTP method ('GET' and 'POST' are currently supported) :param params: url parameters (the access key is always passed in this way) :param data: (optional) for POST operations it will hold the data that will be sent to the server. :param headers: (optional) dictionary with any additional custom headers :return: dictionary containing the server's raw response
625941b997e22403b379ce16
def add_to( v, to, G, color, sum_ins, sum_out, tot_mas, ver_wei): <NEW_LINE> <INDENT> com_col = color[to] <NEW_LINE> sum_tot = sum_ins[com_col] + sum_out[com_col] <NEW_LINE> k_v = ver_wei[v] <NEW_LINE> k_v_in = np.sum(G[v, np.where(color == com_col)]) <NEW_LINE> first = (k_v_in / (2 * tot_mas)) <NEW_LINE> second = (sum_tot * k_v) / (2 * (tot_mas ** 2)) <NEW_LINE> add_ans = first - second <NEW_LINE> k_v_out = k_v - k_v_in <NEW_LINE> return add_ans, k_v_in, k_v_out
Add a vertex to a community v - the vertex that we want to add to - a vertex in the community to which we want to add v G - adjacency matrix color - community labels sum_ins - sum of edge weights inside each community sum_out - sum of edge weights leaving each community tot_mas - sum of all edge weights ver_wei - sum of edge weights incident to each vertex
625941b9566aa707497f43f7
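The two terms computed by add_to correspond to the standard Louvain modularity-gain formula; a self-contained restatement using the entry's variable names (tot_mas is the total edge weight m):

    def modularity_gain(k_v_in, k_v, sum_tot, tot_mas):
        # delta_Q of moving v into community c:
        #   k_v_in / (2m) - (sum_tot * k_v) / (2 m^2)
        return k_v_in / (2 * tot_mas) - (sum_tot * k_v) / (2 * tot_mas ** 2)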
def range_alpha_light(alpha_range,time): <NEW_LINE> <INDENT> alpha_range_light=[] <NEW_LINE> for i in range(len(alpha_range)): <NEW_LINE> <INDENT> alpha_i=[] <NEW_LINE> min_i=alpha_range[i][0][0] <NEW_LINE> max_i=alpha_range[i][alpha_range[i].shape[0]-1][0] <NEW_LINE> alpha_i.append(min_i) <NEW_LINE> if min_i!=max_i: <NEW_LINE> <INDENT> alpha_i.append(max_i) <NEW_LINE> <DEDENT> alpha_i=array(transpose([alpha_i])) <NEW_LINE> alpha_range_light.append(alpha_i) <NEW_LINE> <DEDENT> return alpha_range_light
Creates a time-range vector with only min and max values of all alpha Parameters ---------- alpha_range : time-range vector The time-range vector included between an alpha min and an alpha max time : vector Time vector Returns ------- alpha_range_light : vector Time-range vector with only min and max values of all alpha
625941b9442bda511e8be2a3
def process_update_network_service_policy(self, session, data, result): <NEW_LINE> <INDENT> pass
Process extended attributes for network_service_policy update. :param session: database session :param data: dictionary of incoming network_service_policy data :param result: network_service_policy dictionary to extend Called inside transaction context on session to validate and update any extended network_service_policy attributes defined by this driver. Extended attribute values, whether updated or not, must also be added to result.
625941b955399d3f05588531
def login(self): <NEW_LINE> <INDENT> brw = self.browser <NEW_LINE> self.logger.debug('Requesting first login page.') <NEW_LINE> brw.open('https://onlinebanking.aib.ie/inet/roi/login.htm') <NEW_LINE> brw.select_form(selector='#loginCiamForm') <NEW_LINE> self.logger.debug('Clicking large CONTINUE button on the entry page.') <NEW_LINE> response = brw.submit_selected() <NEW_LINE> assert response.status_code == 401 <NEW_LINE> url = self.extract_value('window.location') <NEW_LINE> encoded_post_params = self.extract_value('encodedPostParams') <NEW_LINE> self.logger.debug('Bouncing through the interstitial.') <NEW_LINE> response = brw.open(url, headers={'Referer': response.url}) <NEW_LINE> brw.select_form() <NEW_LINE> brw['pf.username'] = self.logindata['regNumber'] <NEW_LINE> brw['pf.pass'] = self.logindata['pin'] <NEW_LINE> self.logger.debug('Submitting login form.') <NEW_LINE> brw.submit_selected() <NEW_LINE> tfa_done = False <NEW_LINE> while not tfa_done: <NEW_LINE> <INDENT> brw.select_form('#finalizeForm') <NEW_LINE> response = brw.submit_selected(update_state=False) <NEW_LINE> if response.content == b'approved': <NEW_LINE> <INDENT> tfa_done = True <NEW_LINE> <DEDENT> elif response.content != b'in_progress': <NEW_LINE> <INDENT> raise RuntimeError( 'unexpected answer during 2FA auth: %s' % response.content ) <NEW_LINE> <DEDENT> time.sleep(1) <NEW_LINE> <DEDENT> brw.select_form('#finalizeForm') <NEW_LINE> response = brw.submit_selected() <NEW_LINE> form = brw.select_form(nr=0) <NEW_LINE> form.new_control('hidden', 'state', self.extract_value('state.value')) <NEW_LINE> form.new_control('hidden', 'nonce', self.extract_value('encodedNonce')) <NEW_LINE> form.new_control('hidden', 'postParams', encoded_post_params) <NEW_LINE> response = brw.submit_selected() <NEW_LINE> assert response.status_code == 200 <NEW_LINE> brw.select_form(nr=0) <NEW_LINE> response = brw.submit_selected() <NEW_LINE> assert response.status_code == 200 <NEW_LINE> if brw.page.find(string='My Accounts'): <NEW_LINE> <INDENT> self.login_done = True
Go through the login process.
625941b9f548e778e58cd3fa
def perform_authentication(self, request): <NEW_LINE> <INDENT> pass
By default, the view performs authentication during request dispatch. Override this method in the view; if its body is just pass, authentication is skipped at dispatch time so the request can proceed normally. Purpose: defer authentication so that users who are not logged in can still access this view; authentication will be written manually later.
625941b9e64d504609d746be
def __init__(self, entry, valid_combinations): <NEW_LINE> <INDENT> Exception.__init__(self) <NEW_LINE> self._entry = entry <NEW_LINE> self._valid_combinations = valid_combinations
Initialize this exception with combinations.
625941b9a8370b771705271f
def stocGradAscent(features, labels, alpha=0.001): <NEW_LINE> <INDENT> featureMatrix = mat(features) <NEW_LINE> m, n = shape(featureMatrix) <NEW_LINE> weights = ones((n, 1)) <NEW_LINE> for i in range(m): <NEW_LINE> <INDENT> h = sigmoid(sum(featureMatrix[i]*weights)) <NEW_LINE> error = labels[i] - h <NEW_LINE> weights += alpha * featureMatrix[i].transpose() * error <NEW_LINE> <DEDENT> return weights
Stochastic gradient ascent: - an online learning algorithm: the regression coefficients are updated with only one sample point at a time, so the classifier can be updated incrementally as new samples arrive - compared with batch gradient ascent, it uses fewer computing resources
625941b930c21e258bdfa31b
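The entry assumes a star import from numpy (mat, shape, ones) plus a sigmoid helper; a standard definition such a helper would have, included here as an assumption rather than taken from the source:

    import numpy as np

    def sigmoid(x):
        # logistic function mapping any real input into (0, 1)
        return 1.0 / (1.0 + np.exp(-x))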
def resnet_v2_50(inputs, num_classes=None, is_training=True, return_raw=True, global_pool=True, output_stride=None, spatial_squeeze=True, reuse=None, scope='resnet_v2_50'): <NEW_LINE> <INDENT> blocks = [ resnet_v2_block('block1', base_depth=64, num_units=3, stride=2), resnet_v2_block('block2', base_depth=128, num_units=4, stride=2), resnet_v2_block('block3', base_depth=256, num_units=6, stride=2), resnet_v2_block('block4', base_depth=512, num_units=3, stride=1), ] <NEW_LINE> return resnet_v2(inputs, blocks, num_classes, is_training=is_training, return_raw=return_raw, global_pool=global_pool, output_stride=output_stride, include_root_block=True, spatial_squeeze=spatial_squeeze, reuse=reuse, scope=scope)
ResNet-50 model of [1]. See resnet_v2() for arg and return description.
625941b9656771135c3eb6f0
def run(self): <NEW_LINE> <INDENT> self.chessblack.doWait() <NEW_LINE> while True: <NEW_LINE> <INDENT> userinput = input("Please enter the coordinates of your move: ") <NEW_LINE> ret = self.engine.userGo(self.chessblack, userinput) <NEW_LINE> if ret: <NEW_LINE> <INDENT> print("I am the client; our side has finished its move") <NEW_LINE> ret = self.chessblack.getPos() <NEW_LINE> msg = str(ret[0])+str(',')+str(ret[1]) <NEW_LINE> self.client_socket.send(msg.encode('gbk')) <NEW_LINE> self.chessblack.doNotify() <NEW_LINE> self.chessblack.doWait() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Duplicate move")
Code executed by the child thread
625941b966656f66f7cbc028
def _calc_deltas(self): <NEW_LINE> <INDENT> deltas = [self.alfa[i]*self.t0/(self.d[i]*self.d[i]) for i in range(3)] <NEW_LINE> return deltas
:return: dimensionless thermal diffusivity
625941b9187af65679ca4f9b
def getHeader( self ): <NEW_LINE> <INDENT> return "\t".join(self.mHeader)
return header
625941b963f4b57ef0000fa0
def clear_orphaned_result(self, item=None): <NEW_LINE> <INDENT> Result = apps.get_model('lab_clinic_api', 'result') <NEW_LINE> LisResultItem = apps.get_model('lab_result_item', 'resultitem') <NEW_LINE> if isinstance(item, Result): <NEW_LINE> <INDENT> result = item <NEW_LINE> <DEDENT> elif Result.objects.filter(order__order_identifier=item).exists(): <NEW_LINE> <INDENT> result = Result.objects.get(order__order_identifier=item, status='NEW') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = None <NEW_LINE> <DEDENT> if result: <NEW_LINE> <INDENT> if LisResult.objects.using('lab_api').filter(result_identifier=result.result_identifier).exists(): <NEW_LINE> <INDENT> lis_result = LisResult.objects.using('lab_api').get(result_identifier=result.result_identifier) <NEW_LINE> if not LisResultItem.objects.using('lab_api').filter(result=lis_result).exists(): <NEW_LINE> <INDENT> lis_order = lis_result.order <NEW_LINE> lis_order.status = 'WITHDRAWN' <NEW_LINE> logger.info(' refreshing order status on django-lis for order {0}'.format(lis_order.order_identifier)) <NEW_LINE> lis_order.save() <NEW_LINE> logger.info(' deleting orphaned result on django-lis (no items) for order {0}'.format(lis_result.order.order_identifier)) <NEW_LINE> lis_result.delete() <NEW_LINE> <DEDENT> <DEDENT> ResultItem = apps.get_model('lab_clinic_api', 'resultitem') <NEW_LINE> if not ResultItem.objects.filter(result=result).exists(): <NEW_LINE> <INDENT> order = result.order <NEW_LINE> logger.info(' refreshing order status on EDC for order {0}'.format(order.order_identifier)) <NEW_LINE> order.save() <NEW_LINE> logger.info(' deleting orphaned result (no items) on EDC for order {0}'.format(result.order.order_identifier)) <NEW_LINE> result.delete()
Verifies and, if required, deletes the result of a 'PENDING' order on the EDC that has no result items and the result is NEW. Args: item: can be either a Result instance or an order_identifier.
625941b915fb5d323cde0988
@blueprint.route('/note/view/<note_id>') <NEW_LINE> @role_required('operator') <NEW_LINE> def note_view_route(note_id): <NEW_LINE> <INDENT> note = Note.query.get(note_id) <NEW_LINE> return render_template('storage/note/view.html', note=note, button_form=ButtonForm())
view note
625941b9d8ef3951e32433bb
def CreateRecipe(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
Create new recipe
625941b9d10714528d5ffb5e
def factory_basename(dir_ann, dir_gt): <NEW_LINE> <INDENT> names_gt = tuple(os.listdir(dir_gt)) <NEW_LINE> path_train = os.path.join('splits', 'kitti_train.txt') <NEW_LINE> path_val = os.path.join('splits', 'kitti_val.txt') <NEW_LINE> _, set_val_gt = split_training(names_gt, path_train, path_val) <NEW_LINE> set_val_gt = {os.path.basename(x).split('.')[0] for x in set_val_gt} <NEW_LINE> list_ann = glob.glob(os.path.join(dir_ann, '*.json')) <NEW_LINE> set_basename = {os.path.basename(x).split('.')[0] for x in list_ann} <NEW_LINE> set_val = set_basename.intersection(set_val_gt) <NEW_LINE> assert set_val, " Missing json annotations file to create txt files for KITTI datasets" <NEW_LINE> return set_val
Return all the basenames in the annotations folder corresponding to validation images
625941b9cc40096d615957d1
def onExceptionRaiseExit(self, raisable_exceptions, collection=None): <NEW_LINE> <INDENT> if collection is None: <NEW_LINE> <INDENT> collection = self <NEW_LINE> <DEDENT> if self.exception_collections is not None: <NEW_LINE> <INDENT> self.exception_collections.append( TraceCollectionBranch(parent=collection, name="exception") )
Indicate to the trace collection what exceptions may have occurred. Args: raisable_exceptions: Currently ignored, one or more exceptions that could occur, e.g. "BaseException". collection: To pass the collection that will be the parent Notes: Currently this is unused. Passing "collection" as an argument, so we know the original collection to attach the branch to, is maybe not the most clever way to do this. We also might want to specialize functions for specific exceptions; there is little point in providing BaseException as an argument in so many places. The actual storage of the exceptions that can occur is currently missing entirely. We just use this to detect "any exception" by not being empty.
625941b9c432627299f04ac2
def get_download_url(file_name): <NEW_LINE> <INDENT> return f"{settings.EXPORT_MEDIA_ROOT.rstrip('/')}/{str(file_name)}"
A URL path to the run data :param run_uid: The unique identifier for the run data. :return: The url context. (e.g. /downloads/123e4567-e89b-12d3-a456-426655440000)
625941b98e71fb1e9831d62b
def dispatch_to_server(self, pub_msg): <NEW_LINE> <INDENT> assert isinstance(pub_msg, Publish) <NEW_LINE> if self.authorization.is_publish_allowed(pub_msg.topic): <NEW_LINE> <INDENT> self.server.handle_incoming_publish(pub_msg, self.uid) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.warn("[uid: %s] is not allowed to publish on %s" % (self.uid, pub_msg.topic))
Dispatches a Publish message to the server for further processing, ie. delivering it to the appropriate subscribers. :param Publish pub_msg: A :class:`broker.messages.Publish` instance.
625941b9be383301e01b530b
def unsubscribe_user_to_calendar(user, course_key): <NEW_LINE> <INDENT> UserCalendarSyncConfig.objects.filter(user=user, course_key=course_key).update(enabled=False)
Disables the Calendar Sync config for a particular user and course. If the instance does not exist, this function will do nothing. Parameters: user (User): The user to subscribe course_key (CourseKey): The course key for the subscription
625941b9e64d504609d746bf
def get_features(path): <NEW_LINE> <INDENT> data_files = os.listdir(path) <NEW_LINE> N = len(data_files) <NEW_LINE> D = 299 * 13 <NEW_LINE> X = np.zeros((N, D)) <NEW_LINE> sample_num = 0 <NEW_LINE> for f in data_files: <NEW_LINE> <INDENT> ts, sample_rate = li.core.load(path + "/" + f, sr=16000) <NEW_LINE> clipped_ts = np.zeros(3 * sample_rate) <NEW_LINE> file_id = f.rstrip(".wav").split("-") <NEW_LINE> if int(file_id[1]) == 1: <NEW_LINE> <INDENT> ts = ts[int(0.5 * sample_rate):int(3.5 * sample_rate)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ts = ts[int(1 * sample_rate):int(4 * sample_rate)] <NEW_LINE> <DEDENT> clipped_ts[0:len(ts)] = ts <NEW_LINE> mfcc_vec = mfcc(clipped_ts, sample_rate).flatten() <NEW_LINE> assert(mfcc_vec.shape[0] == D) <NEW_LINE> X[sample_num] = mfcc_vec <NEW_LINE> sample_num += 1 <NEW_LINE> if sample_num % 100 == 0: <NEW_LINE> <INDENT> print(100 * sample_num / N, "% done extracting features from {}".format(path), sep = '') <NEW_LINE> <DEDENT> <DEDENT> return X, N, D
This function extracts mfcc features from each of the wav files located at the specified path and returns a feature matrix containing the mfcc features for each sample :param path: str: path to directory containing the wav files for feature extraction :return: X: ndarray (N,D): matrix of mfcc features for each sample N: int: number of data samples D: int: number of dimensions per feature
625941b93c8af77a43ae361b
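A hedged usage sketch; the directory path is hypothetical, and the entry itself assumes librosa imported as li and an mfcc helper producing a (299, 13) matrix per 3-second clip at 16 kHz:

    # assumed: the wav files follow the '<x>-<session>-...' naming the code parses
    X, N, D = get_features('data/train_wavs')
    print(X.shape)  # (N, 299 * 13)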
def test_yaml_platform_override(self): <NEW_LINE> <INDENT> if platform.system() == 'Windows': <NEW_LINE> <INDENT> other = 'unix' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> other = 'windows' <NEW_LINE> <DEDENT> with self._task_config.using_platform(other): <NEW_LINE> <INDENT> res = self._task_config.properties['ptest'] <NEW_LINE> if platform.system() == 'Windows': <NEW_LINE> <INDENT> self.assertEqual(res, 'unix') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assertEqual(res, 'win')
Make sure we can swap platforms
625941b944b2445a33931f1e
def __init__(self, coordinates, hFig): <NEW_LINE> <INDENT> self.delay = 0 <NEW_LINE> self.hL = [0, 0, 0, 0] <NEW_LINE> self.xData_0_L = np.zeros(4) <NEW_LINE> self.yData_0_L = np.zeros(4) <NEW_LINE> self.R = 0.35 <NEW_LINE> self.L = 0.15 <NEW_LINE> self.hFig = hFig <NEW_LINE> self.position = np.array(coordinates) <NEW_LINE> x = coordinates[0] <NEW_LINE> y = coordinates[1] <NEW_LINE> self.x_0 = x + 0.5 <NEW_LINE> self.y_0 = y + 0.5 <NEW_LINE> self.hCorp = mpatches.Circle((self.x_0, self.y_0), self.R, facecolor='y', edgecolor='k', linewidth=1, clip_on=False, zorder=15) <NEW_LINE> self.hAxes = plt.gca() <NEW_LINE> self.hAxes.add_patch(self.hCorp) <NEW_LINE> self.sond_data_init() <NEW_LINE> for i in range(4): <NEW_LINE> <INDENT> self.hL[i] = mpatches.Rectangle( (self.xData_0_L[i] + self.x_0, self.yData_0_L[i] + self.y_0), width=self.L, height=self.L, facecolor='k', edgecolor='k', zorder=16) <NEW_LINE> self.hAxes.add_patch(self.hL[i]) <NEW_LINE> <DEDENT> self.dr = Draggable(self.hCorp, self.hL, self.hFig, self)
Constructor of the oriented robot and its graphical image SYNTAX: r = Body( coordinates ) - coordinates = [x, y] - Cartesian coordinates of the lower left corner of the cell - hFig - handle of the graphics window :param coordinates: initial robot position on the field :param hFig: reference to plot Figure
625941b9187af65679ca4f9c
def start(self, start=None): <NEW_LINE> <INDENT> return self.bounds(start)[0]
Return the start point of the Iteration if the lower limit is known, ``None`` otherwise.
625941b98a349b6b435e7ff3
def get_tuples(self): <NEW_LINE> <INDENT> self._read() <NEW_LINE> return zip(self.data["scores"], self.data["costs"], self.data["tw"])
Return zipped data about vertices
625941b967a9b606de4a7d3b
def _build_single_stream_model(self, data, embedding_fn, encoder_fn_1, encoder_fn_2, logits_fn): <NEW_LINE> <INDENT> if isinstance(embedding_fn, (modules.Embeddding, modules.CachedElmoModule)): <NEW_LINE> <INDENT> embedded_tokens_1 = embedding_fn(data.source_1) <NEW_LINE> <DEDENT> if isinstance(embedding_fn, modules.TFHubElmoEmbedding): <NEW_LINE> <INDENT> embedded_tokens_1 = embedding_fn( data.source_1, data.source_1_sequence_length) <NEW_LINE> <DEDENT> if isinstance(encoder_fn_1, modules.LstmEncoder): <NEW_LINE> <INDENT> outputs_1, _ = encoder_fn_1( inputs=embedded_tokens_1, sequence_length=data.source_1_sequence_length) <NEW_LINE> <DEDENT> if isinstance(encoder_fn_1, modules.TransformerEncoder): <NEW_LINE> <INDENT> outputs_1 = encoder_fn_1( inputs=embedded_tokens_1, sequence_length=data.source_1_sequence_length) <NEW_LINE> <DEDENT> if isinstance(encoder_fn_1, modules.PairEncoderWithAttention): <NEW_LINE> <INDENT> raise ValueError("In single stream model, " "`PairEncoderWithAttention` is not supported") <NEW_LINE> <DEDENT> features = tf.reduce_max(outputs_1, axis=1) <NEW_LINE> logits = logits_fn(features) <NEW_LINE> predictions = tf.argmax(logits, axis=1) <NEW_LINE> cross_entropy = tf.reduce_mean( tf.nn.sparse_softmax_cross_entropy_with_logits( labels=data.target, logits=logits)) <NEW_LINE> if self._debug_mode: <NEW_LINE> <INDENT> def _add_to_debug(var_name, var): <NEW_LINE> <INDENT> self._debug[var_name].append(var) <NEW_LINE> <DEDENT> _add_to_debug("embedded_tokens_1", embedded_tokens_1) <NEW_LINE> _add_to_debug("outputs_1", outputs_1) <NEW_LINE> _add_to_debug("features", features) <NEW_LINE> _add_to_debug("logits", logits) <NEW_LINE> _add_to_debug("predictions", predictions) <NEW_LINE> _add_to_debug("cross_entropy", cross_entropy) <NEW_LINE> <DEDENT> return logits, predictions, cross_entropy
BiLSTM with max pooling, but ignore sequence_2
625941b938b623060ff0ac6d
def __init__(self): <NEW_LINE> <INDENT> object.__setattr__(self, "__conf", dict()) <NEW_LINE> object.__setattr__(self, "__frozen", False)
Constructor
625941b992d797404e304008
@public.add <NEW_LINE> def cp(source, target, force=True): <NEW_LINE> <INDENT> if (os.path.exists(target) and not force) or source == target: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> _copy(source, target)
Copy the directory/file source to the directory/file target
625941b97d847024c06be13f
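An illustrative call of the entry above; with force=False an existing target is left untouched, and copying a path onto itself is always a no-op:

    cp('config.yml', 'backup/config.yml')               # overwrites if present
    cp('config.yml', 'backup/config.yml', force=False)  # no-op if target exists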
def client_send_request_1(trf_from, trf_to, trf_amount): <NEW_LINE> <INDENT> print("-" * 60) <NEW_LINE> print("CLIENT1 : New Request:") <NEW_LINE> print("CLIENT1 : from = [{0}]".format(trf_from)) <NEW_LINE> print("CLIENT1 : to = [{0}]".format(trf_to)) <NEW_LINE> print("CLIENT1 : amount = [{0}]".format(trf_amount)) <NEW_LINE> if not re.match('^[A-Za-z]+$', trf_from): <NEW_LINE> <INDENT> print("CLIENT1 : Request is INVALID: Incorrect 'from': [{0}]".format(trf_from)) <NEW_LINE> print("-" * 60) <NEW_LINE> return (False, None) <NEW_LINE> <DEDENT> if not re.match('^[A-Za-z]+$', trf_to): <NEW_LINE> <INDENT> print("CLIENT1 : Request is INVALID: Incorrect 'to': [{0}]".format(trf_to)) <NEW_LINE> print("-" * 60) <NEW_LINE> return (False, None) <NEW_LINE> <DEDENT> if not re.match('^[0-9]+$', str(trf_amount)): <NEW_LINE> <INDENT> print("CLIENT1 : Request is INVALID: Incorrect 'amount': [{0}]".format(trf_amount)) <NEW_LINE> print("-" * 60) <NEW_LINE> return (False, None) <NEW_LINE> <DEDENT> msg = "from={0}&to={1}&amount={2}".format(trf_from, trf_to, trf_amount) <NEW_LINE> msg = utils.rawstr2bytes(msg) <NEW_LINE> iv = utils.rand_bytes(blk_sz) <NEW_LINE> print("CLIENT1 : Calculating CBC-MAC...") <NEW_LINE> mac = cbc_mac(msg, iv) <NEW_LINE> debug_msg("-" * 60) <NEW_LINE> print("CLIENT1 : Calculating CBC-MAC...done") <NEW_LINE> req = msg + iv + mac <NEW_LINE> debug_msg("CLIENT1 : msg = [{0}]".format(msg)) <NEW_LINE> debug_msg("CLIENT1 : iv = [{0}]".format(iv.hex())) <NEW_LINE> debug_msg("CLIENT1 : mac = [{0}]".format(mac.hex())) <NEW_LINE> debug_msg("CLIENT1 : req = [{0}]".format(req)) <NEW_LINE> print("CLIENT1 : Sending to server...") <NEW_LINE> ok = server_process_request_1(req) <NEW_LINE> print("CLIENT1 : Sending to server...done") <NEW_LINE> print("CLIENT1 : Request {0}".format("OK" if ok else "FAILED")) <NEW_LINE> print("-" * 60) <NEW_LINE> return (ok, req)
Simulates the client request message sending for the 1st scenario.
625941b9dd821e528d63b02a
def get_network_constitution(self): <NEW_LINE> <INDENT> with requests.get(f"{self.node_url}/constitution") as res: <NEW_LINE> <INDENT> return decode(res.text)
Get the constitution of the network
625941b9cad5886f8bd26e61
def save(self, *args, **kwargs): <NEW_LINE> <INDENT> stripped_name = " ".join( w for w in self.name.split() if w not in STOPWORDS ) <NEW_LINE> if not self.slug: <NEW_LINE> <INDENT> self.slug = uuslug( stripped_name, instance=self, max_length=100, separator="-", start_no=2, ) <NEW_LINE> <DEDENT> super(Office, self).save(*args, **kwargs)
**uid**: :code:`{body.uid | jurisdiction.uid}_office:{slug}`
625941b97b25080760e392d9
def json_payload(self): <NEW_LINE> <INDENT> return { 'point': list(self.point), 'value': self.value, 'value_var': self.noise_variance, }
Convert the sample_point into a dict to be consumed by json for a REST request.
625941b950485f2cf553cc17
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V2beta1ExternalMetricSource): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict()
Returns true if both objects are equal
625941b91f037a2d8b94607d
def version_cmp(pkg1, pkg2, ignore_epoch=False): <NEW_LINE> <INDENT> normalize = lambda x: str(x).split(':', 1)[-1] if ignore_epoch else str(x) <NEW_LINE> pkg1 = normalize(pkg1) <NEW_LINE> pkg2 = normalize(pkg2) <NEW_LINE> output = __salt__['cmd.run_stdout'](['opkg', '--version'], output_loglevel='trace', python_shell=False) <NEW_LINE> opkg_version = output.split(' ')[2].strip() <NEW_LINE> if salt.utils.versions.LooseVersion(opkg_version) >= salt.utils.versions.LooseVersion('0.3.4'): <NEW_LINE> <INDENT> cmd_compare = ['opkg', 'compare-versions'] <NEW_LINE> <DEDENT> elif salt.utils.path.which('opkg-compare-versions'): <NEW_LINE> <INDENT> cmd_compare = ['opkg-compare-versions'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.warning('Unable to find a compare-versions utility installed. Either upgrade opkg to ' 'version > 0.3.4 (preferred) or install the older opkg-compare-versions script.') <NEW_LINE> return None <NEW_LINE> <DEDENT> for oper, ret in (("<<", -1), ("=", 0), (">>", 1)): <NEW_LINE> <INDENT> cmd = cmd_compare[:] <NEW_LINE> cmd.append(_cmd_quote(pkg1)) <NEW_LINE> cmd.append(oper) <NEW_LINE> cmd.append(_cmd_quote(pkg2)) <NEW_LINE> retcode = __salt__['cmd.retcode'](cmd, output_loglevel='trace', ignore_retcode=True, python_shell=False) <NEW_LINE> if retcode == 0: <NEW_LINE> <INDENT> return ret <NEW_LINE> <DEDENT> <DEDENT> return None
Do a cmp-style comparison on two packages. Return -1 if pkg1 < pkg2, 0 if pkg1 == pkg2, and 1 if pkg1 > pkg2. Return None if there was a problem making the comparison. ignore_epoch : False Set to ``True`` to ignore the epoch when comparing versions .. versionadded:: 2016.3.4 CLI Example: .. code-block:: bash salt '*' pkg.version_cmp '0.2.4-0' '0.2.4.1-0'
625941b9851cf427c661a391
def connect(self): <NEW_LINE> <INDENT> self.logger.info('Connecting to %s', self._prefix) <NEW_LINE> account = 'opensuse' <NEW_LINE> server = 'rabbit.opensuse.org' <NEW_LINE> if self._prefix == 'suse': <NEW_LINE> <INDENT> account = 'suse' <NEW_LINE> server = 'rabbit.suse.de' <NEW_LINE> <DEDENT> credentials = pika.PlainCredentials(account, account) <NEW_LINE> context = ssl.create_default_context() <NEW_LINE> ssl_options = pika.SSLOptions(context, server) <NEW_LINE> parameters = pika.ConnectionParameters(server, 5671, '/', credentials, ssl_options=ssl_options, socket_timeout=10) <NEW_LINE> return pika.SelectConnection(parameters, on_open_callback=self.on_connection_open)
This method connects to RabbitMQ, returning the connection handle. When the connection is established, the on_connection_open method will be invoked by pika. :rtype: pika.SelectConnection
625941b9adb09d7d5db6c611
def match_command(self, c, buttons): <NEW_LINE> <INDENT> for b in self.command_list[c]['buttons']: <NEW_LINE> <INDENT> if b < 0 or len(buttons) <= b or buttons[b] != 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return sum(buttons) == len(self.command_list[c]['buttons'])
Find a command matching a joystick configuration
625941b9d164cc6175782bcd
def items(self): <NEW_LINE> <INDENT> return self._dict.items()
Exposes the items of the internal dictionary
625941b90383005118ecf463
def traceConfluent(self, aDEVS): <NEW_LINE> <INDENT> pass
Called for each atomic DEVS model that does a confluent transition.
625941b9925a0f43d2549cf2
def __init__(self, num_buckets=1024, own_hash = None): <NEW_LINE> <INDENT> self.dict = [] <NEW_LINE> for i in range(num_buckets): <NEW_LINE> <INDENT> self.dict.append((None, None)) <NEW_LINE> <DEDENT> self.keys = dict() <NEW_LINE> self.hash = hash <NEW_LINE> if own_hash != None: <NEW_LINE> <INDENT> own_hash.modulo = len(self.dict) <NEW_LINE> self.hash = own_hash.get_hash <NEW_LINE> <DEDENT> self.num_collisions = 0 <NEW_LINE> self.num_calls = 0
Initializes a dictionary with the given number of buckets.
625941b966656f66f7cbc029
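A hypothetical usage sketch, assuming the enclosing class is named HashTable and that own_hash objects expose a modulo attribute and a get_hash(key) method as the constructor expects; both names beyond the entry are assumptions:

    table = HashTable(num_buckets=256)
    print(len(table.dict))       # 256 empty (None, None) buckets
    print(table.num_collisions)  # 0 until insertions happen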
def test_ks_2samp(self): <NEW_LINE> <INDENT> data1 = [ self.L, self.A ] <NEW_LINE> data2 = [ self.M, self.B ] <NEW_LINE> results = (-0.20000000000000007, 0.77095294467662123) <NEW_LINE> i = 0 <NEW_LINE> for d in data1: <NEW_LINE> <INDENT> self.assertEqual( stats.ks_2samp( d, data2[i] )[i], results[i] ) <NEW_LINE> i += 1
Testing ks_2samp
625941b9ec188e330fd5a625
def insert_tables(cur, conn): <NEW_LINE> <INDENT> for query in insert_table_queries: <NEW_LINE> <INDENT> cur.execute(query) <NEW_LINE> conn.commit()
This function is responsible for extracting data from the staging tables stage_events and stage_songs into the fact and dimension tables. inputs: cur: psycopg2 cursor object for query execution conn: a connection object returned by psycopg2 output: Returns None
625941b9d6c5a10208143ec6
def _ReadRows(self): <NEW_LINE> <INDENT> raise NotImplementedError("_ReadRows must be overridden by subclasses.")
Generator that returns one row at a time. THIS METHOD MUST BE OVERRIDDEN BY SUBCLASSES. Returns: The next row, as a dict keyed by the column names.
625941b915fb5d323cde0989
def rules_from_wordform(word_form): <NEW_LINE> <INDENT> rules = [] <NEW_LINE> if word_form in form_to_ids: <NEW_LINE> <INDENT> for key in form_to_ids[word_form]: <NEW_LINE> <INDENT> rules.extend(rules_from_wordid(key)) <NEW_LINE> <DEDENT> <DEDENT> return list(set(rules))
Find all syntactic rules for the word_form
625941b924f1403a926009e8
def _obtain_args(): <NEW_LINE> <INDENT> if request.method == "GET": <NEW_LINE> <INDENT> args = _obtain_get_args() <NEW_LINE> <DEDENT> elif request.method in ("POST", "PUT"): <NEW_LINE> <INDENT> args = {} <NEW_LINE> args.update(_obtain_get_args()) <NEW_LINE> args.update(_obtain_form_args()) <NEW_LINE> args.update(_obtain_json_args()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args = {} <NEW_LINE> <DEDENT> return args
Get the request parameters; when a parameter is duplicated, only one value is kept. GET requests: read the URL parameters. POST requests: parameter precedence is JSON parameters > form parameters > URL parameters.
625941b929b78933be1e5538
def finishElement(self): <NEW_LINE> <INDENT> if len(self.__scopeStack) == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> lastScope = self.__activeScope <NEW_LINE> nextScope = self.__scopeStack.pop() <NEW_LINE> tmpPElist = [] <NEW_LINE> for element in self.__elementDict[lastScope]: <NEW_LINE> <INDENT> if isinstance(self.__elementDict[lastScope][element], list): <NEW_LINE> <INDENT> for entry in self.__elementDict[lastScope][element]: <NEW_LINE> <INDENT> tmpPElist.append(entry) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(self.__elementDict[lastScope][element], dict): <NEW_LINE> <INDENT> for innerElement in self.__elementDict[lastScope][element]: <NEW_LINE> <INDENT> for entry in innerElement: <NEW_LINE> <INDENT> tmpPElist.append(entry) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeWarning( "Expected an element of type list or dict!" ) <NEW_LINE> <DEDENT> <DEDENT> self.__elementDict[nextScope].update({lastScope: tmpPElist}) <NEW_LINE> self.__elementDict.pop(lastScope, None) <NEW_LINE> if self.__symLibrary: <NEW_LINE> <INDENT> self._ag_updateAgs(lastScope, nextScope) <NEW_LINE> <DEDENT> self.__activeScope = nextScope
The first scope on the stack is closed. The element is still addressable with the scope's identifier.
625941b9925a0f43d2549cf3
def _config_to_list(self, config): <NEW_LINE> <INDENT> whitelisted = [] <NEW_LINE> sensitive = [] <NEW_LINE> for group in config: <NEW_LINE> <INDENT> for option in config[group]: <NEW_LINE> <INDENT> the_list = (sensitive if self._is_sensitive(group, option) else whitelisted) <NEW_LINE> the_list.append({ 'group': group, 'option': option, 'value': config[group][option]}) <NEW_LINE> <DEDENT> <DEDENT> return whitelisted, sensitive
Build whitelisted and sensitive lists for use by backend drivers.
625941b9a05bb46b383ec6ac
def surfTen(self): <NEW_LINE> <INDENT> st = self.AS.surface_tension() <NEW_LINE> try: <NEW_LINE> <INDENT> st_eng = st * 0.00571014709755764 <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> st_eng = st <NEW_LINE> <DEDENT> return st_eng
calculate surface tension, lbf / inch
625941b9167d2b6e31218a1c
def normalize( arr: xr.DataArray, dim: Hashable = 'f', method: str = 'root_mean_square' ) -> xr.DataArray: <NEW_LINE> <INDENT> if method == 'root_mean_square': <NEW_LINE> <INDENT> ss = np.sqrt((arr*arr).mean(dim=dim)) <NEW_LINE> res = arr / ss <NEW_LINE> <DEDENT> elif method == 'snv': <NEW_LINE> <INDENT> std = arr.std(dim=dim) <NEW_LINE> mean = arr.mean(dim=dim) <NEW_LINE> res = (arr - mean) / std <NEW_LINE> <DEDENT> elif method == 'unit_variance': <NEW_LINE> <INDENT> std = arr.std(dim=dim) <NEW_LINE> res = arr / std <NEW_LINE> <DEDENT> return res.assign_attrs(arr.attrs)
Normalize spectra Normalized every spectrum contained in the dataarray. Args: arr: input array dim: array dimension that contains spectra, defaults to 'f' method: {'root_mean_square', 'snv', 'unit_variance'} Returns: array of same shape as input array but with normalized spectra
625941b9dc8b845886cb53b4
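An illustrative call on a toy spectrum, assuming xarray and numpy as the entry does; normalize here refers to the function defined in the entry above:

    import numpy as np
    import xarray as xr

    arr = xr.DataArray(np.random.rand(5, 100), dims=('sample', 'f'))
    rms_normed = normalize(arr, dim='f', method='root_mean_square')
    snv_normed = normalize(arr, dim='f', method='snv')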
def move_squad_out(self, squad_num, direction): <NEW_LINE> <INDENT> f = self.field.get_adjacent(direction) <NEW_LINE> if f is not None: <NEW_LINE> <INDENT> f.queue.add(f, self.squads[squad_num]) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False
Moves a squad to an adjacent field's queue. Returns True if there is an adjacent field.
625941b9097d151d1a222cdb
def which(command, path=None, verbose=0, exts=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> absName, fromWhere = next(whichgen(command, path, verbose, exts)) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> raise WhichError("Could not find '%s' on the path." % command) <NEW_LINE> <DEDENT> if verbose: <NEW_LINE> <INDENT> return absName, fromWhere <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return absName
Return the full path to the first match of the given command on the path. "command" is the name of the executable to search for. "path" is an optional alternate path list to search. The default is to use the PATH environment variable. "verbose", if true, will cause a 2-tuple to be returned. The second element is a textual description of where the match was found. "exts" optionally allows one to specify a list of extensions to use instead of the standard list for this system. This can effectively be used as an optimization to, for example, avoid stats of "foo.vbs" when searching for "foo" and you know it is not a VisualBasic script but ".vbs" is on PATHEXT. This option is only supported on Windows. If no match is found for the command, a WhichError is raised.
625941b93617ad0b5ed67d7f
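An illustrative usage of the entry above; the printed paths are examples, and WhichError is the exception the entry's docstring names:

    try:
        print(which('python'))          # e.g. /usr/bin/python
        print(which('git', verbose=1))  # (path, description of where it matched)
    except WhichError as exc:
        print(exc)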
def process_view(self, request, view_func, view_args, view_kwargs): <NEW_LINE> <INDENT> if not view_func.__module__.startswith('django.contrib.admin.'): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not request.user.is_authenticated: <NEW_LINE> <INDENT> login_url = ( settings.LOGIN_URL + '?' + urlencode({auth.REDIRECT_FIELD_NAME: request.get_full_path()}) ) <NEW_LINE> return HttpResponseRedirect(login_url) <NEW_LINE> <DEDENT> if request.user.is_staff: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> error = '<h1>Forbidden</h1>' '<p>You do not have staff privileges.</p>' <NEW_LINE> return HttpResponseForbidden(error)
Forward unauthenticated requests to the admin page to the CAS login.
625941b9d6c5a10208143ec7
def main(args=None): <NEW_LINE> <INDENT> parser = get_parser() <NEW_LINE> args = parser.parse_args(args) <NEW_LINE> if args.meta_specfile: <NEW_LINE> <INDENT> metapackage = Metapackage( meta_name=args.meta_specfile, variables=args.variables) <NEW_LINE> print(metapackage.create_specfile()) <NEW_LINE> return <NEW_LINE> <DEDENT> if len(args.specfiles) > 1 and not args.i: <NEW_LINE> <INDENT> parser.error('You can only convert more specfiles using -i (in place) mode.') <NEW_LINE> <DEDENT> if len(args.specfiles) == 0 and sys.stdin.isatty(): <NEW_LINE> <INDENT> parser.error('You must either specify specfile(s) or reading from stdin.') <NEW_LINE> <DEDENT> args.skip_functions = args.skip_functions.split(',') <NEW_LINE> convertor = Convertor(options=vars(args)) <NEW_LINE> try: <NEW_LINE> <INDENT> convertor.handle_scl_deps() <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> print('Could not open file: {0}'.format(e)) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> specs = [] <NEW_LINE> if len(args.specfiles) == 0 and not sys.stdin.isatty(): <NEW_LINE> <INDENT> specs.append(sys.stdin.readlines()) <NEW_LINE> <DEDENT> for specfile in args.specfiles: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(specfile) as f: <NEW_LINE> <INDENT> specs.append(f.readlines()) <NEW_LINE> <DEDENT> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> print('Could not open file: {0}'.format(e)) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> for i, spec in enumerate(specs): <NEW_LINE> <INDENT> converted = convertor.convert(spec) <NEW_LINE> if not args.i or not args.specfiles: <NEW_LINE> <INDENT> print(converted) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> f = open(args.specfiles[i], 'w') <NEW_LINE> f.write(str(converted)) <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> print('Could not open file: {0}'.format(e)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f.close()
Main CLI entry point.
625941b931939e2706e4ccef
def check_array(self, value, name): <NEW_LINE> <INDENT> pass
Check an array's value.
625941b91f5feb6acb0c49d4
def _hooked(self, parent_task): <NEW_LINE> <INDENT> self._parent_tasks.append(parent_task) <NEW_LINE> def unhook(*args): <NEW_LINE> <INDENT> self._parent_tasks.remove(parent_task) <NEW_LINE> parent_task._unhook_task(self) <NEW_LINE> <DEDENT> self.observer.register(unhook)
A task has hooked us
625941b976e4537e8c3514f7
def fetch_path_data(self, current_path, list_type="directories"): <NEW_LINE> <INDENT> directories = [] <NEW_LINE> files = [] <NEW_LINE> if list_type == "directories" or list_type == "all": <NEW_LINE> <INDENT> directories = self.fetch_directories(current_path) <NEW_LINE> <DEDENT> if list_type == "files" or list_type == "all": <NEW_LINE> <INDENT> files = self.fetch_files(current_path) <NEW_LINE> <DEDENT> path_data = { "current_path": current_path, "list_type": list_type, "directories": directories, "files": files, "success": True, } <NEW_LINE> return path_data
Returns an object filled with data pertaining to a particular path :param current_path: :param list_type: :return:
625941b97b25080760e392da
def create_hive_warehouse(self): <NEW_LINE> <INDENT> return self._cmd('hiveCreateHiveWarehouse')
Creates the Hive warehouse directory in HDFS :return: Reference to the submitted command. :since: API v3
625941b99b70327d1c4e0c53
def start_val_range(self): <NEW_LINE> <INDENT> user_choice_min = input("Please enter a whole number for the minimum for the start value: ") <NEW_LINE> while not user_choice_min.isnumeric(): <NEW_LINE> <INDENT> print("You've entered an invalid entry") <NEW_LINE> user_choice_min = input("Please enter a whole number for the minimum for the start value: ") <NEW_LINE> <DEDENT> user_choice_max = input("Please enter a whole number for the maximum for the start value: ") <NEW_LINE> while not user_choice_max.isnumeric(): <NEW_LINE> <INDENT> print("You've entered an invalid entry") <NEW_LINE> user_choice_max = input("Please enter a whole number for the maximum for the start value: ") <NEW_LINE> <DEDENT> return (int(user_choice_min), int(user_choice_max))
(GameView) -> Tuple of int
625941b9b545ff76a8913c9e
def _update_container_metrics(self, instance, subcontainer, kube_labels): <NEW_LINE> <INDENT> tags = list(instance.get('tags', [])) <NEW_LINE> if len(subcontainer.get('aliases', [])) >= 1: <NEW_LINE> <INDENT> container_name = subcontainer['aliases'][0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> container_name = subcontainer['name'] <NEW_LINE> <DEDENT> tags.append('container_name:%s' % container_name) <NEW_LINE> container_image = self.kubeutil.image_name_resolver(subcontainer['spec'].get('image')) <NEW_LINE> if container_image: <NEW_LINE> <INDENT> tags.append('container_image:%s' % container_image) <NEW_LINE> split = container_image.split(":") <NEW_LINE> if len(split) > 2: <NEW_LINE> <INDENT> split = [':'.join(split[:-1]), split[-1]] <NEW_LINE> <DEDENT> tags.append('image_name:%s' % split[0]) <NEW_LINE> if len(split) == 2: <NEW_LINE> <INDENT> tags.append('image_tag:%s' % split[1]) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> cont_labels = subcontainer['spec']['labels'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.log.debug("Subcontainer, doesn't have any labels") <NEW_LINE> cont_labels = {} <NEW_LINE> <DEDENT> if KubeUtil.NAMESPACE_LABEL in cont_labels and KubeUtil.POD_NAME_LABEL in cont_labels: <NEW_LINE> <INDENT> tags += self._get_post_1_2_tags(cont_labels, subcontainer, kube_labels) <NEW_LINE> <DEDENT> elif KubeUtil.POD_NAME_LABEL in cont_labels: <NEW_LINE> <INDENT> tags += self._get_pre_1_2_tags(cont_labels, subcontainer, kube_labels) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tags.append("pod_name:no_pod") <NEW_LINE> <DEDENT> is_filtered = self.kubeutil.are_tags_filtered(tags) <NEW_LINE> if is_filtered: <NEW_LINE> <INDENT> self._filtered_containers.add(subcontainer['id']) <NEW_LINE> return tags <NEW_LINE> <DEDENT> stats = subcontainer['stats'][-1] <NEW_LINE> self._publish_raw_metrics(NAMESPACE, stats, tags) <NEW_LINE> if subcontainer.get("spec", {}).get("has_filesystem") and stats.get('filesystem', []) != []: <NEW_LINE> <INDENT> fs = stats['filesystem'][-1] <NEW_LINE> fs_utilization = float(fs['usage'])/float(fs['capacity']) <NEW_LINE> self.publish_gauge(self, NAMESPACE + '.filesystem.usage_pct', fs_utilization, tags) <NEW_LINE> <DEDENT> if subcontainer.get("spec", {}).get("has_network"): <NEW_LINE> <INDENT> net = stats['network'] <NEW_LINE> self.publish_rate(self, NAMESPACE + '.network_errors', sum(float(net[x]) for x in NET_ERRORS), tags) <NEW_LINE> <DEDENT> return tags
Publish metrics for a subcontainer and handle filtering on tags
625941b924f1403a926009e9
def zpstr(val): <NEW_LINE> <INDENT> return unicode(val) if val is not None else None
Convert value to a unicode string; returns the string for a non-None value, otherwise None
625941b97cff6e4e81117805
def set(self, value): <NEW_LINE> <INDENT> assert self._port_type == PortType.Output, "You can't set value of parameter in Connection " "object via this port in this unit because it's not output port" <NEW_LINE> self.value = value <NEW_LINE> self._linked_connection.value = value
Sets the value stored in the connection
625941b95166f23b2e1a4fd9
def shaderFromFile(shaderType, shaderFile): <NEW_LINE> <INDENT> shaderSrc = '' <NEW_LINE> with open(shaderFile) as sf: <NEW_LINE> <INDENT> shaderSrc = sf.read() <NEW_LINE> <DEDENT> return shaders.compileShader(shaderSrc, shaderType)
create shader from file
625941b94e4d5625662d425c
def _populate_exp(self): <NEW_LINE> <INDENT> old_s = self._current_ob <NEW_LINE> if self.rng.rand() <= self.exploration or (len(self.mem) <= self.history_len): <NEW_LINE> <INDENT> act = self.rng.choice(range(self.num_actions)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> history = self.mem.recent_state() <NEW_LINE> history.append(old_s) <NEW_LINE> history = np.stack(history, axis=2) <NEW_LINE> q_values = self.predictor(history[None, :, :, :])[0][0] <NEW_LINE> act = np.argmax(q_values) <NEW_LINE> <DEDENT> self._current_ob, reward, isOver, info = self.player.step(act) <NEW_LINE> self._current_game_score.feed(reward) <NEW_LINE> if isOver: <NEW_LINE> <INDENT> if info['ale.lives'] == 0: <NEW_LINE> <INDENT> self._player_scores.feed(self._current_game_score.sum) <NEW_LINE> self._current_game_score.reset() <NEW_LINE> <DEDENT> self.player.reset() <NEW_LINE> <DEDENT> self.mem.append(Experience(old_s, act, reward, isOver))
populate a transition by epsilon-greedy
625941b9498bea3a759b9930
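A common companion to an epsilon-greedy collector like the one above is an annealed exploration schedule; a generic linear schedule is sketched below as an assumption, not something taken from the source:

    def linear_epsilon(step, start=1.0, end=0.1, anneal_steps=1000000):
        # decay epsilon linearly from start to end over anneal_steps steps
        frac = min(step / anneal_steps, 1.0)
        return start + frac * (end - start)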
def test_datetime_str(): <NEW_LINE> <INDENT> date = datetime.datetime.now() <NEW_LINE> date_str = str(date) <NEW_LINE> new_date = get_date(date_str) <NEW_LINE> assert new_date == date
Test getting a datetime object when the string is a datetime str
625941b9f548e778e58cd3fc
def findWords(self, words): <NEW_LINE> <INDENT> s1 = {'q', 'w', 'e', 'r', 't', 'y', 'u', 'i', 'o', 'p'} <NEW_LINE> s2 = {'a', 's', 'd', 'f', 'g', 'h', 'j', 'k', 'l'} <NEW_LINE> s3 = {'z', 'x', 'c', 'v', 'b', 'n', 'm'} <NEW_LINE> res = [] <NEW_LINE> for w in words: <NEW_LINE> <INDENT> if set(w.lower())-s1 == set() or set(w.lower())-s2 == set() or set(w.lower())-s3 == set(): <NEW_LINE> <INDENT> res.append(w) <NEW_LINE> <DEDENT> <DEDENT> return res
:type words: List[str] :rtype: List[str]
625941b963f4b57ef0000fa1
def optimisation(silo,choix): <NEW_LINE> <INDENT> if choix=="Prix min": <NEW_LINE> <INDENT> compt=4 <NEW_LINE> minmax="min" <NEW_LINE> <DEDENT> if choix=="Prix max": <NEW_LINE> <INDENT> compt=4 <NEW_LINE> minmax="max" <NEW_LINE> <DEDENT> if choix=="Diamètre min": <NEW_LINE> <INDENT> compt=1 <NEW_LINE> minmax="min" <NEW_LINE> <DEDENT> if choix=="Diamètre max": <NEW_LINE> <INDENT> compt=1 <NEW_LINE> minmax="max" <NEW_LINE> <DEDENT> if choix=="Hauteur min": <NEW_LINE> <INDENT> compt=0 <NEW_LINE> minmax="min" <NEW_LINE> <DEDENT> if choix=="Hauteur max": <NEW_LINE> <INDENT> compt=0 <NEW_LINE> minmax="max" <NEW_LINE> <DEDENT> if choix=="Epaisseur min": <NEW_LINE> <INDENT> compt=3 <NEW_LINE> minmax="min" <NEW_LINE> <DEDENT> if choix=="Epaisseur max": <NEW_LINE> <INDENT> compt=3 <NEW_LINE> minmax="max" <NEW_LINE> <DEDENT> if choix=="Volume min": <NEW_LINE> <INDENT> compt=2 <NEW_LINE> minmax="min" <NEW_LINE> <DEDENT> if choix=="Volume max": <NEW_LINE> <INDENT> compt=2 <NEW_LINE> minmax="max" <NEW_LINE> <DEDENT> if minmax=="min": <NEW_LINE> <INDENT> min=silo[0][compt] <NEW_LINE> rgmin=0 <NEW_LINE> for i in range(0,len(silo)): <NEW_LINE> <INDENT> if silo[i][compt]<min: <NEW_LINE> <INDENT> min=silo[i][compt] <NEW_LINE> rgmin=i <NEW_LINE> <DEDENT> <DEDENT> return silo[rgmin] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> max=silo[0][compt] <NEW_LINE> rgmax=0 <NEW_LINE> for i in range(0,len(silo)): <NEW_LINE> <INDENT> if silo[i][compt]>max: <NEW_LINE> <INDENT> max=silo[i][compt] <NEW_LINE> rgmax=i <NEW_LINE> <DEDENT> <DEDENT> return silo[rgmax]
Chooses the optimal silo according to a given criterion
625941b95fc7496912cc3806
def __init__(self, traindata, labels, transform=train_tfms): <NEW_LINE> <INDENT> self.transform = transform <NEW_LINE> self.data = traindata <NEW_LINE> self.labels = labels
datapath: dicom data path
625941b90a366e3fb873e697
def getPoseState(self, noBake=False): <NEW_LINE> <INDENT> poseState = self.__currentAnim.getAtTime(self.__playTime, noBake) <NEW_LINE> if self.__inPlace: <NEW_LINE> <INDENT> poseState = poseState.copy() <NEW_LINE> poseState[:,:3,3] = np.zeros((poseState.shape[0],3), dtype=np.float32) <NEW_LINE> <DEDENT> return poseState
Get the pose matrices of the active animation at the current play time. Returned matrices are baked (they are skin matrices, relative to bone rest pose in object space) if the active animation is baked, otherwise they are plain pose matrices in local bone space.
625941b901c39578d7e74cc3
def make(self): <NEW_LINE> <INDENT> logging.debug('Doing domain %s' % (self.dt['name'], )) <NEW_LINE> st='' <NEW_LINE> st+=self.make_soa() <NEW_LINE> st+=self.make_toplevel() <NEW_LINE> st+=self.make_subzones() <NEW_LINE> st+='\n' <NEW_LINE> st+=self.make_hosts() <NEW_LINE> return(st)
! @param incserial If True then increment the serial number
625941b9046cf37aa974cbca
def expectation(self, operator): <NEW_LINE> <INDENT> if isinstance(operator, QubitOperator): <NEW_LINE> <INDENT> expectation = 0. <NEW_LINE> for qubit_term in operator: <NEW_LINE> <INDENT> expectation += qubit_term_expectation(self, qubit_term) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(operator, InteractionOperator): <NEW_LINE> <INDENT> expectation = operator.constant <NEW_LINE> expectation += numpy.sum(self.one_body_tensor * operator.one_body_tensor) <NEW_LINE> expectation += numpy.sum(self.two_body_tensor * operator.two_body_tensor) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InteractionRDMError('Invalid operator type provided.') <NEW_LINE> <DEDENT> return expectation
Return expectation value of an InteractionRDM with an operator. Args: operator: A QubitOperator or InteractionOperator. Returns: float: Expectation value Raises: InteractionRDMError: Invalid operator provided.
625941b994891a1f4081b928
def __init__(self): <NEW_LINE> <INDENT> self.LogExpireDay = None <NEW_LINE> self.LogType = None <NEW_LINE> self.RequestId = None
:param LogExpireDay: Audit log retention period in days. Supported values: [0, 30, 180, 365, 1095, 1825]. Note: this field may return null, meaning no valid value could be obtained. :type LogExpireDay: int :param LogType: Audit log storage type. Currently supported value: "storage" - storage type. :type LogType: str :param RequestId: Unique request ID, returned with every request. Provide this request's RequestId when troubleshooting. :type RequestId: str
625941b96fece00bbac2d5bb
def createAsyncIOService(allow_epoll = True, allow_kqueue = True, allow_poll = True, allow_select = True): <NEW_LINE> <INDENT> pollerClassesToTry = [] <NEW_LINE> if allow_epoll: <NEW_LINE> <INDENT> pollerClassesToTry.append(_EPollPoller) <NEW_LINE> <DEDENT> if allow_kqueue: <NEW_LINE> <INDENT> pollerClassesToTry.append(_KQueuePoller) <NEW_LINE> <DEDENT> if allow_poll: <NEW_LINE> <INDENT> pollerClassesToTry.append(_PollPoller) <NEW_LINE> <DEDENT> if allow_select: <NEW_LINE> <INDENT> pollerClassesToTry.append(_SelectPoller) <NEW_LINE> <DEDENT> poller = None <NEW_LINE> for pollerClass in pollerClassesToTry: <NEW_LINE> <INDENT> if pollerClass._isAvailable(): <NEW_LINE> <INDENT> poller = pollerClass() <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if poller is None: <NEW_LINE> <INDENT> raise AsyncException('Unable to create poller') <NEW_LINE> <DEDENT> return AsyncIOService(poller = poller)
Create an AsyncIOService supported by the platform and parameters.
625941b9c4546d3d9de728b1
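Usage is a single factory call; the boolean flags prune the preference order (epoll, then kqueue, then poll, then select), and AsyncException is raised when nothing is available. A short sketch:

    # prefer poll/select only, e.g. to sidestep a broken epoll build
    service = createAsyncIOService(allow_epoll=False, allow_kqueue=False)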
def read_comments(self, df, data_f): <NEW_LINE> <INDENT> start = self.config_obj.start <NEW_LINE> end = self.config_obj.end <NEW_LINE> modified = self.config_obj.modified <NEW_LINE> if df is None: <NEW_LINE> <INDENT> name = self.util_obj.get_comments_filename(modified) <NEW_LINE> filename = data_f + name <NEW_LINE> if self.util_obj.check_file(filename): <NEW_LINE> <INDENT> df = pd.read_csv(filename, nrows=end) <NEW_LINE> df = df[start:] <NEW_LINE> df = df.reset_index(drop=True) <NEW_LINE> <DEDENT> <DEDENT> return df
Reads the comments if the dataframe is empty. df: comments dataframe. data_f: data folder. Returns the comments dataframe.
625941b930dc7b76659017ea
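The windowing idiom in read_comments — read only the first `end` rows, then drop everything before `start` — can be reproduced directly with pandas. A minimal sketch against a hypothetical comments.csv:

    import pandas as pd

    start, end = 100, 1000
    df = pd.read_csv('comments.csv', nrows=end)  # rows 0..end-1 only
    df = df[start:].reset_index(drop=True)       # keep rows start..end-1, renumbered from 0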
def SetFont(self, *args, **kwargs): <NEW_LINE> <INDENT> wx.TextCtrl.SetFont(self, *args, **kwargs) <NEW_LINE> if self._autofit: <NEW_LINE> <INDENT> self.SetClientSize(self._CalcSize()) <NEW_LINE> width = self.GetSize().width <NEW_LINE> height = self.GetBestSize().height <NEW_LINE> self.SetInitialSize((width, height))
Set the font, then recalculate the control size if appropriate. See :meth:`TextCtrl.SetFont` for valid arguments.
625941b9dd821e528d63b02b
@ResponderRegistrar.responder('rebaser', MergeRequestActions.COMMENTED) <NEW_LINE> def apply_command_on_merge_request( pr: MergeRequest, comment: Comment, enable_rebase: bool=False, enable_merge: bool=False, enable_fastforward: bool=False, merge_admin_only: bool=True, fastforward_admin_only: bool=True, ): <NEW_LINE> <INDENT> username = Repository.from_igitt_repo(pr.repository).user.username <NEW_LINE> cmd, cmd_past = get_matched_command(comment.body, username) <NEW_LINE> enabled_cmd = { 'rebase': enable_rebase, 'merge': enable_merge, 'fastforward': enable_fastforward }.get(cmd) <NEW_LINE> if enabled_cmd: <NEW_LINE> <INDENT> if not verify_command_access(comment, merge_admin_only, fastforward_admin_only, cmd): <NEW_LINE> <INDENT> pr.add_comment( f'Hey @{comment.author.username}, you do not have the access ' f'to perform the {cmd} action with [GitMate.io]' '(https://gitmate.io). Please ask a maintainer to give you ' 'access. :warning:') <NEW_LINE> return <NEW_LINE> <DEDENT> pr.add_comment( f'Hey! I\'m [GitMate.io](https://gitmate.io)! This pull request is' f' being {cmd_past} automatically. Please **DO NOT** push while ' f'{cmd} is in progress or your changes would be lost permanently ' ':warning:') <NEW_LINE> head_clone_url = pr.source_repository.clone_url <NEW_LINE> base_clone_url = pr.target_repository.clone_url <NEW_LINE> output = run_in_container(settings.REBASER_IMAGE, 'python', 'run.py', cmd, head_clone_url, base_clone_url, pr.head_branch_name, pr.base_branch_name) <NEW_LINE> output = json.loads(output) <NEW_LINE> if output['status'] == 'success': <NEW_LINE> <INDENT> pr.add_comment( f'Automated {cmd} with [GitMate.io](https://gitmate.io) was ' 'successful! :tada:') <NEW_LINE> <DEDENT> elif 'error' in output: <NEW_LINE> <INDENT> error = output['error'].replace(head_clone_url, '<hidden_oauth_token>') <NEW_LINE> error = error.replace(base_clone_url, '<hidden_oauth_token>') <NEW_LINE> pr.add_comment(f'Automated {cmd} failed! Please {cmd} your pull ' 'request manually via the command line.\n\n' 'Reason:\n```\n{}\n```'.format(error))
Performs a merge, fastforward, or rebase of a merge request when an authorized user posts a command mentioning the keyword ``merge``, ``fastforward``/``ff``, or ``rebase`` respectively, e.g. ``@gitmate-bot rebase`` rebases the pull request onto master.
625941b96fb2d068a760ef21
def test_remove_existing_classifiers(tmpdir): <NEW_LINE> <INDENT> test_file = setup_local_copy("sample_files/sample_setup_file.py", tmpdir) <NEW_LINE> setup_file_modernizer = SetupFileModernizer() <NEW_LINE> with open(test_file) as infile: <NEW_LINE> <INDENT> file_data = infile.read() <NEW_LINE> <DEDENT> updated_file_data = setup_file_modernizer._remove_outdated_classifiers(file_data) <NEW_LINE> assert "'Framework :: Django :: 3.1'" not in updated_file_data <NEW_LINE> assert "'Framework :: Django :: 3.0'" not in updated_file_data <NEW_LINE> assert "'Framework :: Django :: 2.2'" not in updated_file_data <NEW_LINE> assert "'Framework :: Django :: 3.2'" in updated_file_data
Test the case where old classifiers are removed
625941b9cc40096d615957d3
@task <NEW_LINE> def resync(): <NEW_LINE> <INDENT> sudo("service ntp stop") <NEW_LINE> sudo("ntpdate -B ntp.ubuntu.com") <NEW_LINE> sudo("service ntp start")
Forcibly resynchronise the VM's NTP clock. If a VM's clock manages to get sufficiently out of sync, ntp will give up, forcing a manual intervention.
625941b9b7558d58953c4d9b
def char_read(self, uuid): <NEW_LINE> <INDENT> raise NotImplementedError()
Reads a Characteristic by UUID. uuid -- UUID of Characteristic to read as a string. Returns a bytearray containing the characteristic value on success. Example: my_ble_device.char_read('a1e8f5b1-696b-4e4c-87c6-69dfe0b0093b')
625941b93c8af77a43ae361d
def test_35_to_string(self): <NEW_LINE> <INDENT> ctx = CryptContext(**self.sample_1_dict) <NEW_LINE> dump = ctx.to_string() <NEW_LINE> import sys <NEW_LINE> if sys.version_info >= (2,7): <NEW_LINE> <INDENT> self.assertEqual(dump, self.sample_1_unicode) <NEW_LINE> <DEDENT> ctx2 = CryptContext.from_string(dump) <NEW_LINE> self.assertEqual(ctx2.to_dict(), self.sample_1_dict) <NEW_LINE> other = ctx.to_string(section="password-security") <NEW_LINE> self.assertEqual(other, dump.replace("[passlib]","[password-security]")) <NEW_LINE> from passlib.tests.test_utils_handlers import UnsaltedHash <NEW_LINE> ctx3 = CryptContext([UnsaltedHash, "md5_crypt"]) <NEW_LINE> dump = ctx3.to_string() <NEW_LINE> self.assertRegex(dump, r"# NOTE: the 'unsalted_test_hash' handler\(s\)" r" are not registered with Passlib")
test to_string() method
625941b97047854f462a128d
def yaml_dumps( data: YAMLInput, indent_mapping: int = 2, indent_sequence: int = 4, indent_offset: int = 2, sort_keys: bool = False, ) -> str: <NEW_LINE> <INDENT> yaml = CustomYaml() <NEW_LINE> yaml.sort_base_mapping_type_on_output = sort_keys <NEW_LINE> yaml.indent(mapping=indent_mapping, sequence=indent_sequence, offset=indent_offset) <NEW_LINE> return yaml.dump(data)
Serialize an object to a YAML string. See the ruamel.yaml docs on indentation for more details on the expected format. https://yaml.readthedocs.io/en/latest/detail.html?highlight=indentation#indentation-of-block-sequences data (YAMLInput): The YAML-serializable data. indent_mapping (int): Mapping indentation. indent_sequence (int): Sequence indentation. indent_offset (int): Indentation offset. sort_keys (bool): Sort dictionary keys. RETURNS (str): The serialized string.
625941b9a79ad161976cbfc6
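The same indentation control is available straight from ruamel.yaml; the sketch below mirrors the defaults above without the project's CustomYaml wrapper (whose definition is not shown here):

    import sys
    from ruamel.yaml import YAML

    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    yaml.dump({'pipeline': [{'name': 'tagger'}, {'name': 'parser'}]}, sys.stdout)
    # pipeline:
    #   - name: tagger
    #   - name: parser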
def forced_checkout_of_dir_with_file_obstructions(sbox): <NEW_LINE> <INDENT> make_local_tree(sbox, False, False) <NEW_LINE> url = sbox.repo_url <NEW_LINE> wc_dir_other = sbox.add_wc_path('other') <NEW_LINE> other_A = os.path.join(wc_dir_other, 'A') <NEW_LINE> os.makedirs(wc_dir_other) <NEW_LINE> svntest.main.file_write(other_A, 'The file A\n') <NEW_LINE> expected_output = svntest.wc.State(wc_dir_other, { 'iota' : Item(status='A '), 'A' : Item(status=' ', treeconflict='C'), 'A/mu' : Item(status=' ', treeconflict='A'), 'A/D' : Item(status=' ', treeconflict='A'), 'A/D/G' : Item(status=' ', treeconflict='A'), 'A/D/G/tau' : Item(status=' ', treeconflict='A'), 'A/D/G/pi' : Item(status=' ', treeconflict='A'), 'A/D/G/rho' : Item(status=' ', treeconflict='A'), 'A/D/H' : Item(status=' ', treeconflict='A'), 'A/D/H/psi' : Item(status=' ', treeconflict='A'), 'A/D/H/omega' : Item(status=' ', treeconflict='A'), 'A/D/H/chi' : Item(status=' ', treeconflict='A'), 'A/D/gamma' : Item(status=' ', treeconflict='A'), 'A/C' : Item(status=' ', treeconflict='A'), 'A/B' : Item(status=' ', treeconflict='A'), 'A/B/E' : Item(status=' ', treeconflict='A'), 'A/B/E/beta' : Item(status=' ', treeconflict='A'), 'A/B/E/alpha' : Item(status=' ', treeconflict='A'), 'A/B/F' : Item(status=' ', treeconflict='A'), 'A/B/lambda' : Item(status=' ', treeconflict='A'), }) <NEW_LINE> expected_disk = svntest.main.greek_state.copy() <NEW_LINE> expected_disk.remove('A/B', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha', 'A/B/F', 'A/B/lambda', 'A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau', 'A/D/H', 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi', 'A/D/gamma', 'A/mu', 'A/C') <NEW_LINE> expected_disk.tweak('A', contents='The file A\n') <NEW_LINE> actions.run_and_verify_checkout(url, wc_dir_other, expected_output, expected_disk, None, None, None, None, '--force') <NEW_LINE> os.remove(other_A) <NEW_LINE> expected_output = svntest.wc.State(wc_dir_other, { }) <NEW_LINE> expected_disk = svntest.main.greek_state.copy() <NEW_LINE> expected_status = actions.get_virginal_state(wc_dir_other, 1) <NEW_LINE> svntest.main.run_svn(None, 'revert', '-R', os.path.join(wc_dir_other, 'A')) <NEW_LINE> actions.run_and_verify_update(wc_dir_other, expected_output, expected_disk, expected_status, None, None, None, None, None, False, wc_dir_other)
forced co flags conflict if a file obstructs a dir
625941b9498bea3a759b9931
def contain_each_other(self, names): <NEW_LINE> <INDENT> if not isinstance(names, list): names = list(names) <NEW_LINE> if len(names) <= 1: return <NEW_LINE> root = names.pop() <NEW_LINE> if root not in self.subvars: <NEW_LINE> <INDENT> self.subvars[root] = set() <NEW_LINE> <DEDENT> self.subvars[root].update(names) <NEW_LINE> for i in names: <NEW_LINE> <INDENT> if i not in self.subvars: <NEW_LINE> <INDENT> self.subvars[i] = set() <NEW_LINE> <DEDENT> self.subvars[i].add(root)
Update the state such that the specified variables all contain each other
625941b90a50d4780f666d0f
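The resulting subvars structure is a star rather than a clique: the popped root records every other name, and each name records only the root. A small standalone sketch of the bookkeeping, assuming subvars is a plain dict of sets:

    subvars = {}
    names = ['a', 'b', 'c']
    root = names.pop()  # 'c' becomes the hub
    subvars.setdefault(root, set()).update(names)
    for n in names:
        subvars.setdefault(n, set()).add(root)

    print(subvars)  # {'c': {'a', 'b'}, 'a': {'c'}, 'b': {'c'}} (set order may vary)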