Columns: code — string, 4 to 4.48k chars; docstring — string, 1 to 6.45k chars; _id — string, 24 chars.
def player_join(game_id, player_id):
    app.games[game_id].add_player(player_id)
    board = app.games[game_id].make_board()
    return render_template("gameboard.html", board=board, game_id=game_id, player_id=player_id)
Player joins game.
625941b89b70327d1c4e0c2b
def __init__(self, patience=3, min_delta=0):
    self.patience = patience
    self.min_delta = min_delta
    self.counter = 0
    self.best_loss = None
    self.early_stop = False
:param patience: how many epochs to wait before stopping when loss is not improving
:param min_delta: minimum difference between new loss and old loss for new loss to be considered as an improvement
625941b821a7993f00bc7b41
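A minimal sketch of how such an early-stopping helper is typically driven; only __init__ appears in this record, so the step method and the loop around it are assumptions:

class EarlyStopping:
    def __init__(self, patience=3, min_delta=0):
        self.patience = patience
        self.min_delta = min_delta
        self.counter = 0
        self.best_loss = None
        self.early_stop = False

    def step(self, val_loss):
        # First observation: remember it and keep training.
        if self.best_loss is None:
            self.best_loss = val_loss
        elif self.best_loss - val_loss > self.min_delta:
            # Improved by more than min_delta: reset the patience counter.
            self.best_loss = val_loss
            self.counter = 0
        else:
            # No meaningful improvement: count this epoch against patience.
            self.counter += 1
            if self.counter >= self.patience:
                self.early_stop = True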
def pvar(self, dtype=None, out=None, ddof=0):
    dtype = self.dtype if dtype is None else dtype
    return process(t=self.t, x=self.var(axis=-1, dtype=dtype, out=out, ddof=ddof, keepdims=True))
One path process exposing for each time point the variance of process values across paths.
625941b8eab8aa0e5d26d9b6
def make_fileitem_peinfo_digitalsignature_signatureverified(sig_verified, condition='is', negate=False):
    document = 'FileItem'
    search = 'FileItem/PEInfo/DigitalSignature/SignatureVerified'
    content_type = 'bool'
    content = sig_verified
    ii_node = ioc_api.make_indicatoritem_node(condition, document, search, content_type, content, negate=negate)
    return ii_node
Create a node for FileItem/PEInfo/DigitalSignature/SignatureVerified.
:return: An IndicatorItem represented as an Element node
625941b8925a0f43d2549ccb
def IoU(gt, pr):
    if gt.ndim != pr.ndim:
        raise ValueError('Targets have different shapes: {} and {}'.format(gt.ndim, pr.ndim))
    gt = __channels_flatten(gt)
    pr = __channels_flatten(pr)
    intersection = (gt * pr).sum(axis=0)
    union = (gt + pr).sum(axis=0) - intersection
    return (intersection + EPS) / (union + EPS)
Args:
    gt: raster, 2D or 3D array with shape (H, W) or (H, W, C)
    pr: raster, 2D or 3D array with shape (H, W) or (H, W, C)
Returns:
    IoU score for each channel
625941b8e64d504609d74698
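A minimal worked instance of the same formula, assuming EPS is a small smoothing constant and collapsing the channel flattening for a single-channel mask:

import numpy as np

EPS = 1e-7

gt = np.array([[1, 1], [0, 0]], dtype=float)  # ground-truth mask, shape (2, 2)
pr = np.array([[1, 0], [0, 0]], dtype=float)  # predicted mask, shape (2, 2)

intersection = (gt * pr).sum()           # 1 pixel overlaps
union = (gt + pr).sum() - intersection   # 2 pixels lit in either mask
print((intersection + EPS) / (union + EPS))  # ~0.5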
def main(args):
    query_set = DataFile.objects.filter(
        institute__short_name='MPI-M',
        experiment__short_name='spinup-1950',
        version='v20171003'
    )
    logger.debug('{} files found'.format(query_set.count()))
    directories_found = []
    for df in query_set:
        if df.online:
            try:
                os.remove(os.path.join(df.directory, df.name))
            except OSError as exc:
                logger.error(str(exc))
                sys.exit(1)
        else:
            if df.directory not in directories_found:
                directories_found.append(df.directory)
        df.online = False
        df.directory = None
        df.save()
    for directory in directories_found:
        if not os.listdir(directory):
            delete_drs_dir(directory)
    logger.debug('{} directories removed'.format(len(directories_found)))
    replace_files(query_set)
Main entry point
625941b87cff6e4e811177dd
def format_as_float_or_array(name, values, val_if_none=0.0, flatten=False):
    if isinstance(values, np.ndarray):
        if flatten:
            values = values.flatten()
    elif not isinstance(values, string_types) and isinstance(values, Iterable):
        values = np.asarray(values, dtype=float)
        if flatten:
            values = values.flatten()
    elif values is None:
        values = val_if_none
    elif values == float('inf'):
        values = openmdao.INF_BOUND
    elif values == -float('inf'):
        values = -openmdao.INF_BOUND
    elif isinstance(values, numbers.Number):
        values = float(values)
    else:
        raise TypeError('Expected values of {0} to be an Iterable of '
                        'numeric values, or a scalar numeric value. '
                        'Got {1} instead.'.format(name, values))
    return values
Format array option values.

Checks that the given array values are either None, float, or an iterable
of numeric values. On output all iterables of numeric values are converted
to a flat np.ndarray. If values is scalar, it is converted to float.

Parameters
----------
name : str
    The path of the variable relative to the current system.
values : float or numpy ndarray or Iterable
    Values of the array option to be formatted to the expected form.
val_if_none : float or numpy ndarray
    The default value for the option if values is None.
flatten : bool
    Set to True to flatten any ndarray return.

Returns
-------
float or np.ndarray
    Values transformed to the expected form.

Raises
------
ValueError
    If values is Iterable but cannot be converted to a numpy ndarray.
TypeError
    If values is scalar, not None, and not a Number.
625941b82c8b7c6e89b3561b
def main():
    config = configparser.ConfigParser()
    config.read('dwh.cfg')
    conn = psycopg2.connect("host={} dbname={} user={} password={} port={}".format(*config['CLUSTER'].values()))
    cur = conn.cursor()
    get_results(cur, conn)
    conn.close()
Run queries on the staging and dimensional tables to validate that the project has been created successfully
625941b896565a6dacc8f52c
def unauthorized():
    url = request.script_root + request.path
    flash(_("You do not have permission to access '%(url)s'.", url=url), 'error')
    user_manager = current_app.user_manager
    return redirect(_endpoint_url(user_manager.unauthorized_endpoint))
Prepare a Flash message and redirect to USER_UNAUTHORIZED_URL
625941b85166f23b2e1a4fb0
def sendMouseEvent(self, f):
    pass
frameCached(f) -> Bool

Determine whether frame /f/ is known to be in the memory cache.

:param f:
625941b87047854f462a1265
def KnownFolderID(subcon):
    return Enum(subcon, **CSIDL)
Converts an integer to a CSIDL (KnownFolderID) value

>>> KnownFolderID(Int32ul).build("CSIDL_SYSTEM")
'%\x00\x00\x00'
>>> str(KnownFolderID(Int32ul).parse("\x18\x00\x00\x00"))
'CSIDL_COMMON_STARTUP'
625941b8009cb60464c63214
def check_for_errors(self):
    if not self.state or self.state != self.sess['state']:
        return template.lmsg(_('Detected a possible request forge'))
    if self.error:
        return template.lmsg(self.error)
    if not self.code:
        return template.lmsg(_('Invalid google login'))
    return False
Look for errors. The query string can contain an error parameter.
The state can be different from what we sent in the oauth start.
The code parameter is mandatory.
625941b8b7558d58953c4d73
def set(self, section, option, value=None):
    self.add_section(section)
    RawConfigParser.set(self, section, option, value)
Set an option, creating the section if needed.
625941b8a219f33f346287cc
def dt():
    data = pd.read_csv('http://biostat.mc.vanderbilt.edu/wiki/pub/Main/DataSets/titanic.txt')
    print(data.shape)
    x = data[['pclass', 'age', 'sex']]
    y = data['survived']
    x['age'].fillna(x['age'].mean(), inplace=True)
    x = pd.get_dummies(x, sparse=True)
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.25, random_state=666)
    print(x_train)
    dst = DecisionTreeClassifier()
    dst.fit(x_train, y_train)
    rf = RandomForestClassifier(random_state=666)
    param = {'n_estimators': [80, 120, 200, 300, 500, 800, 1200], 'max_depth': [5, 8, 15, 25, 30]}
    gc = GridSearchCV(rf, param_grid=param, cv=2)
    gc.fit(x_train, y_train)
    print('Prediction accuracy:', gc.score(x_test, y_test))
    print('Best parameters found:', gc.best_params_)
    return None
Predict Titanic passenger survival with a decision tree.
:return: None
625941b84d74a7450ccd401a
def _build_chain(G, u, v, visited):
    while v not in visited:
        yield u, v
        visited.add(v)
        u, v = v, G.nodes[v]['parent']
    yield u, v
Generate the chain starting from the given nontree edge.

`G` is a DFS cycle graph as constructed by :func:`_dfs_cycle_graph`. The
edge (`u`, `v`) is a nontree edge that begins a chain. `visited` is a set
representing the nodes in `G` that have already been visited.

This function yields the edges in an initial segment of the fundamental
cycle of `G` starting with the nontree edge (`u`, `v`) that includes all
the edges up until the first node that appears in `visited`. The tree
edges are given by the 'parent' node attribute. The `visited` set is
updated to add each node in an edge yielded by this function.
625941b8a8370b77170526f8
def dump_buffer(outbuffer):
    conv = str
    for tup in outbuffer:
        _ = sys.stdout.write(tup[0] + '\t' + conv(tup[1]) + '\t' + conv(tup[2]) + '\t' +
                             tup[3] + '\t' + conv(tup[4]) + '\t' + tup[5] + '\t' +
                             conv(tup[6]) + '\t' + conv(tup[7]) + '\t' + tup[8] + '\n')
    return
Write each buffered tuple to stdout as one tab-separated line.
:param outbuffer: iterable of 9-field tuples
:return: None
625941b830dc7b76659017c2
def test_return_invalid_rtype():
    result = dictor(BASIC, "spaceballs.genre", rtype="int")
    eq_("comedy", result)
Test that an invalid rtype (string to int) falls back to the original value.
625941b8507cdc57c6306b2b
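The BASIC fixture this test reads is presumably shaped like the following; with rtype="int" the string cannot be coerced, so dictor falls back to returning the original value:

BASIC = {"spaceballs": {"genre": "comedy"}}
# dictor(BASIC, "spaceballs.genre", rtype="int") -> "comedy"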
def close(self):
    if self.isEstablished():
        self._stateManager.close()
    else:
        print("No connection established to close.")
Closes connection if connection is established
625941b80a366e3fb873e66f
def get_date_format(_format='%Y-%m-%d %H:%M:%S'):
    return str(datetime.datetime.now().strftime(_format))
Return the current time as a formatted string.
625941b8adb09d7d5db6c5eb
def test_template_loaders(self):
    schema = DjangoConfigurationSchema()
    raw_settings = self.minimal_settings
    raw_settings['TEMPLATE_LOADERS'] = [
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    ]
    cleaned_settings = schema.deserialize(raw_settings)
    self.assertEqual(cleaned_settings['TEMPLATE_LOADERS'], raw_settings['TEMPLATE_LOADERS'])
    raw_settings = self.minimal_settings
    raw_settings['TEMPLATE_LOADERS'] = [
        'some.string',
        (
            'django.template.loaders.cached.Loader',
            (
                'django.template.loaders.filesystem.Loader',
                'django.template.loaders.app_directories.Loader',
            )
        ),
    ]
    cleaned_settings = schema.deserialize(raw_settings)
    self.assertEqual(cleaned_settings['TEMPLATE_LOADERS'], raw_settings['TEMPLATE_LOADERS'])
TEMPLATE_LOADERS accepts either strings or tuples.
625941b84428ac0f6e5ba64a
def get_sensors_list():
    rc, output = openbmctool_execute_command("sensors list", print_output=False, ignore_err=False)
    return vf.outbuf_to_report(output, field_delim="|")
Get the output of the sensors list command and return as a list of dictionaries.

Example robot code:
${sensors_list}=  Get Sensors List
Rprint Vars  sensors_list  fmt=1

Example result (excerpt):
sensors_list:
  sensors_list[0]:
    [sensor]:  OCC0
    [type]:    Discrete
    [units]:   N/A
    [value]:   Active
    [target]:  Active
  sensors_list[1]:
    [sensor]:  OCC1
    [type]:    Discrete
    [units]:   N/A
    [value]:   Active
    [target]:  Active
...
625941b894891a1f4081b900
def tri_naive_slerp(bary, base_pts):
    angle = xmath.central_angle_equilateral(base_pts)
    b = np.sin(angle * bary) / np.sin(angle)
    return b.dot(base_pts)
Naive slerp (spherical linear interpolation) on a spherical triangle.

Args:
    bary: Array, shape [..., 3]. Barycentric coordinates.
    base_pts: Array, shape [3, ..., 3]. Coordinates of the triangle.
        Should be in counterclockwise order to maintain orientation.

Returns:
    An array of shape [..., 3], representing points in 3d-space.

>>> tri_naive_slerp(_TEST_BARY, _TEST_EQ_TRI)
array([[ 1.        ,  0.        ,  0.        ],
       [ 0.95105652,  0.30901699,  0.        ],
       [ 0.80901699,  0.30901699,  0.30901699],
       [ 0.58778525,  0.30901699,  0.58778525],
       [ 0.30901699,  0.70710678,  0.4539905 ],
       [ 0.        ,  0.70710678,  0.70710678]])
625941b8f548e778e58cd3d4
def list(self, resource_group_name, provisioning_service_name, custom_headers=None, raw=False, **operation_config):
    url = '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Devices/provisioningServices/{provisioningServiceName}/certificates'
    path_format_arguments = {
        'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'provisioningServiceName': self._serialize.url("provisioning_service_name", provisioning_service_name, 'str')
    }
    url = self._client.format_url(url, **path_format_arguments)
    query_parameters = {}
    query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
    header_parameters = {}
    header_parameters['Content-Type'] = 'application/json; charset=utf-8'
    if self.config.generate_client_request_id:
        header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
    if custom_headers:
        header_parameters.update(custom_headers)
    if self.config.accept_language is not None:
        header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
    request = self._client.get(url, query_parameters)
    response = self._client.send(request, header_parameters, stream=False, **operation_config)
    if response.status_code not in [200]:
        raise models.ErrorDetailsException(self._deserialize, response)
    deserialized = None
    if response.status_code == 200:
        deserialized = self._deserialize('CertificateListDescription', response)
    if raw:
        client_raw_response = ClientRawResponse(deserialized, response)
        return client_raw_response
    return deserialized
Get all the certificates tied to the provisioning service.

:param resource_group_name: Name of resource group.
:type resource_group_name: str
:param provisioning_service_name: Name of provisioning service to retrieve certificates for.
:type provisioning_service_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the deserialized response
:param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`.
:return: CertificateListDescription or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.iothubprovisioningservices.models.CertificateListDescription or ~msrest.pipeline.ClientRawResponse
:raises: :class:`ErrorDetailsException<azure.mgmt.iothubprovisioningservices.models.ErrorDetailsException>`
625941b8d18da76e2353232a
def train(self, X_train, epochs: int = 10):
    half_batch = self.batch_size // 2
    for cnt in range(epochs):
        random_index = np.random.randint(0, len(X_train) - half_batch)
        self._log('Random Index: {}'.format(random_index))
        legit_images = X_train[random_index: random_index + half_batch].reshape(
            half_batch, self.width, self.height, self.channels)
        gen_noise = np.random.normal(0, 1, (half_batch, 100))
        syntetic_images = self.G.predict(gen_noise)
        x_combined_batch = np.concatenate((legit_images, syntetic_images))
        y_combined_batch = np.concatenate((np.ones((half_batch, 1)), np.zeros((half_batch, 1))))
        d_loss = self.D.train_on_batch(x_combined_batch, y_combined_batch)
        noise = np.random.normal(0, 1, (self.batch_size, 100))
        y_mislabled = np.ones((self.batch_size, 1))
        g_loss = self._model.train_on_batch(noise, y_mislabled)
        self._log(('Epoch: {:,}, [Discriminator :: d_loss: {:.4f}],'
                   '[Generator :: loss: {:.4f}]').format(cnt, d_loss[0], g_loss))
        if cnt % self.save_interval == 0:
            self.plot_images(step=cnt)
Train function to be used after GAN initialization.

X_train [np.array]: full set of images to be used
625941b8046cf37aa974cba3
def _local_dist(self, x, y):
    M, m, d = x.size()
    N, n, d = y.size()
    x = x.contiguous().view(M * m, d)
    y = y.contiguous().view(N * n, d)
    dist_mat = self._euclidean_dist(x, y)
    dist_mat = (torch.exp(dist_mat) - 1.) / (torch.exp(dist_mat) + 1.)
    dist_mat = dist_mat.contiguous().view(M, m, N, n).permute(1, 3, 0, 2)
    dist_mat = self._shortest_dist(dist_mat)
    return dist_mat
Args:
    x: pytorch Variable, with shape [M, m, d]
    y: pytorch Variable, with shape [N, n, d]
Returns:
    dist: pytorch Variable, with shape [M, N]
625941b8c4546d3d9de72888
def do_loop_turn(self):
    logger.info("In loop")
    time.sleep(1)
This function is called/used when you need a module with a loop function (and use the parameter 'external': True).
Note: we are obliged to define this method (even if it is never called) because it is an abstract function in the base class.
625941b8be383301e01b52e5
def __doPreviewExam(self):
    for lang in self.__languages:
        self.__doCreateExam([], [str(self.__exercise)], "/tmp",
                            time.strftime("%d.%m.%Y") + " --- 14h00 / PEII --- G120",
                            "20,30,50,50", lang, True)
    if self.__usepdftk:
        subprocess.call(["pdftk " + "/tmp/exam-*.pdf cat output /tmp/exam.pdf"],
                        shell=True, cwd="./", stdout=open("/dev/stdout", 'w'))
    else:
        subprocess.call(["gs -q -dNOPAUSE -dBATCH -sDEVICE=pdfwrite -sOutputFile=/tmp/exam.pdf " + "/tmp/exam-*.pdf"],
                        shell=True, cwd="./", stdout=open("/dev/stdout", 'w'))
    if self.__smscopencmd.find(",") == -1:
        cmd = self.__smscopencmd
        arg = [cmd, "/tmp/exam.pdf"]
    else:
        cmd = self.__smscopencmd.split(",")[0]
        arg = self.__smscopencmd.split(",")[1:]
        arg.append("/tmp/exam.pdf")
        arg.insert(0, cmd)
    subprocess.Popen(cmd + " /tmp/exam.pdf", shell=True)
Generate a quick preview (pdf) of one exercise
625941b89f2886367277a6e9
def __init__(self, user_id=None, mode=None, local_vars_configuration=None):
    if local_vars_configuration is None:
        local_vars_configuration = Configuration()
    self.local_vars_configuration = local_vars_configuration
    self._user_id = None
    self._mode = None
    self.discriminator = None
    if user_id is not None:
        self.user_id = user_id
    if mode is not None:
        self.mode = mode
UserInGroup - a model defined in OpenAPI
625941b88a43f66fc4b53ec2
@app.task(ignore_result=True)
def station_status_update():
    for station in Station.objects.all():
        if station.is_offline:
            station.status = 0
        elif station.testing:
            station.status = 1
        else:
            station.status = 2
        station.save()
Task to update Station status.
625941b8e1aae11d1e749b0c
def test_Conv1DTranspose_9():
    op = Net()
    op.eval()
    obj = APIOnnx(op, 'nn_Conv1DTranspose', [9])
    obj.set_input_data(
        "input_data",
        paddle.to_tensor(
            randtool("float", -1, 1, [3, 1, 10]).astype('float32')))
    obj.run()
api: paddle.Conv1DTranspose
op version: 9
625941b87b25080760e392b3
def point_wrt_polygon(points, Q):
    r = []
    n = point_wrt_polygon_([_p[0] for _p in points], [_p[1] for _p in points],
                           Q[:, 0].tolist(), Q[:, 1].tolist(), r)
    if n == -1 or n == -2:
        raise core.Error("Size mismatch in p or Q")
    elif n < -2:
        raise core.Error("Unspecific error")
    return r
Test the position of a list of points with respect to a polygon.

Args:
    points (list): a list of (x,y) coordinates
    Q (numpy.array): (m x 2) the vertices of a polygon

Returns:
    a list of codes, one code per point in <points> list:
     1 for a point INSIDE the polygon
     0 for a point on the BOUNDARY of the polygon
    -1 for a point OUTSIDE the polygon
625941b86aa9bd52df036bfa
def removeLeastUsedNode(self):
    node = self.tail.prev
    self.removeNode(node)
    return node
Use this function to remove the least recently used node
625941b83eb6a72ae02ec333
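removeLeastUsedNode reads the node just before the sentinel tail; a minimal sketch of the doubly-linked structure this implies (the Node class and removeNode are assumptions, not part of this record):

class Node:
    def __init__(self, key=None, value=None):
        self.key, self.value = key, value
        self.prev = self.next = None

class LRUList:
    def __init__(self):
        # Sentinel head/tail; the least recently used node sits at tail.prev.
        self.head, self.tail = Node(), Node()
        self.head.next, self.tail.prev = self.tail, self.head

    def removeNode(self, node):
        # Unlink the node from its neighbors.
        node.prev.next = node.next
        node.next.prev = node.prev

    def removeLeastUsedNode(self):
        node = self.tail.prev
        self.removeNode(node)
        return node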
def replace_whole(orig, src, tgt, drop_chars):
    splits = un_split(orig, src, drop_chars)
    rev_lookup = {}
    for w, ss in zip(orig, splits):
        for s in ss:
            if rev_lookup.get(s, w) != w:
                log.warning(f"Ambiguous :: {s} is either from {w} or {rev_lookup[s]}")
            rev_lookup[s] = w
    src_vocab = set(src)
    orig_vocab = set(orig)
    result = []
    for i, word in enumerate(tgt):
        if word in src_vocab:
            if word in orig_vocab:
                result.append(word)
            else:
                tgt_word = rev_lookup[word]
                if result and result[-1] == tgt_word:
                    pass
                else:
                    result.append(tgt_word)
        else:
            result.append(word)
    return result
Replaces parts with whole
625941b8aad79263cf390893
@blueprint.route("/database")
def admin_database_statistics():
    db_stats_count = []
    db_stats_total = 0
    for row_data in g.db_connection.db_pgsql_row_count():
        db_stats_total += row_data[2]
        db_stats_count.append((row_data[1], common_internationalization.com_inter_number_format(row_data[2])))
    db_stats_count.append(
        ('Total records:', common_internationalization.com_inter_number_format(db_stats_total)))
    db_size_data = []
    db_size_total = 0
    for row_data in g.db_connection.db_pgsql_table_sizes():
        db_size_total += row_data['total_size']
        db_size_data.append(
            (row_data['relation'], common_string.com_string_bytes2human(row_data['total_size'])))
    db_size_data.append(('Total Size:', common_string.com_string_bytes2human(db_size_total)))
    return render_template("admin/admin_server_database_stats.html",
                           data_db_size=db_size_data,
                           data_db_count=db_stats_count,
                           data_workers=db_connection.db_parallel_workers())
Display database statistics page
625941b856b00c62f0f144b6
def iou_back(y_true, y_pred):
    y_pred = 1 - K.argmax(y_pred)
    y_true = 1 - K.argmax(y_true)
    TP = tf.math.count_nonzero(y_pred * y_true)
    TN = tf.math.count_nonzero((1 - y_pred) * (1 - y_true))
    FP = tf.math.count_nonzero(y_pred * (1 - y_true))
    FN = tf.math.count_nonzero((1 - y_pred) * y_true)
    return TP / (TP + FP + FN)
Calculate IoU for the background class.
IOU = true_positive / (true_positive + false_positive + false_negative)
625941b8be8e80087fb20aa8
def deleteregions(self):
    cmd = 'regions delete all'
    self.ds9.set(cmd)
Delete all regions in the frame
625941b838b623060ff0ac47
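A plausible way this wrapper is driven, assuming self.ds9 is a pyds9.DS9 handle (an assumption; the attribute's type does not appear in this record):

from pyds9 import DS9

d = DS9()                    # connect to (or launch) a ds9 instance
d.set('regions delete all')  # the same XPA command the method sends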
def _write_input_arguments(self, f):
    input_arguments = self.parameters['input_arguments']
    if input_arguments is None:
        return
    for key, value in input_arguments.items():
        self._write_argument(key, value, f)
Write directly given input-arguments.
625941b89c8ee82313fbb5cd
@given(uint64())
def test_uint64_generates_in_range_value_per_default(generated_value):
    assert generated_value >= UINT64_MIN_VALUE
    assert generated_value <= UINT64_MAX_VALUE
Verify default value range for Uint64.
625941b844b2445a33931ef8
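A plausible definition of the uint64 strategy and the bounds this test assumes (names are taken from the test; the implementation itself is a sketch):

from hypothesis import strategies as st

UINT64_MIN_VALUE = 0
UINT64_MAX_VALUE = 2**64 - 1

def uint64():
    # Draw integers covering the full unsigned 64-bit range.
    return st.integers(min_value=UINT64_MIN_VALUE, max_value=UINT64_MAX_VALUE)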
def check_lang_id(self, lang_id):
    return int(lang_id) == self.app.lang.get_id()
Returns True if the lang_id matches that used by the server
625941b86e29344779a6246e
def iter_line_ranges(self, start=0, stop=None):
    string = self.store.string()
    index = start
    if stop is None:
        stop = string.length()
    while index < stop:
        rng = string.lineRangeForRange_((index, 0))
        yield rng
        index = sum(rng)
Generate line ranges

:param start: Hexichar index from which to start iterating. The default is zero (0).
:param stop: The last hexichar index to consider. The default is the last hexichar index.
:yields: Two-tuples `(location, length)`. `location` is the beginning of the line in
    hexichars, and `length` is the length of the line including newline character(s)
    in hexichars.
625941b8b830903b967e976f
def test_coulomb_matrix(self):
    f = cm.CoulombMatrix(self.mol.GetNumAtoms())
    rval = f([self.mol])
    assert rval.shape == (1, self.mol.GetNumConformers(), self.mol.GetNumAtoms(), self.mol.GetNumAtoms())
Test CoulombMatrix.
625941b8ff9c53063f47c056
def listdir(directory):
    file_names = list()
    for filename in os.listdir(directory):
        file_path = os.path.join(directory, filename)
        if os.path.isdir(file_path):
            filename = f'{filename}{os.path.sep}'
        file_names.append(filename)
    return file_names
Returns a list of nested files and directories for a local directory path.
:param directory: absolute or relative path to a local directory
:return: list of nested file or directory names
625941b80383005118ecf43d
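A quick usage sketch (the path and its contents are hypothetical):

# Given /tmp/demo containing a file "a.txt" and a subdirectory "sub",
# directories come back with a trailing separator appended:
print(listdir('/tmp/demo'))  # e.g. ['a.txt', 'sub/']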
def test_list_false_crl(self):
    self.data_in = [False]
    self.expected = 'false'
    opts.compact = True
    opts.raw = True
    opts.lines = True
    self.assertEqual(self.json_out.create_json(self.data_in), self.expected)
Test [False] -crl
625941b85510c4643540f250
def jerrum_filter(self, alpha, cri):
    n = self.n
    r = self.r
    G = self.G
    gens = self.gens
    cosrep = G._stabilizer_cosets
    self.jgs = 0
    for j in range(n):
        self.jg[j] = None
    self.freejg = list(range(n))
    for i in range(n):
        self.vertex[i].neighbor = []
        self.vertex[i].perm = []
    for i in range(n):
        for j in range(n):
            self.vertex[i].index_neighbor[j] = -1
    for i in range(n):
        if cosrep[i] is not None:
            p1 = cosrep[i]
            for j in range(r):
                p2 = gens[j]
                p3 = cri[p2[i]]
                h = [p3[p2[k]] for k in p1]
                self.insert(h, alpha)
    r = 0
    for j in range(n):
        if self.jg[j] is not None:
            gens[r] = self.jg[j]
            r += 1
    self.r = r
filter the generators of the stabilizer subgroup G_alpha

Parameters
==========

alpha   point for which the stabilizer is computed
cri[i]  inverse of G._stabilizer_cosets[i] if ``i`` is not None

Notes
=====

Schreier lemma: the stabilizer subgroup G_alpha of G is generated by the
Schreier generators

    h = cosrep[ p2[i] ]**-1 * g[j] * cosrep[i]

where j=0,..,len(gens)-1 and i=0,..,n-1, where n is the degree.

Proof that h belongs to G_alpha:
cosrep[k][alpha] = k for all k; cosrep[k]**-1[k] = alpha
p1 = cosrep[i]; p2 = g[j]
p3 = cosrep[ p2[i] ]; p3[alpha] = p2[i]
p3**-1[p2[i]] = alpha
p3**-1[p2[p1[alpha]]] = alpha, so h[alpha] = alpha

Using Jerrum's filter one can reduce the len(gens)*n generators of G_alpha
produced by the Schreier lemma to at most n-1.

Jerrum's filter
---------------

(see Cameron 'Permutation groups', page 22)

_JGraph has n-1 vertices; the edges (i, j) are labeled by group elements
``g`` with j = imin(g) = min(i | g[i] != i); define
m(graph) = sum(imin(g) for g in graph).

At the beginning the graph has no edges, so it is an acyclic graph. Insert
a group element ``g`` produced by the Schreier lemma; introduce in _JGraph
an edge (imin(g), g[imin(g)]); if the graph contains a cycle, let ``i0`` be
the smallest point in the cycle, and ``h`` the product of the group
elements labeling the edges in the cycle, starting from ``i0``;
h[j] = j for j <= i0; modify it eliminating the edge (i0, g0[i0]) in the
cycle; one obtains a new acyclic graph with m(graph_new) > m(graph).
``g0`` can be expressed as a product of ``h`` and the other elements in
the cycle. Then insert ``h`` in the graph, and so on.

Since m < n**2, this process ends after a finite number of times, so in
the end one remains with an acyclic graph, with at most n-1 edges and the
same number of generators.
625941b8cc40096d615957ac
def testLegacyTask(self):
    retVal = LegacyTask.parseAndRun(args=[DataPath, "--output", self.outPath,
                                          "--id", "visit=3", "filter=r"],
                                    doReturnResults=True)
    self.assertEqual(retVal.resultList[0].result.didEnterRun, True)
Test error handling when a task cannot be constructed
625941b832920d7e50b28025
@cacheable
def make_content_view(options=None):
    if not options or not options.get('organization-id'):
        raise CLIFactoryError('Please provide a valid ORG ID.')
    args = {
        u'component-ids': None,
        u'composite': False,
        u'description': None,
        u'label': None,
        u'name': gen_string('alpha', 10),
        u'organization': None,
        u'organization-id': None,
        u'organization-label': None,
        u'repository-ids': None
    }
    return create_object(ContentView, args, options)
Usage::

    hammer content-view create [OPTIONS]

Options::

    --component-ids COMPONENT_IDS             List of component content view
                                              version ids for composite views
                                              Comma separated list of values.
    --composite                               Create a composite content view
    --description DESCRIPTION                 Description for the content view
    --label LABEL                             Content view label
    --name NAME                               Name of the content view
    --organization ORGANIZATION_NAME          Organization name to search by
    --organization-id ORGANIZATION_ID         Organization identifier
    --organization-label ORGANIZATION_LABEL   Organization label to search by
    --repository-ids REPOSITORY_IDS           List of repository ids
                                              Comma separated list of values.
    -h, --help                                print help
625941b8aad79263cf390894
def umode(self, nick, modes=''):
    with self.lock:
        if not modes:
            self.send('MODE %s' % nick)
            if self.readable():
                msg = self._recv(expected_replies=('221',))
                if msg[0] == '221':
                    modes = msg[2].replace('+', '').replace(':', '', 1)
            return modes
        self.send('MODE %s %s' % (nick, modes))
        if self.readable():
            msg = self._recv(expected_replies=('MODE',))
            if msg[0] == 'MODE':
                if not self.hide_called_events:
                    self.stepback()
                return msg[2].replace(':', '', 1)
Sets/gets user modes.
Required arguments:
* nick - Nick to set/get user modes for.
Optional arguments:
* modes='' - Sets these user modes on a nick.
625941b80a50d4780f666ce8
def __init__(self, carrier=None, service_id=None, local_vars_configuration=None):
    if local_vars_configuration is None:
        local_vars_configuration = Configuration()
    self.local_vars_configuration = local_vars_configuration
    self._carrier = None
    self._service_id = None
    self.discriminator = None
    if carrier is not None:
        self.carrier = carrier
    if service_id is not None:
        self.service_id = service_id
CrossBorderQuotesRequestRates - a model defined in OpenAPI
625941b8099cdd3c635f0ab5
def generate_contents(self, path, key):
    for entry in os.listdir(path):
        if key(os.path.join(path, entry)):
            yield entry
Generate the contents (dirs & files) of a given path that satisfy the key predicate.
625941b850485f2cf553cbf1
def update(self):
    evo_data = self.hass.data[DATA_EVOHOME]
    timeout = datetime.now() + timedelta(seconds=55)
    expired = timeout > self._timers['statusUpdated'] + timedelta(
        seconds=evo_data['params'][CONF_SCAN_INTERVAL])
    if not expired:
        return
    was_available = self._available or self._timers['statusUpdated'] == datetime.min
    self._update_state_data(evo_data)
    self._status = evo_data['status']
    if _LOGGER.isEnabledFor(logging.DEBUG):
        tmp_dict = dict(self._status)
        if 'zones' in tmp_dict:
            tmp_dict['zones'] = '...'
        if 'dhw' in tmp_dict:
            tmp_dict['dhw'] = '...'
        _LOGGER.debug(
            "update(%s), self._status = %s",
            self._id + " [" + self._name + "]",
            tmp_dict
        )
    no_recent_updates = self._timers['statusUpdated'] < datetime.now() - timedelta(
        seconds=self._params[CONF_SCAN_INTERVAL] * 3.1)
    if no_recent_updates:
        self._available = False
        debug_code = EVO_DEBUG_NO_RECENT_UPDATES
    elif not self._status:
        self._available = False
        debug_code = EVO_DEBUG_NO_STATUS
    else:
        self._available = True
    if not self._available and was_available:
        _LOGGER.warning(
            "The entity, %s, has become unavailable, debug code is: %s",
            self._id + " [" + self._name + "]",
            debug_code
        )
    elif self._available and not was_available:
        _LOGGER.debug(
            "The entity, %s, has become available",
            self._id + " [" + self._name + "]"
        )
Get the latest state data of the installation. This includes state data for the Controller and its child devices, such as the operating_mode of the Controller and the current_temperature of its children. This is not asyncio-friendly due to the underlying client api.
625941b8090684286d50eb39
def Phase_ClassName():
    return _DataModel.Phase_ClassName()
Phase_ClassName() -> char const *
625941b866656f66f7cbc003
def get_message(self):
    return self.message % {"error_code": self.code}
Get the external message for an error.
625941b863d6d428bbe44348
def copy_node(self, node_id, copy_id=None):
    if copy_id is None:
        copy_id = self.generate_new_node_id(node_id)
    if copy_id in self.nodes():
        raise ReGraphError(
            "Cannot create a copy of '{}' with id '{}', ".format(
                node_id, copy_id) +
            "node '{}' already exists in the graph".format(copy_id))
    attrs = self.get_node(node_id)
    self.add_node(copy_id, attrs)
    return copy_id
Copy node.

Create a copy of a node in a graph. A new id for the copy is
generated by regraph.primitives.unique_node_id.

Parameters
----------
node_id : hashable
    Node to copy.

Returns
-------
new_name
    Id of the copy node.
625941b891af0d3eaac9b86d
@register.tag
def sitetree_children(parser, token):
    tokens = token.split_contents()
    context_var = None
    if tokens[-2] == 'as':
        context_var = tokens[-1]
        tokens = tokens[:-2]
    use_template = detect_clause(parser, 'template', tokens)
    menu_name = detect_clause(parser, 'name', tokens)
    tokens_num = len(tokens)
    clauses_in_places = (
        tokens_num == 5 and tokens[1] == 'of' and tokens[3] == 'for' and
        tokens[4] in ('menu', 'sitetree')
    )
    if clauses_in_places and any([use_template, context_var]):
        tree_item = tokens[2]
        navigation_type = tokens[4]
        return sitetree_childrenNode(tree_item, navigation_type, use_template, context_var, menu_name)
    else:
        raise template.TemplateSyntaxError(
            '%r tag requires six arguments. '
            'E.g. {%% sitetree_children of someitem for menu template "sitetree/mychildren.html" %%}.' % tokens[0])
Parses sitetree_children tag parameters.

Six arguments:
    {% sitetree_children of someitem for menu template "sitetree/mychildren.html" %}
Used to render child items of specific site tree 'someitem' using template
"sitetree/mychildren.html" for menu navigation.

Basically template argument should contain path to current template itself.

Allowed navigation types: 1) menu; 2) sitetree.
625941b84f6381625f11489f
def test_get_series_name(self):
    cases = [
        {
            "app_name": 'Foo',
            "expected_series_name": 'foo_metrics'
        },
        {
            "app_name": 'foo',
            "expected_series_name": 'foo_metrics'
        },
        {
            "app_name": 'app',
            "series_name": 'Perf_Data',
            "expected_series_name": 'app_perf_data'
        }
    ]
    for case in cases:
        self.check_get_series_name(**case)
Test get_series_name() method
625941b8d4950a0f3b08c1b3
def get_error_mapping(self):
    return SeedManifest(self.manifest).get_error_mapping()
Returns the error mapping for this job type

:returns: The error mapping
:rtype: :class:`job.error.mapping.JobErrorMapping`
625941b8e1aae11d1e749b0d
def patch_volume_attachment_with_http_info(self, name, body, **kwargs):
    all_params = ['name', 'body', 'pretty']
    all_params.append('async')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    params = locals()
    for key, val in params['kwargs'].items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method patch_volume_attachment" % key
            )
        params[key] = val
    del params['kwargs']
    if ('name' not in params or params['name'] is None):
        raise ValueError("Missing the required parameter `name` when calling `patch_volume_attachment`")
    if ('body' not in params or params['body'] is None):
        raise ValueError("Missing the required parameter `body` when calling `patch_volume_attachment`")
    collection_formats = {}
    path_params = {}
    if 'name' in params:
        path_params['name'] = params['name']
    query_params = []
    if 'pretty' in params:
        query_params.append(('pretty', params['pretty']))
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    if 'body' in params:
        body_params = params['body']
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json', 'application/yaml', 'application/vnd.kubernetes.protobuf'])
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json-patch+json', 'application/merge-patch+json', 'application/strategic-merge-patch+json'])
    auth_settings = ['BearerToken']
    return self.api_client.call_api(
        '/apis/storage.k8s.io/v1alpha1/volumeattachments/{name}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='V1alpha1VolumeAttachment',
        auth_settings=auth_settings,
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
patch_volume_attachment

partially update the specified VolumeAttachment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.patch_volume_attachment_with_http_info(name, body, async=True)
>>> result = thread.get()

:param async bool
:param str name: name of the VolumeAttachment (required)
:param object body: (required)
:param str pretty: If 'true', then the output is pretty printed.
:return: V1alpha1VolumeAttachment
         If the method is called asynchronously, returns the request thread.
625941b894891a1f4081b901
def distance2(v, w):
    return magnitude(vector_substract(v, w))
Distance between two vectors.
:v: first vector
:w: second vector
:returns: the magnitude of v - w
625941b829b78933be1e5512
def patch(user_request, url, data=None, **kwargs):
    _set_session_key(user_request, kwargs)
    if debug:
        log(user_request, url, "PATCH", data=data, kwargs=kwargs)
    return requests.patch(url, data, **kwargs)
A wrapper of requests.patch. This method will automatically add the user's
session key as the cookie to enable SSO.

Sends a PATCH request. Returns :class:`Response` object.

:param user_request: The http request that contains the authentication key and is triggered by the user.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
625941b8379a373c97cfa9a3
@app_views.route('/forbidden', methods=['GET'], strict_slashes=False)
def forbidden() -> str:
    return abort(403)
GET /api/v1/forbidden
Return:
  - raises a 403 error by using abort
625941b8de87d2750b85fbe7
def test_rename_then_slug(self):
    self.assertLines(
        [
            "-S",
            "--rename",
            'a|Alice,"b|big-bob!","c| CA. "',
            "examples/dummy.csv",
        ],
        ["alice,big_bob,ca", "1,2,3"],
    )
renaming always happens first, then the slugging
625941b8e8904600ed9f1d82
def __init__(self, name, diag, freq):
    CompositeDiagnosticTask.__init__(self, name + ' topic status')
    self.diag = diag
    self.freq = FrequencyStatus(freq)
    self.addTask(self.freq)
    self.diag.add(self)
Construct a HeaderlessTopicDiagnostic.

@param name The name of the topic that is being diagnosed.
@param diag The diagnostic_updater that the CompositeDiagnosticTask should add itself to.
@param freq The parameters for the FrequencyStatus class that will be computing statistics.
625941b81f5feb6acb0c49ae
def get_all_users(self, different_name=' ', page_start=0, page_size=20, is_count=False):
    try:
        sys_name = TvbProfile.current.web.admin.SYSTEM_USER_NAME
        query = self.session.query(User
                                   ).filter(User.username != different_name
                                            ).filter(User.username != sys_name)
        if is_count:
            result = query.count()
        else:
            result = query.order_by(User.username).offset(max(page_start, 0)).limit(max(page_size, 0)).all()
        return result
    except NoResultFound:
        self.logger.warning("No users found. Maybe database is empty.")
        raise
Retrieve all USERS in DB, except current user and system user.
625941b863b5f9789fde6f3f
def ProcessCmdKey(self, *args):
    pass
ProcessCmdKey(self: Control, msg: Message, keyData: Keys) -> (bool, Message)

Processes a command key.

msg: A System.Windows.Forms.Message, passed by reference, that represents the window message to process.
keyData: One of the System.Windows.Forms.Keys values that represents the key to process.
Returns: true if the character was processed by the control; otherwise, false.
625941b8925a0f43d2549ccd
@mod_reports.route('/charts/get_data/<int:chart_id>', methods=['GET'])
@mod_reports.route('/charts/get_data/<int:chart_id>/', methods=['GET'])
@login_required
def get_chart_data(chart_id):
    start = request.args.get('start')
    end = request.args.get('end')
    data = {}
    chart = Chart.query.get(chart_id)
    if chart and start and end:
        data = chart.data_points(
            min_date=start,
            max_date=end,
            ds_format=True,
        )
    return jsonify(**data)
Retrieve more data in JSON format for a specific chart
625941b8a4f1c619b28afe9b
def to_sqlite(df, out_folder, db_name, table_name, spatial_type="ST_GEOMETRY", overwrite=True):
    if HASARCPY:
        sqldb = os.path.join(out_folder, db_name)
        if not os.path.isdir(out_folder):
            raise ValueError("Save folder does not exist")
        if os.path.isfile(sqldb) and overwrite:
            os.remove(sqldb)
        elif os.path.isfile(sqldb) and not overwrite:
            raise ValueError("SQLite database exists, please pick a different name")
        db = arcpy.CreateSQLiteDatabase_management(out_database_name=sqldb, spatial_type=spatial_type)[0]
        return to_featureclass(df=df, out_location=db, out_name=table_name)
    else:
        raise ImportError("arcpy is required to perform this operation")
Creates a new sqlite database and imports the dataframe data to that database.

Inputs:
    df: spatial dataframe to export
    out_folder: Location of the SQLite database to be created
    db_name: Name of the SQLite database file. An extension of .sqlite will be
        automatically assigned if the spatial_type is ST_GEOMETRY or SPATIALITE.
        If the spatial_type is GEOPACKAGE, a .gpkg extension is automatically assigned.
    table_name: name of the exported spatial dataframe
    spatial_type: The spatial type to install with the new SQLite database.
        ST_GEOMETRY - Esri's spatial storage type. This is the default.
        SPATIALITE - SpatiaLite spatial storage type.
        GEOPACKAGE - OGC GeoPackage dataset.
    overwrite: True will erase data if it exists, false will throw error if
        the sqlite database exists.
Output:
    sqlite Database path location plus the feature class.
625941b82c8b7c6e89b3561d
def exercise():
    for d_min in [1, 2, 6]:
        print(d_min, "-" * 69)
        pi = get_pdb_inputs(pdb_str=pdb_str_1)
        f_calc = pi.xrs.structure_factors(d_min=d_min).f_calc()
        fft_map = f_calc.fft_map(resolution_factor=0.25)
        fft_map.apply_sigma_scaling()
        map_data = fft_map.real_map_unpadded()
        w = weight.run(
            map_data=map_data,
            xray_structure=pi.xrs,
            pdb_hierarchy=pi.ph,
            geometry_restraints_manager=pi.grm).weight
        ro = individual_sites.simple(
            target_map=map_data,
            selection=flex.bool(pi.xrs.scatterers().size(), True),
            geometry_restraints_manager=pi.grm.geometry)
        ro.refine(weight=w, xray_structure=pi.xrs)
        print(ro.rmsds())
Exercise determination of optimal weights for real-space refinement of individual coordinates.
625941b87cff6e4e811177df
def seresnet200b(**kwargs):
    return get_seresnet(blocks=200, conv1_stride=False, model_name="seresnet200b", **kwargs)
SE-ResNet-200 model with stride at the second convolution in bottleneck block
from 'Squeeze-and-Excitation Networks,' https://arxiv.org/abs/1709.01507.
It's an experimental model.

Parameters:
----------
pretrained : bool, default False
    Whether to load the pretrained weights for model.
root : str, default '~/.tensorflow/models'
    Location for keeping the model parameters.

Returns:
-------
functor
    Functor for model graph creation with extra fields.
625941b8099cdd3c635f0ab6
def inputs(eval_data):
    return blood_data.inputs_balanced(batch_size=FLAGS.batch_size,
                                      fake_data=False,
                                      one_hot=True,
                                      dtype=tf.uint8,
                                      eval_data=eval_data)
Return a balanced set of cell inputs.
625941b876d4e153a657e98a
def isinf(x, out=None):
    pass
isinf(x[, out])

Test element-wise for positive or negative infinity.

Returns a boolean array of the same shape as `x`, True where ``x == +/-inf``,
otherwise False.

Parameters
----------
x : array_like
    Input values
out : array_like, optional
    An array with the same shape as `x` to store the result.

Returns
-------
y : bool (scalar) or boolean ndarray
    For scalar input, the result is a new boolean with value True if the
    input is positive or negative infinity; otherwise the value is False.

    For array input, the result is a boolean array with the same shape as
    the input and the values are True where the corresponding element of
    the input is positive or negative infinity; elsewhere the values are
    False. If a second argument was supplied the result is stored there.
    If the type of that array is a numeric type the result is represented
    as zeros and ones, if the type is boolean then as False and True,
    respectively. The return value `y` is then a reference to that array.

See Also
--------
isneginf, isposinf, isnan, isfinite

Notes
-----
Numpy uses the IEEE Standard for Binary Floating-Point for Arithmetic
(IEEE 754).

Errors result if the second argument is supplied when the first argument
is a scalar, or if the first and second arguments have different shapes.

Examples
--------
>>> np.isinf(np.inf)
True
>>> np.isinf(np.nan)
False
>>> np.isinf(np.NINF)
True
>>> np.isinf([np.inf, -np.inf, 1.0, np.nan])
array([ True,  True, False, False], dtype=bool)

>>> x = np.array([-np.inf, 0., np.inf])
>>> y = np.array([2, 2, 2])
>>> np.isinf(x, y)
array([1, 0, 1])
>>> y
array([1, 0, 1])
625941b8097d151d1a222cb6
@enforce_user_agent
@never_cache
def give_feedback(request, ua, type):
    Formtype = PraiseForm
    if type == OPINION_PRAISE:
        Formtype = PraiseForm
    elif type == OPINION_ISSUE:
        Formtype = IssueForm
    elif type == OPINION_SUGGESTION:
        Formtype = SuggestionForm
    if request.method == 'POST':
        form = Formtype(request.POST)
        if form.is_valid():
            if not form.cleaned_data.get('add_url', False):
                form.cleaned_data['url'] = ''
            locale = detect_language(request)
            new_opinion = Opinion(
                type=type,
                url=form.cleaned_data.get('url', ''),
                description=form.cleaned_data['description'],
                user_agent=ua,
                locale=locale,
                manufacturer=form.cleaned_data['manufacturer'],
                device=form.cleaned_data['device'])
            new_opinion.save()
            return http.HttpResponseRedirect(reverse('feedback.thanks'))
    else:
        url = request.GET.get('url', '')
        form = Formtype(initial={'url': url, 'add_url': False, 'type': type})
    div_id = 'feedbackform'
    if type == OPINION_SUGGESTION:
        div_id = 'suggestionform'
    url_suggestion = request.GET.get('url', 'suggestion')
    data = {
        'form': form,
        'type': type,
        'div_id': div_id,
        'MAX_FEEDBACK_LENGTH': settings.MAX_FEEDBACK_LENGTH,
        'OPINION_PRAISE': OPINION_PRAISE,
        'OPINION_ISSUE': OPINION_ISSUE,
        'OPINION_SUGGESTION': OPINION_SUGGESTION,
        'url_suggestion': url_suggestion
    }
    template = ('feedback/mobile/feedback.html' if request.mobile_site
                else 'feedback/feedback.html')
    return jingo.render(request, template, data)
Submit feedback page
625941b82eb69b55b151c704
def fill_in_questionnaire(self, questionnaire, **kwargs):
    response = QuestionnaireResponse(questionnaire_id=questionnaire.id, **kwargs)
    self.questionnaire_responses.append(response)
    return response
Create a questionnaire response.
625941b896565a6dacc8f52e
def divisionWithRemainder(self, a, b):
    raise NotImplementedError()
For b != 0, returns (s, r) with a = b*s + r and f(r) < f(b).
625941b8b57a9660fec336da
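A concrete instance for the ring of integers, where the Euclidean function f is the absolute value (a sketch, not part of this record):

def divisionWithRemainder(self, a, b):
    # divmod gives a = b*s + r with |r| < |b|, satisfying f(r) < f(b).
    s, r = divmod(a, b)
    return (s, r)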
def study(self, text):
    if text not in self._random:
        self._random.append(text)
Store the user's utterance text in memory.
Do nothing if the same utterance is already stored.
625941b8fbf16365ca6f6016
def __call__(self, inter: 'freestyle.types.Interface1D') -> float:
    pass
Returns the average depth evaluated for an Interface1D.

The average depth is evaluated for a set of points along the Interface1D
(using the freestyle.functions.LocalAverageDepthF0D functor) with a
user-defined sampling and then integrated into a single value using a
user-defined integration method.

:param inter: An Interface1D object.
:type inter: 'freestyle.types.Interface1D'
:rtype: float
:return: The average depth evaluated for the Interface1D.
625941b8cc0a2c11143dccf2
def get_category_display(self):
    try:
        category = int(self.cleaned_data['category'])
    except (AttributeError, ValueError):
        category = None
    return dict(CONTACT_CHOICES).get(category)
Returns the displayed name of the selected category.
625941b8b7558d58953c4d75
def has_failed_outputs(self):
    return False
Determine if generation of any of the outputs failed.
625941b830c21e258bdfa2f7
def get_price_guide(self, game_id: int = 1):
    request_method = 'GET'
    resource_url = '/priceguide'
    if isinstance(game_id, int) and game_id > 1:
        params = {'idGame': game_id}
    else:
        params = {}
    return self.resolve(request_method, resource_url, params=params)
Attention: This request is restricted to Widget apps, 3rd party apps, and
Dedicated apps of powersellers and professionals.

Returns a gzipped CSV file with relevant price guides for the specified
game (default: MtG). The file is updated once every two hours. Security
mechanisms will be implemented to return a 427 if requested more frequently.

The response object and the relevant priceguidefile contains a string which
is Base64 encoded. Decoding it returns a binary string that has to be
written to an empty file. This file is gzipped and finally needs to be
unpacked to retrieve the CSV file.

:param game_id: ID of the game (default: 1 for MtG)
:return: Base64 encoded string. See above.
625941b84d74a7450ccd401c
def show(goods, flag=0):
    tr = "+" + "-" * 5 + "+" + "-" * 16 + "+" + "-" * 10 + "+"
    heading = "|{:^5s}|{:^13s}|{:^8s}|".format("id", "商品名", "售价")
    print(tr + "\n" + heading + "\n" + tr)
    if flag == 0:
        for id_ in goods:
            print("|{0:^5s}|{1:{3}^8s}|{2:^10s}|".format(str(id_), goods[id_]["name"],
                                                         str(goods[id_]["price"]), chr(12288)))
    else:
        for item in goods:
            print("|{0:^5s}|{1:{3}^8s}|{2:^10s}|".format(str(item["id"]), item["name"],
                                                         str(item["price"]), chr(12288)))
    print(tr)
Display the goods table.

:param goods: goods table, e.g.:
    { 1: {"name": "洗发水", "price": 22},
      2: {"name": "牙膏", "price": 15} }
    or [{"id": 1, "name": "洗发水", "price": 22}, ...]
:return: None
625941b850812a4eaa59c17f
def save(self, force=1):
    if self.readonly:
        return
    if force:
        self.saveTimer = None
        dbFile = open(self.saveFilename, 'wb')
        data = dict([i for i in self.localitems() if not i[1].getTemporary()])
        pickle.dump(data, dbFile)
        dbFile.close()
        self.writeLogLine('Saved local dictionary to ' + os.path.abspath(self.saveFilename))
    elif not self.saveTimer:
        import threading
        self.saveTimer = threading.Timer(5, self.save, [], {'force': 1})
        self.saveTimer.setDaemon(1)
        self.saveTimer.start()
    return
Save the dictionary after 5 seconds, ignoring all subsequent calls until the save.
Passing force=True causes an immediate save.
625941b830dc7b76659017c4
def listener_event_alerter(channel):
    l = redis.pubsub(ignore_subscribe_messages=True)
    c_key = keynamehelper.create_key_name(channel, "[^Opening]*")
    l.psubscribe(c_key)
    for message in l.listen():
        order_id = message['data']
        so_key = keynamehelper.create_key_name("sales_order", order_id)
        (event_sku, qty, cost) = redis.hmget(so_key, 'event', 'qty', 'cost')
        print("Purchase {}: #{} ${}".format(event_sku, qty, cost))
Listener for purchases for events other than 'Opening Ceremony'.
625941b8a8370b77170526fb
def run(self): <NEW_LINE> <INDENT> while self.running: <NEW_LINE> <INDENT> if self.rx_queue.empty(): <NEW_LINE> <INDENT> sleep(0) <NEW_LINE> continue <NEW_LINE> <DEDENT> targets = '' <NEW_LINE> num_targets = 0 <NEW_LINE> frame, t_taken = self.rx_queue.get(True, timeout=.1) <NEW_LINE> now = time() <NEW_LINE> delta = now - self.time_last <NEW_LINE> self.fps.append(1.0 / delta) <NEW_LINE> self.time_last = now <NEW_LINE> self.pipeline.process(frame) <NEW_LINE> frame_h, frame_w, _ = frame.shape <NEW_LINE> center_x = frame_w / 2.0 <NEW_LINE> center_y = frame_h / 2.0 <NEW_LINE> for index, contour in enumerate(self.pipeline.filter_contours_output): <NEW_LINE> <INDENT> pixel_x, pixel_y, pixel_w, pixel_h = cv2.boundingRect(contour) <NEW_LINE> percent_x = (pixel_x - center_x) / center_x <NEW_LINE> percent_y = (pixel_y - center_y) / center_y <NEW_LINE> targets += "{},{},{},{},".format( percent_x, percent_y, pixel_w, pixel_h) <NEW_LINE> num_targets += 1 <NEW_LINE> <DEDENT> coprocessor_data = "{},{},{},{},{},{}\n".format( (time() - t_taken) * 1000.0, frame_w, frame_h, self.fps.get_average(1), num_targets, targets, ) <NEW_LINE> self.com.transmit(coprocessor_data) <NEW_LINE> if not self.stream.full(): <NEW_LINE> <INDENT> self.stream.put(frame)
Process frame for targets
625941b84428ac0f6e5ba64c
def archive_create(file_path, output_path, *, archive_name=None): <NEW_LINE> <INDENT> from d8s_file_system import file_name <NEW_LINE> if archive_name is None: <NEW_LINE> <INDENT> archive_name = file_name(file_path) <NEW_LINE> <DEDENT> with _archive_zip(output_path) as zipped_file: <NEW_LINE> <INDENT> zipped_file.write(file_path, arcname=archive_name)
Archive the given file.
625941b897e22403b379cdf3
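The `_archive_zip` context manager is not shown; assuming it wraps `zipfile.ZipFile(output_path, "w")`, an equivalent self-contained sketch is:

import os
import zipfile

def archive_create_sketch(file_path, output_path, *, archive_name=None):
    # Assumption: _archive_zip opens a ZipFile for writing.
    if archive_name is None:
        archive_name = os.path.basename(file_path)   # stand-in for file_name()
    with zipfile.ZipFile(output_path, "w") as zipped_file:
        zipped_file.write(file_path, arcname=archive_name)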
def reject(reserve_obj): <NEW_LINE> <INDENT> reserver = reserve_obj.reserver <NEW_LINE> if hasattr(reserver, 'wechat') and reserver.wechat.binded: <NEW_LINE> <INDENT> to_weu = reserver.wechat <NEW_LINE> triggers = TemplateMessageTrigger.objects.filter(type=202) <NEW_LINE> for trigger in triggers: <NEW_LINE> <INDENT> send_template_message(to_weu, trigger, data={ 'reservee_name': reserve_obj.reservee.name, 'reservee_tel': reserve_obj.reservee.tel, 'reserver_name': reserve_obj.reserver.name, 'reserver_tel': reserve_obj.reserver.tel, 'reserve_time': reserve_obj.main_time.display })
Reject a reservation. :param reserve_obj: ReserveRecord reservation object :return: None
625941b8fb3f5b602dac34e9
def upload_file(file_ref, file_source, file_folder): <NEW_LINE> <INDENT> service = discovery.build('drive', 'v3', requestBuilder=build_request) <NEW_LINE> file_metadata = {'name': file_ref['name'].replace('.sql', ''), 'mimeType': 'application/vnd.google-apps.spreadsheet', 'parents': [file_folder] } <NEW_LINE> logger.info("Uploading file %s", file_metadata['name']) <NEW_LINE> media = MediaIoBaseUpload(file_source, mimetype='text/csv', resumable=True) <NEW_LINE> existing_id = None <NEW_LINE> for ff in get_files_in_folder(folder_id=file_folder): <NEW_LINE> <INDENT> logger.debug(ff) <NEW_LINE> if (ff.get('mimeType') == 'application/vnd.google-apps.spreadsheet') and (ff.get('name') == file_metadata['name']): <NEW_LINE> <INDENT> existing_id = ff.get('id') <NEW_LINE> <DEDENT> <DEDENT> response = None <NEW_LINE> logger.debug("Begin writing to google sheets") <NEW_LINE> try: <NEW_LINE> <INDENT> if existing_id: <NEW_LINE> <INDENT> logger.debug("Found existing sheet: %s, will update it" % existing_id) <NEW_LINE> del file_metadata['parents'] <NEW_LINE> request = service.files().update(fileId=existing_id, body=file_metadata, media_body=media, fields='id') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.debug("Writing new sheet") <NEW_LINE> request = service.files().create(body=file_metadata, media_body=media, fields='id') <NEW_LINE> <DEDENT> while response is None: <NEW_LINE> <INDENT> status, response = request.next_chunk() <NEW_LINE> if status: <NEW_LINE> <INDENT> logger.debug("Uploaded %d%%." % int(status.progress() * 100)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except apiclient.errors.HttpError as e: <NEW_LINE> <INDENT> logger.error(e) <NEW_LINE> return None <NEW_LINE> <DEDENT> logger.debug("Uploaded 100%") <NEW_LINE> logger.info('Finished loading file %s', file_metadata['name'])
Upload and convert the CSV file :param file_ref: Dict containing metadata about file :param file_source: File like object to upload :param file_folder: Folder to place resulting google sheets into.
625941b88a43f66fc4b53ec3
def run(self): <NEW_LINE> <INDENT> super(COTAddFile, self).run() <NEW_LINE> vm = self.vm <NEW_LINE> filename = os.path.basename(self.file) <NEW_LINE> (file_obj, _, _, _) = vm.search_from_filename(filename) <NEW_LINE> if self.file_id is not None: <NEW_LINE> <INDENT> (file_obj2, _, _, _) = vm.search_from_file_id(self.file_id) <NEW_LINE> file_obj = check_for_conflict("File to overwrite", [file_obj, file_obj2]) <NEW_LINE> <DEDENT> if self.file_id is None: <NEW_LINE> <INDENT> if file_obj is not None: <NEW_LINE> <INDENT> self.file_id = vm.get_id_from_file(file_obj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.file_id = filename <NEW_LINE> <DEDENT> <DEDENT> if file_obj is not None: <NEW_LINE> <INDENT> self.ui.confirm_or_die("Replace existing file {0} with {1}?" .format(vm.get_path_from_file(file_obj), self.file)) <NEW_LINE> logger.notice("Overwriting existing File in OVF") <NEW_LINE> <DEDENT> vm.add_file(self.file, self.file_id, file_obj)
Do the actual work of this command. Raises: InvalidInputError: if :func:`ready_to_run` reports ``False``
625941b8c4546d3d9de7288a
def generate_numba_transform_func( kwargs: dict[str, Any], func: Callable[..., np.ndarray], engine_kwargs: dict[str, bool] | None, ) -> Callable[[np.ndarray, np.ndarray, np.ndarray, np.ndarray, int, Any], np.ndarray]: <NEW_LINE> <INDENT> nopython, nogil, parallel = get_jit_arguments(engine_kwargs, kwargs) <NEW_LINE> validate_udf(func) <NEW_LINE> cache_key = (func, "groupby_transform") <NEW_LINE> if cache_key in NUMBA_FUNC_CACHE: <NEW_LINE> <INDENT> return NUMBA_FUNC_CACHE[cache_key] <NEW_LINE> <DEDENT> numba_func = jit_user_function(func, nopython, nogil, parallel) <NEW_LINE> if TYPE_CHECKING: <NEW_LINE> <INDENT> import numba <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> numba = import_optional_dependency("numba") <NEW_LINE> <DEDENT> @numba.jit(nopython=nopython, nogil=nogil, parallel=parallel) <NEW_LINE> def group_transform( values: np.ndarray, index: np.ndarray, begin: np.ndarray, end: np.ndarray, num_columns: int, *args: Any, ) -> np.ndarray: <NEW_LINE> <INDENT> assert len(begin) == len(end) <NEW_LINE> num_groups = len(begin) <NEW_LINE> result = np.empty((len(values), num_columns)) <NEW_LINE> for i in numba.prange(num_groups): <NEW_LINE> <INDENT> group_index = index[begin[i] : end[i]] <NEW_LINE> for j in numba.prange(num_columns): <NEW_LINE> <INDENT> group = values[begin[i] : end[i], j] <NEW_LINE> result[begin[i] : end[i], j] = numba_func(group, group_index, *args) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> return group_transform
Generate a numba jitted transform function specified by values from engine_kwargs. 1. jit the user's function 2. Return a groupby transform function with the jitted function inline Configurations specified in engine_kwargs apply to both the user's function _AND_ the groupby evaluation loop. Parameters ---------- kwargs : dict **kwargs to be passed into the function func : function function to be applied to each window and will be JITed engine_kwargs : dict dictionary of arguments to be passed into numba.jit Returns ------- Numba function
625941b8293b9510aa2c30f3
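Stripped of the pandas plumbing, the core pattern is: jit the user's function, then call it per group inside a parallel loop over (begin, end) offsets into arrays sorted by group. A minimal one-column sketch, assuming numba is installed:

import numba
import numpy as np

@numba.njit
def user_func(group, index):           # stand-in for the jitted user function
    return group - group.mean()

@numba.njit(parallel=True)
def group_transform(values, index, begin, end):
    result = np.empty(len(values))
    for i in numba.prange(len(begin)): # one iteration per group, in parallel
        result[begin[i]:end[i]] = user_func(values[begin[i]:end[i]],
                                            index[begin[i]:end[i]])
    return result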
@api.route('/user', strict_slashes=False) <NEW_LINE> @auth.login_required <NEW_LINE> def get_user_details(): <NEW_LINE> <INDENT> check_authentication_with_token() <NEW_LINE> return jsonify({'firstname': g.current_user.first_name, 'lastname': g.current_user.last_name, 'email': g.current_user.email, 'no of URL shortened': len(list( g.current_user.short_urls))}), 200
Get the details of the current user.
625941b8a17c0f6771cbdeae
def calculate_peer_incitation_score(author_name, a_v, v_p, a_p, p_p): <NEW_LINE> <INDENT> structure_occurrence = 0 <NEW_LINE> papers = set(a_p[author_name]) <NEW_LINE> for venue in a_v[author_name]: <NEW_LINE> <INDENT> candidates = set(v_p[venue]) <NEW_LINE> for paper_id in candidates: <NEW_LINE> <INDENT> if paper_id not in p_p: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> common = papers.intersection(candidates) <NEW_LINE> structure_occurrence += len(common) <NEW_LINE> <DEDENT> <DEDENT> return structure_occurrence
Calculates the second metagraph feature score.
625941b8de87d2750b85fbe8
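A tiny worked example of the four lookup tables the function expects:

a_v = {"alice": ["ICML"]}              # author -> venues published in
v_p = {"ICML": ["p1", "p2"]}           # venue  -> papers at that venue
a_p = {"alice": ["p1"]}                # author -> papers by that author
p_p = {"p1": ["p3"], "p2": ["p1"]}     # paper  -> papers citing it

# p1 and p2 both have entries in p_p, and alice's p1 is among the venue's
# papers, so len(common) == 1 is added twice:
assert calculate_peer_incitation_score("alice", a_v, v_p, a_p, p_p) == 2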
def check_file_closed(self, metadata, payload): <NEW_LINE> <INDENT> m = metadata <NEW_LINE> return ( len(payload) < m["chunksize"] or (m["filesize"] % m["chunksize"] == 0 and m["filesize"] / m["chunksize"] == m["chunk_number"] + 1) )
Checks if all chunks were received. Args: metadata: The metadata of the file. payload: The data of the file. Return: True if the payload was the last chunk of the file, False otherwise.
625941b8dd821e528d63b005
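A worked example of the two branches of the closing condition:

# chunksize 4, filesize 10 -> chunks of 4, 4, 2; the short chunk closes it
meta = {"chunksize": 4, "filesize": 10, "chunk_number": 2}
assert len(b"ab") < meta["chunksize"]
# chunksize 4, filesize 8 -> two full chunks; chunk_number 1 is the last
meta = {"chunksize": 4, "filesize": 8, "chunk_number": 1}
assert meta["filesize"] % meta["chunksize"] == 0
assert meta["filesize"] / meta["chunksize"] == meta["chunk_number"] + 1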
def transform_o_d(self, x): <NEW_LINE> <INDENT> return np.linalg.inv(self.T2x).dot(x)
Transform from original to diagonal coordinates.
625941b88a349b6b435e7fcf
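Assuming `self.T2x` is the diagonal-to-original change-of-basis matrix, applying its inverse maps a point the other way; a standalone numpy sketch with a hypothetical matrix:

import numpy as np

T2x = np.array([[1.0, 1.0],
                [0.0, 1.0]])           # hypothetical change-of-basis matrix
x = np.array([3.0, 2.0])               # point in original coordinates
x_diag = np.linalg.inv(T2x).dot(x)     # point in diagonal coordinates
assert np.allclose(T2x.dot(x_diag), x) # round trip recovers the original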
def test_inmediate_transfer_reserved_quantity(self): <NEW_LINE> <INDENT> self.set_product_amount( self.product_lots, self.internal_loc_1, 100, lot_id=self.lot1 ) <NEW_LINE> stock_move = self.env["stock.move"].create( { "name": "Move for test", "product_id": self.product_lots.id, "product_uom_qty": 20.0, "product_uom": self.product_lots.uom_id.id, "location_id": self.internal_loc_1.id, "location_dest_id": self.internal_loc_2.id, } ) <NEW_LINE> stock_move._action_confirm() <NEW_LINE> stock_move._action_assign() <NEW_LINE> wizard = self._create_wizard(self.internal_loc_1, self.internal_loc_2_shelf) <NEW_LINE> wizard.onchange_origin_location() <NEW_LINE> wizard.action_move_location() <NEW_LINE> self.assertEqual(len(stock_move.move_line_ids), 1) <NEW_LINE> self.assertEqual(stock_move.move_line_ids.product_uom_qty, 20.0) <NEW_LINE> self.assertEqual( stock_move.move_line_ids.location_id, self.internal_loc_2_shelf )
Unreserve quantities in old location and reserve the same items on new location
625941b8004d5f362079a191
def _get_bottom_and_top_outgoing_revs_for_remote(self): <NEW_LINE> <INDENT> remote = self._get_remote_branch() <NEW_LINE> current_branch = self._get_current_branch() <NEW_LINE> outgoing_changesets = self._get_outgoing_changesets(current_branch, remote) <NEW_LINE> if outgoing_changesets: <NEW_LINE> <INDENT> top_rev, bottom_rev = self._get_top_and_bottom_outgoing_revs(outgoing_changesets) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> top_rev = None <NEW_LINE> bottom_rev = None <NEW_LINE> <DEDENT> return bottom_rev, top_rev
Returns the bottom and top outgoing revisions for the changesets between the current branch and the remote branch.
625941b8167d2b6e312189f8
def paser_site(site_file_name): <NEW_LINE> <INDENT> dataset_name = site_file_name.split("_")[0] <NEW_LINE> motif_index = -1 <NEW_LINE> if not os.path.isdir(dataset_name+"_motif"): <NEW_LINE> <INDENT> os.mkdir(dataset_name+"_motif") <NEW_LINE> <DEDENT> with open(site_file_name) as fin: <NEW_LINE> <INDENT> for line in fin: <NEW_LINE> <INDENT> if line.startswith("MOTIF"): <NEW_LINE> <INDENT> motif_index += 1 <NEW_LINE> content = line.strip().split(" ") <NEW_LINE> motif_consensus = content[1] <NEW_LINE> motif_out_name = os.path.join(dataset_name + "_motif", dataset_name + "_" + str(motif_index) + ".sites") <NEW_LINE> fout = open(motif_out_name, "w") <NEW_LINE> header = dataset_name + "." + str(motif_index) + "." + motif_consensus + ".motif" <NEW_LINE> fout.write(header+"\n") <NEW_LINE> <DEDENT> elif line.startswith("chr"): <NEW_LINE> <INDENT> fout.write(line) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue
Input: a sites file. Output: one file per motif, each containing that motif's sites.
625941b84527f215b584c2b5
@sopel.module.interval(24 * 60 * 60) <NEW_LINE> def _clean_cache(bot): <NEW_LINE> <INDENT> oldest_key_age = 0 <NEW_LINE> oldest_key = '' <NEW_LINE> for key, data in sopel.tools.iteritems(bot.memory['safety_cache']): <NEW_LINE> <INDENT> if data['age'] > oldest_key_age: <NEW_LINE> <INDENT> oldest_key_age = data['age'] <NEW_LINE> oldest_key = key <NEW_LINE> <DEDENT> <DEDENT> if oldest_key in bot.memory['safety_cache']: <NEW_LINE> <INDENT> del bot.memory['safety_cache'][oldest_key]
Removes the oldest entry from the URL safety cache; runs once a day.
625941b87d847024c06be11b
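The eviction step, find the entry with the largest 'age' and delete it, in isolation:

cache = {"a.com": {"age": 3}, "b.com": {"age": 7}, "c.com": {"age": 1}}
oldest_key = max(cache, key=lambda k: cache[k]["age"])
del cache[oldest_key]                  # removes "b.com", the oldest entry
assert "b.com" not in cache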
def update_team_count(self, count): <NEW_LINE> <INDENT> assert count >= 0 and count <= TEAM_COUNT_MAX <NEW_LINE> if count > self.team_count: <NEW_LINE> <INDENT> self.team_count = count <NEW_LINE> logger.debug("update team count[count=%d]" % self.team_count) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False
Update the number of teams that can be fielded. Returns: True: an update occurred False: no update occurred
625941b816aa5153ce3622d2
def set_properties(self, properties): <NEW_LINE> <INDENT> pass
Set the given physic property values using the simulator. Args: properties (dict): the physic property values to be set in the simulator.
625941b8d53ae8145f87a0d1
def get_attribute_header_offset(self, record: bytes) -> int: <NEW_LINE> <INDENT> record_header = RECORD_HEADER.parse(record) <NEW_LINE> return record_header.first_attribute_offset
Return offset to first attribute header of MFT Record :param record: The MFT Record
625941b84e696a04525c92af
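`RECORD_HEADER` is a construct definition not shown here. In the standard NTFS FILE record layout the first-attribute offset is a 16-bit little-endian value at byte 0x14; assuming that is what RECORD_HEADER parses, a struct-based sketch:

import struct

def first_attribute_offset(record: bytes) -> int:
    # u16 at offset 0x14 of the FILE record header (assumed layout)
    return struct.unpack_from("<H", record, 0x14)[0]

record = bytes(0x14) + (0x38).to_bytes(2, "little") + bytes(2)
assert first_attribute_offset(record) == 0x38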
def split_attributes(str): <NEW_LINE> <INDENT> attributes = [] <NEW_LINE> num_parenthesis = 0 <NEW_LINE> current_attribute = "" <NEW_LINE> for s in str: <NEW_LINE> <INDENT> if s == " " and num_parenthesis == 0: <NEW_LINE> <INDENT> if current_attribute: <NEW_LINE> <INDENT> attributes.append(current_attribute) <NEW_LINE> <DEDENT> current_attribute = "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current_attribute += s <NEW_LINE> if s == "(": <NEW_LINE> <INDENT> num_parenthesis += 1 <NEW_LINE> <DEDENT> elif s == ")": <NEW_LINE> <INDENT> num_parenthesis -= 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if current_attribute: <NEW_LINE> <INDENT> attributes.append(current_attribute) <NEW_LINE> <DEDENT> return attributes
Split an HQL query into attributes.
625941b82ae34c7f2600cf8c
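Usage example for the splitter above: spaces inside parentheses are kept, top-level spaces delimit attributes:

assert split_attributes("a b(c d) e") == ["a", "b(c d)", "e"]
assert split_attributes("f(g(h i)) j") == ["f(g(h i))", "j"]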
def test_change_block_size(basic_grid): <NEW_LINE> <INDENT> bg = basic_grid <NEW_LINE> bg.block_size = 10 <NEW_LINE> assert bg.block_size == 10 <NEW_LINE> for block in bg: <NEW_LINE> <INDENT> assert block.size == 10
Test that all blocks are properly resized when changing the BlockGrid.block_size attribute.
625941b89c8ee82313fbb5cf