code | docstring | _id
---|---|---|
def vec_to_so3(vec): <NEW_LINE> <INDENT> return np.array([[0, -vec[2], vec[1]], [vec[2], 0, -vec[0]], [-vec[1], vec[0], 0]]) | LICENSE: Modern Robotics
Converts a 3-vector to an so(3) representation
:param vec: A 3-vector
:return: The skew-symmetric representation of vec
Example Input:
vec = np.array([1, 2, 3])
Output:
np.array([[ 0, -3, 2],
[ 3, 0, -1],
[-2, 1, 0]]) | 625941babe7bc26dc91cd49e |
def evolve(self, t): <NEW_LINE> <INDENT> cliques = list() <NEW_LINE> for c in range(3): <NEW_LINE> <INDENT> cliques.append(t * [0]) <NEW_LINE> <DEDENT> for i in range(t): <NEW_LINE> <INDENT> clique = self.findCliques() <NEW_LINE> r = random.random() <NEW_LINE> node1id = random.choice(list(self.nodes)) <NEW_LINE> node1 = self.nodes[node1id] <NEW_LINE> if r <= 0.5: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> node2id = random.choice(list(self.nodes)) <NEW_LINE> node2 = self.nodes[node2id] <NEW_LINE> if not node1.hasLinkTo(node2): <NEW_LINE> <INDENT> node1.addLinkTo(node2) <NEW_LINE> node2.addLinkTo(node1) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> while len(node1.nodelist) == 0: <NEW_LINE> <INDENT> node1id = random.choice(list(self.nodes)) <NEW_LINE> node1 = self.nodes[node1id] <NEW_LINE> <DEDENT> node2 = random.choice(node1.nodelist) <NEW_LINE> node1.removeLinkTo(node2) <NEW_LINE> <DEDENT> cliques = self.plot(clique, i, cliques) <NEW_LINE> <DEDENT> for index, item in enumerate(cliques): <NEW_LINE> <INDENT> plt.plot(range(len(item)), item, marker='x') <NEW_LINE> plt.xlabel('t') <NEW_LINE> plt.ylabel('amount of cliques') <NEW_LINE> plt.title('Evolving Networks') <NEW_LINE> plt.legend('345') <NEW_LINE> plt.tight_layout() <NEW_LINE> <DEDENT> plt.show() | For a given network, take a parameter t representing the number of time steps; at each time
step, randomly insert or delete one edge in the network and plot the resulting clique counts
:param t: number of time steps | 625941bae8904600ed9f1dc2 |
def setDhcpDbAgentIDL(self, sessionId, url, writeDelay, timeOut): <NEW_LINE> <INDENT> pass | Parameters:
- sessionId
- url
- writeDelay
- timeOut | 625941bad7e4931a7ee9ddb5 |
def group_by(keyfunc, iterable): <NEW_LINE> <INDENT> grouped = {} <NEW_LINE> for item in iterable: <NEW_LINE> <INDENT> grouped.setdefault(keyfunc(item), []).append(item) <NEW_LINE> <DEDENT> return grouped | Returns a dict of the elements from the given iterable keyed by the result
of keyfunc on each element. The value at each key will be a list of
the corresponding elements, in the order they appeared in the iterable. | 625941bae1aae11d1e749b4e |
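A quick usage sketch (hypothetical data), grouping words by length:

words = ["ant", "bee", "dove", "emu"]
group_by(len, words)  # {3: ['ant', 'bee', 'emu'], 4: ['dove']}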
def create_assists_model(team_abbrev): <NEW_LINE> <INDENT> log_filename = "{}datasets/{}_2015_to_2018.csv".format(filepath, team_abbrev) <NEW_LINE> log_df = load_dataset(log_filename) <NEW_LINE> stats_filename = "{}datasets/team_stats/{}_Stats_By_Year.csv".format(filepath, team_abbrev) <NEW_LINE> stats_df = load_dataset(stats_filename) <NEW_LINE> log_df["AST_SZN_AVG"] = 0 <NEW_LINE> log_df["WIN_PCT"] = 0.0 <NEW_LINE> for index, row in log_df.iterrows(): <NEW_LINE> <INDENT> game_date = log_df.at[index, "GAME_DATE"] <NEW_LINE> tokens = game_date.split("-") <NEW_LINE> year = tokens[0] <NEW_LINE> month = tokens[1] <NEW_LINE> season = "" <NEW_LINE> if int(month) >= 6: <NEW_LINE> <INDENT> beginning_year = int(year) <NEW_LINE> end_year = int(year) + 1 <NEW_LINE> end_year_str = (str(end_year))[-2:] <NEW_LINE> season = "{}-{}".format(str(beginning_year), end_year_str) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> end_year = str(year) <NEW_LINE> beginning_year = int(year) - 1 <NEW_LINE> beginning_year_str = str(beginning_year) <NEW_LINE> end_year = end_year[-2:] <NEW_LINE> season = "{}-{}".format(beginning_year_str, end_year) <NEW_LINE> <DEDENT> for stats_index, stats_row in stats_df.iterrows(): <NEW_LINE> <INDENT> year = stats_df.at[stats_index, "YEAR"] <NEW_LINE> if year == season: <NEW_LINE> <INDENT> assists_per_game = stats_df.at[stats_index, "AST"] <NEW_LINE> win_pct = stats_df.at[stats_index, "WIN_PCT"] <NEW_LINE> log_df.at[index, "AST_SZN_AVG"] = assists_per_game <NEW_LINE> log_df.at[index, "WIN_PCT"] = win_pct <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> le = LabelEncoder() <NEW_LINE> matchups = (log_df["MATCHUP"].values).tolist() <NEW_LINE> le.fit(matchups) <NEW_LINE> global labelEncoder <NEW_LINE> labelEncoder = le <NEW_LINE> matchups_transformed = le.transform(matchups) <NEW_LINE> log_df["MATCHUPS_TRANSFORMED"] = matchups_transformed <NEW_LINE> array = log_df.values <NEW_LINE> X = array[:, [30, 31, 32]] <NEW_LINE> Y = array[:,22] <NEW_LINE> Y = Y.astype('int') <NEW_LINE> validation_size = 0.20 <NEW_LINE> seed = 7 <NEW_LINE> X_train, X_validation, Y_train, Y_validation = model_selection.train_test_split(X, Y, test_size=validation_size, random_state=seed) <NEW_LINE> scoring = 'accuracy' <NEW_LINE> dtc = DecisionTreeClassifier() <NEW_LINE> dtc.fit(X_train, Y_train) <NEW_LINE> global verbose <NEW_LINE> if verbose: <NEW_LINE> <INDENT> predictions = dtc.predict(X_validation) <NEW_LINE> testing_predictions = dtc.predict(X_train) <NEW_LINE> print("Mean squared error for assists:") <NEW_LINE> print("For training set: {}".format(mean_squared_error(Y_train, testing_predictions))) <NEW_LINE> print("For testing set: {}".format(mean_squared_error(Y_validation, predictions))) <NEW_LINE> print() <NEW_LINE> <DEDENT> return dtc | Given a dataframe
:param team_abbrev: string representing the team to generate a model that predicts their assists in a game
:return: a fitted DecisionTreeClassifier predicting the team's assists | 625941ba2eb69b55b151c744
def is_active(self): <NEW_LINE> <INDENT> return True | Returns whether a user is active or not
:return: True if the user is active | 625941ba3317a56b86939b02
@macro() <NEW_LINE> def go_to_pinhole(self): <NEW_LINE> <INDENT> self.output("You are here:") <NEW_LINE> self.execMacro('wa',) <NEW_LINE> self.output("Going to pinhole...") <NEW_LINE> self.execMacro('umv', 'zs', '-10') <NEW_LINE> self.execMacro('umv', 'th', '90') | Macro go_to_pinhole | 625941ba0383005118ecf47e |
def get_result_by_xyz_cell_id(self, node_xyz, cell_id): <NEW_LINE> <INDENT> case_key = self.case_keys[self.icase] <NEW_LINE> result_name = self.result_name <NEW_LINE> cell = self.grid_selected.GetCell(cell_id) <NEW_LINE> nnodes = cell.GetNumberOfPoints() <NEW_LINE> points = cell.GetPoints() <NEW_LINE> point0 = points.GetPoint(0) <NEW_LINE> dist_min = vtk.vtkMath.Distance2BetweenPoints(point0, node_xyz) <NEW_LINE> point_min = point0 <NEW_LINE> imin = 0 <NEW_LINE> for ipoint in range(1, nnodes): <NEW_LINE> <INDENT> point = points.GetPoint(ipoint) <NEW_LINE> dist = vtk.vtkMath.Distance2BetweenPoints(point, node_xyz) <NEW_LINE> if dist < dist_min: <NEW_LINE> <INDENT> dist_min = dist <NEW_LINE> imin = ipoint <NEW_LINE> point_min = point <NEW_LINE> <DEDENT> <DEDENT> node_id = cell.GetPointId(imin) <NEW_LINE> xyz = np.array(point_min, dtype='float32') <NEW_LINE> case = self.result_cases[case_key] <NEW_LINE> assert isinstance(case_key, integer_types), case_key <NEW_LINE> (obj, (i, res_name)) = case <NEW_LINE> subcase_id = obj.subcase_id <NEW_LINE> case = obj.get_result(i, res_name) <NEW_LINE> result_values = case[node_id] <NEW_LINE> assert not isinstance(xyz, int), xyz <NEW_LINE> return result_name, result_values, node_id, xyz | won't handle multiple cell_ids/node_xyz | 625941ba004d5f362079a1d0 |
def iter_layer_states(model, audios, batch_size=128): <NEW_LINE> <INDENT> lens = (numpy.array(map(len, audios)) + model.config['filter_length']) // model.config['stride'] <NEW_LINE> rs = (r for batch in util.grouper(audios, batch_size) for r in model.task.pile(vector_padder(batch))) <NEW_LINE> for (r,l) in itertools.izip(rs, lens): <NEW_LINE> <INDENT> yield r[-l:,:,:] | Pass audios through the model and for each audio return the state of each timestep and each layer. | 625941baa17c0f6771cbdeed |
def get_raum(self): <NEW_LINE> <INDENT> return self._raum | Read out which room is requested | 625941baaad79263cf3908d5
def get_cookie_header(self, req): <NEW_LINE> <INDENT> mocked_req = MockRequest(req) <NEW_LINE> self.cookiejar.add_cookie_header(mocked_req) <NEW_LINE> return mocked_req.get_new_headers().get('Cookie') | :param req: object with httplib.Request interface
Actually, it only has to have `url` and `headers` attributes | 625941ba1d351010ab8559b7
def parse_task_obj(task_obj): <NEW_LINE> <INDENT> timestamp, target_path = task_obj.split('-', 1) <NEW_LINE> timestamp = Timestamp(timestamp) <NEW_LINE> target_account, target_container, target_obj = split_path('/' + target_path, 3, 3, True) <NEW_LINE> return timestamp, target_account, target_container, target_obj | :param task_obj: a task object name in format of
"<timestamp>-<target_account>/<target_container>" +
"/<target_obj>"
:return: 4-tuples of (delete_at_time, target_account, target_container,
target_obj) | 625941ba97e22403b379ce32 |
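A hedged usage sketch (the task object name is hypothetical; Timestamp and split_path come from Swift's utilities):

# Hypothetical task object name in "<timestamp>-<account>/<container>/<object>" form.
ts, acct, cont, obj = parse_task_obj('0000001234.00000-AUTH_test/c1/o1')
# ts == Timestamp('0000001234.00000'), acct == 'AUTH_test', cont == 'c1', obj == 'o1'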
def folder_browser(self): <NEW_LINE> <INDENT> self.output_folder = QtGui.QFileDialog.getExistingDirectory(self, "Select Output Folder", "C:\\", QtGui.QFileDialog.ShowDirsOnly) <NEW_LINE> self.txtOutputFolder.setText(self.output_folder) | Set QT QLineEdit control to user-specified folder name.
The selected path is written to the txtOutputFolder QLineEdit control. | 625941bacdde0d52a9e52ec9
def test4_1(): <NEW_LINE> <INDENT> return "a" in s | searching 'a' in s = "ab" * 10000 + "c" | 625941ba66673b3332b91f2c |
def get_actual_username_and_password(self): <NEW_LINE> <INDENT> return ( self.get_config_value(self._section_epp_login, 'username', OMIT_ERROR), self.get_config_value(self._section_epp_login, 'password', OMIT_ERROR), ) | Returns the tuple (username, password) that was used to log in | 625941babaa26c4b54cb0fbd
def bootup(port, delay): <NEW_LINE> <INDENT> click.echo( colored(f"Port: ", "yellow") + colored(f"{port}", "red", attrs=['underline']) ) <NEW_LINE> click.echo( colored(f"Delay: ", "yellow") + colored(f"{delay}s", "red", attrs=['underline']) ) <NEW_LINE> global DELAY <NEW_LINE> DELAY = delay <NEW_LINE> app = make_app() <NEW_LINE> app.listen(port) <NEW_LINE> IOLoop.current().start() | Main entry point
:param port: Running port
:param delay: startup delay in seconds (stored in the global DELAY) | 625941ba63d6d428bbe44389
def tp_hyper(data): <NEW_LINE> <INDENT> return c2ri(ri2c(data).transpose()) | Hypercomplex transpose.
Use when both dimensions are complex.
Parameters
----------
data : ndarray
Array of hypercomplex NMR data.
Returns
-------
ndata : ndarray
Array of hypercomplex NMR data with axes transposed. | 625941bae5267d203edcdb3a |
def get_file_count_by_extension(file_path: str, file_ext: str) -> int: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if os.path.exists(file_path): <NEW_LINE> <INDENT> file_ext_count = len( list(filter(lambda x: os.path.splitext(x)[-1] == '.'+file_ext, list(map(lambda y: os.path.join(file_path, y), os.listdir(file_path))))) ) <NEW_LINE> return file_ext_count <NEW_LINE> <DEDENT> <DEDENT> except (OSError, Exception) as e: <NEW_LINE> <INDENT> logger.error(error=e) | Get the count of files of a specified file extension from a given path | 625941ba3c8af77a43ae3637 |
def testBinary(self): <NEW_LINE> <INDENT> index = open("temp/dists/foo/main/binary-i386/Packages") <NEW_LINE> factory = picax.package.PackageFactory(index, "temp", "foo", "main") <NEW_LINE> packages = factory.get_packages() <NEW_LINE> assert len(packages) > 0 <NEW_LINE> index.close() | Test a binary package index. | 625941ba9b70327d1c4e0c6d |
def get_short_trend_by_code(self, code=None, sdate=None, edate=None): <NEW_LINE> <INDENT> in_params = {"date":sdate, "sdate":sdate, "edate":edate, "shcode":code} <NEW_LINE> out_params = ["date", "price", "sign", "change", "diff", "volume", "value", "gm_vo", "gm_va", "gm_per", "gm_avg", "gm_vo_sum"] <NEW_LINE> result = self._execute_query("t1927", "t1927InBlock", "t1927OutBlock1", *out_params, **in_params) <NEW_LINE> for item in result: <NEW_LINE> <INDENT> item["code"]=code <NEW_LINE> <DEDENT> return result | TR: t1927 daily short-selling trend
:param code: str stock code
:param sdate: str start date
:param edate: str end date
:return: list, list of stocks by market | 625941ba5e10d32532c5edc8
def atoms(self): <NEW_LINE> <INDENT> return set(self.array_form) | Returns all the elements of a permutation
Examples
========
>>> from sympy.combinatorics import Permutation
>>> Permutation([0, 1, 2, 3, 4, 5]).atoms()
{0, 1, 2, 3, 4, 5}
>>> Permutation([[0, 1], [2, 3], [4, 5]]).atoms()
{0, 1, 2, 3, 4, 5} | 625941bad10714528d5ffb79 |
def hal_backlight_on(self): <NEW_LINE> <INDENT> if self.backlight_pin: <NEW_LINE> <INDENT> self.backlight_pin.value(1) | Allows the hal layer to turn the backlight on. | 625941ba67a9b606de4a7d56 |
def update_file_system_snapshots_with_http_info(self, attributes, **kwargs): <NEW_LINE> <INDENT> all_params = ['attributes', 'ids', 'name', 'latest_replica'] <NEW_LINE> all_params.append('callback') <NEW_LINE> all_params.append('_return_http_data_only') <NEW_LINE> all_params.append('_preload_content') <NEW_LINE> all_params.append('_request_timeout') <NEW_LINE> params = locals() <NEW_LINE> for key, val in iteritems(params['kwargs']): <NEW_LINE> <INDENT> if key not in all_params: <NEW_LINE> <INDENT> raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_file_system_snapshots" % key ) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> if ('attributes' not in params) or (params['attributes'] is None): <NEW_LINE> <INDENT> raise ValueError("Missing the required parameter `attributes` when calling `update_file_system_snapshots`") <NEW_LINE> <DEDENT> collection_formats = {} <NEW_LINE> path_params = {} <NEW_LINE> query_params = [] <NEW_LINE> if 'ids' in params: <NEW_LINE> <INDENT> query_params.append(('ids', params['ids'])) <NEW_LINE> collection_formats['ids'] = 'csv' <NEW_LINE> <DEDENT> if 'name' in params: <NEW_LINE> <INDENT> query_params.append(('name', params['name'])) <NEW_LINE> <DEDENT> if 'latest_replica' in params: <NEW_LINE> <INDENT> query_params.append(('latest_replica', params['latest_replica'])) <NEW_LINE> <DEDENT> header_params = {} <NEW_LINE> form_params = [] <NEW_LINE> local_var_files = {} <NEW_LINE> body_params = None <NEW_LINE> if 'attributes' in params: <NEW_LINE> <INDENT> body_params = params['attributes'] <NEW_LINE> <DEDENT> header_params['Accept'] = self.api_client. select_header_accept(['application/json']) <NEW_LINE> header_params['Content-Type'] = self.api_client. select_header_content_type(['application/json']) <NEW_LINE> auth_settings = ['AuthTokenHeader'] <NEW_LINE> return self.api_client.call_api('/1.12/file-system-snapshots', 'PATCH', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='FileSystemSnapshotResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | Update an existing file system snapshot.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_file_system_snapshots_with_http_info(attributes, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param SnapshotSuffix attributes: The new attributes, only modifiable fields may be specified. (required)
:param list[str] ids: A comma-separated list of resource IDs. This cannot be provided together with the name or names query parameters.
:param str name: The name of the file system or snapshot to be updated.
:param bool latest_replica: Used when destroying a snapshot. If not present or false, and the snapshot is the latest replicated snapshot, then destroy will fail. If true or the snapshot is not the latest replicated snapshot, then destroy will be successful.
:return: FileSystemSnapshotResponse
If the method is called asynchronously,
returns the request thread. | 625941ba6aa9bd52df036c3c |
def _add_recurring_item(self): <NEW_LINE> <INDENT> return RecurringLineItem.objects.create(cart=self.cart, name="Recurring", quantity=1, sku="42", duration=12, duration_unit="MONTH", recurring_price=Decimal("20.00"), thankyou="Thank you") | Add a RecurringLineItem to self.cart. | 625941ba26238365f5f0ed04 |
def run(self, thread_count=1, count=0, interval=2): <NEW_LINE> <INDENT> suites = self.__classification_suite() <NEW_LINE> if thread_count>1: <NEW_LINE> <INDENT> with ThreadPoolExecutor(max_workers=thread_count) as ts: <NEW_LINE> <INDENT> for i in suites: <NEW_LINE> <INDENT> res = ReRunResult(count=count, interval=interval) <NEW_LINE> self.result.append(res) <NEW_LINE> ts.submit(i.run, result=res).add_done_callback(res.stopTestRun) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> res = ReRunResult(count=count, interval=interval) <NEW_LINE> self.result.append(res) <NEW_LINE> self.suite.run(res) <NEW_LINE> res.stopTestRun() <NEW_LINE> <DEDENT> result = self.__get_reports() <NEW_LINE> return result | The entrance to running tests
Note: if multiple test classes share a global variable, errors may occur due to resource competition
:param thread_count:Number of threads. default 1
:param count: Rerun times, default 0
:param interval: Rerun interval, default 2
:return: Test run results | 625941bab545ff76a8913cb8 |
def test_upload_file(self): <NEW_LINE> <INDENT> data = dict(additionalMetadata='additionalMetadata_example', file=(BytesIO(b'some file data'), 'file.txt')) <NEW_LINE> response = self.client.open( '/johnct4/JM-Petstore/1.0.0/pet/{petId}/uploadImage'.format(petId=789), method='POST', data=data, content_type='multipart/form-data') <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) | Test case for upload_file
uploads an image | 625941ba15baa723493c3e0c |
def __init__(self): <NEW_LINE> <INDENT> super(LeNetEncoder, self).__init__() <NEW_LINE> self.restored = False <NEW_LINE> self.encoder = nn.Sequential( nn.Conv2d(1, 20, kernel_size=5), nn.MaxPool2d(kernel_size=2), nn.ReLU(), nn.Conv2d(20, 50, kernel_size=5), nn.Dropout2d(), nn.MaxPool2d(kernel_size=2), nn.ReLU() ) <NEW_LINE> self.fc1 = nn.Linear(50 * 4 * 4, 500) | Init LeNet encoder. | 625941ba99cbb53fe6792a81 |
def generate_short_id_raw(): <NEW_LINE> <INDENT> return unpack("<Q", urandom(8))[0] | Short ID generator - v4 - without any encoding | 625941ba293b9510aa2c3133 |
def is_spent(self): <NEW_LINE> <INDENT> return self.used | query method
:return: true if used, false if not | 625941ba96565a6dacc8f56f |
def dummyLoad(self, other, pBar=None): <NEW_LINE> <INDENT> other.getAllShapeVertices(other.simplex.shapes, pBar=pBar) <NEW_LINE> points, faces, counts, uvs, uvFaces = other.getMeshTopology(other.mesh) <NEW_LINE> dummyMesh = self.buildRawTopology( other.name, points, faces, counts, uvs, uvFaces ) <NEW_LINE> self.loadNodes(self.simplex, dummyMesh) <NEW_LINE> for shape in self.simplex.shapes: <NEW_LINE> <INDENT> shape.thing = DummyShape(shape.name, self.shapeNode) <NEW_LINE> <DEDENT> for oShape, nShape in zip(other.simplex.shapes, self.simplex.shapes): <NEW_LINE> <INDENT> nShape.verts = copy.copy(oShape.verts) <NEW_LINE> <DEDENT> self.pushAllShapeVertices(self.simplex.shapes) <NEW_LINE> restVerts = self.simplex.restShape.verts <NEW_LINE> for slider in self.simplex.sliders: <NEW_LINE> <INDENT> slider.thing = DummyAttr(slider.name, 0.0, self.ctrl) <NEW_LINE> <DEDENT> for fo in self.simplex.falloffs: <NEW_LINE> <INDENT> fo.thing = DummyFalloff(fo.name, self.scene) <NEW_LINE> fo.setVerts(restVerts) | Method to copy the information in a DCC to a DummyDCC
Parameters
----------
other : DCC
The DCC to load into the dummy
pBar : QProgressDialog, optional
An optional progress dialog (Default value = None)
Returns
-------
: DummyDCC :
The new DummyDCC | 625941ba32920d7e50b28067 |
def _initialize_weights(self, nvis, rng=None, irange=None): <NEW_LINE> <INDENT> if rng is None: <NEW_LINE> <INDENT> rng = self.rng <NEW_LINE> <DEDENT> if irange is None: <NEW_LINE> <INDENT> irange = self.irange <NEW_LINE> <DEDENT> self.weights = sharedX( (.5 - rng.rand(nvis, self.nhid)) * irange, name='W', borrow=True ) | .. todo::
WRITEME | 625941ba15fb5d323cde09a4 |
def VLAQuack(uv, err, Stokes = " ", BIF=1, EIF=0, Sources=[" "], FreqID=0, subA=0, timeRange=[0.,0.], Antennas=[0], flagVer=1, begDrop=0.0, endDrop=0.0, Reason="Quack", logfile = ""): <NEW_LINE> <INDENT> if (begDrop<=0) and (endDrop<=0): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> quack=ObitTask.ObitTask("Quack") <NEW_LINE> setname(uv, quack) <NEW_LINE> quack.Stokes = Stokes <NEW_LINE> quack.BIF = BIF <NEW_LINE> quack.EIF = EIF <NEW_LINE> quack.Sources = Sources <NEW_LINE> quack.subA = subA <NEW_LINE> quack.FreqID = FreqID <NEW_LINE> quack.timeRange = timeRange <NEW_LINE> quack.Antennas = Antennas <NEW_LINE> quack.flagVer = flagVer <NEW_LINE> quack.begDrop = begDrop <NEW_LINE> quack.endDrop = endDrop <NEW_LINE> quack.Reason = Reason <NEW_LINE> quack.logFile = logfile <NEW_LINE> quack.g | Flags beginning and end of each scan
Trim start and end of each selected scan,
nothing done if begDrop=endDrop=0.0
See documentation for task Quack for details
uv = UV data object to flag
err = Obit error/message stack
Stokes = Limit flagging by Stokes
BIF = Limit flagging to BIF-EIF
EIF = Limit flagging
Sources = Sources selected
subA = Subarray number 0=>all
FreqID = Freq. ID to flag. -1=>all
timeRange= Time range to process
Antennas = List of antennas to include
flagVer = Flag table version, 0 => highest
begDrop = Time (min) to drop from beginning
endDrop = Time (min) to drop from end
Reason = Reason (max 24 char.)
logfile = Log file for task | 625941ba6fece00bbac2d5d6 |
def _remove_boundaries(self, interval): <NEW_LINE> <INDENT> begin = interval.begin <NEW_LINE> end = interval.end <NEW_LINE> if self.boundary_table[begin] == 1: <NEW_LINE> <INDENT> del self.boundary_table[begin] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.boundary_table[begin] -= 1 <NEW_LINE> <DEDENT> if self.boundary_table[end] == 1: <NEW_LINE> <INDENT> del self.boundary_table[end] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.boundary_table[end] -= 1 | Removes the boundaries of the interval from the boundary table. | 625941ba851cf427c661a3ac |
def full_kernel(v, i, ksize, full_output=False): <NEW_LINE> <INDENT> vi = zeros(ksize) <NEW_LINE> ii = zeros(ksize) <NEW_LINE> vref = mean(v) <NEW_LINE> iref = mean(i) <NEW_LINE> v_corrected = v-vref <NEW_LINE> i_corrected = i-iref <NEW_LINE> for k in range(ksize): <NEW_LINE> <INDENT> vi[k] = mean(v_corrected[k:] * i_corrected[:len(i) - k]) <NEW_LINE> ii[k] = mean(i_corrected[k:] * i_corrected[:len(i) - k]) <NEW_LINE> <DEDENT> K = linalg.solve_toeplitz(ii, vi) <NEW_LINE> if full_output: <NEW_LINE> <INDENT> v0 = vref - iref * sum(K) <NEW_LINE> return K, v0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return K | Calculates the full kernel from the recording v and the input
current i. The last ksize steps of i should be null.
ksize = size of the resulting kernel
full_output = returns K,v0 if True (v0 is the resting potential) | 625941ba6aa9bd52df036c3d |
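A minimal sketch of recovering a known kernel from synthetic data (the kernel shape and noise input are assumptions; full_kernel itself relies on numpy and scipy.linalg.solve_toeplitz being in scope):

import numpy as np
ksize = 15
true_K = np.exp(-np.arange(ksize) / 3.0)    # assumed ground-truth kernel
i = np.random.randn(10000)
i[-ksize:] = 0                              # last ksize steps of i must be null
v = np.convolve(i, true_K)[:len(i)] - 70.0  # synthetic recording with a resting potential
K, v0 = full_kernel(v, i, ksize, full_output=True)
# K should approximate true_K and v0 should be close to -70.0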
def unique_name(name, sequence): <NEW_LINE> <INDENT> return '{}_{}'.format(name.rsplit('_S', 1)[0], sequence_hash(sequence)) | Create a unique name based on the current name and the sequence
The returned name looks like name_S1234. If the current name already
contains an _S... suffix, it is removed before the new suffix is appended.
name -- current name | 625941badd821e528d63b045 |
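Illustration of the renaming behavior (assuming sequence_hash returns an S-prefixed hash like S1234; the exact value depends on sequence_hash):

unique_name('gene', 'ATGC')        # -> 'gene_S<hash of ATGC>'
unique_name('gene_S1234', 'ATGC')  # old _S suffix dropped -> same result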
def test_vm_filter_save_and_cancel_load(request, vm_view): <NEW_LINE> <INDENT> filter_name = fauxfactory.gen_alphanumeric() <NEW_LINE> vm_view.entities.search.save_filter( "fill_field(Virtual Machine : Name, =)", filter_name) <NEW_LINE> @request.addfinalizer <NEW_LINE> def cleanup(): <NEW_LINE> <INDENT> vm_view.entities.search.load_filter(filter_name) <NEW_LINE> vm_view.entities.search.delete_filter() <NEW_LINE> <DEDENT> vm_view.flash.assert_no_error() <NEW_LINE> vm_view.entities.search.reset_filter() <NEW_LINE> vm_view.entities.search.load_filter(filter_name, cancel=True) <NEW_LINE> vm_view.flash.assert_no_error() | Polarion:
assignee: gtalreja
casecomponent: WebUI
caseimportance: medium
initialEstimate: 1/10h | 625941ba8c0ade5d55d3e85a |
@app.route('/watcher/<watcher>', methods=['GET', 'DELETE']) <NEW_LINE> def watcher_handler(watcher): <NEW_LINE> <INDENT> watcher = stringify(watcher) <NEW_LINE> if request.method == 'DELETE': <NEW_LINE> <INDENT> if client.rm_watcher(watcher) is True: <NEW_LINE> <INDENT> return jsonify({'status': 200, 'reason': 'Watcher deleted successfully'}), 200 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return jsonify({'status': 400, 'reason': 'Cannot delete watcher'}), 400 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return jsonify({ 'status': client.status(watcher), 'pids': client.list(watcher) }) | handle specific watcher given as parameter <watcher>
GET Retrieve a list of PIDs handled by watcher
DELETE Delete the watcher | 625941ba8e05c05ec3eea20c |
def append_operation(self, operation, start=True): <NEW_LINE> <INDENT> assert (isinstance(operation, Operation)) <NEW_LINE> iter = self.store.append([operation]) <NEW_LINE> operation.set_iter(self.store, iter) <NEW_LINE> if start: <NEW_LINE> <INDENT> operation.start() <NEW_LINE> <DEDENT> self.timeout_update() | Append an operation to the store
@param operation an Operation object
@param start if the operation should be started | 625941baa17c0f6771cbdeee |
def adapt(self, query, result): <NEW_LINE> <INDENT> if not result: <NEW_LINE> <INDENT> raise AdaptationError("Cannot adapt from empty result") <NEW_LINE> <DEDENT> sim,best = result[0] <NEW_LINE> adaptable = [k for (k,v) in query.items() if v.adaptable and query[k] != best[k]] <NEW_LINE> if not adaptable: <NEW_LINE> <INDENT> raise AdaptationError("No adaptable values differ") <NEW_LINE> <DEDENT> adapted = best.adapt(query) <NEW_LINE> if query.similarity(adapted) < sim: <NEW_LINE> <INDENT> raise AdaptationError("Adapted result is worse than best match") <NEW_LINE> <DEDENT> return ('adapted', adapted) | Adapt a result to a query, if possible.
The return value is a tuple ('adapted', case), to conform to
the format of the return values of match(). | 625941bae64d504609d746db |
def delete_any(self, table_name, primary_column, item_id): <NEW_LINE> <INDENT> delete_command = "DELETE FROM %s WHERE %s = %s" % (table_name, primary_column, item_id) <NEW_LINE> self.cursor.execute(delete_command) | Accepts a table name and deletes the item whose primary_column equals the given item_id. | 625941ba15baa723493c3e0d
@cross_origin <NEW_LINE> def show_merchandise_data(request, categories, states="Total"): <NEW_LINE> <INDENT> start_time = time.time() <NEW_LINE> logger.info("New API request: {}".format(request.get_full_path())) <NEW_LINE> start_date = request.GET.get('startDate') <NEW_LINE> end_date = request.GET.get('endDate') <NEW_LINE> categories_list = categories.split(',') <NEW_LINE> states_list = states.split(',') <NEW_LINE> try: <NEW_LINE> <INDENT> merch = Merchandise(categories_list, states_list, start_date, end_date) <NEW_LINE> <DEDENT> except (LookupNotFoundError, InvalidDateError) as error: <NEW_LINE> <INDENT> logger.info("HTTP 404 ERROR: Request '{}': {}".format(request.get_full_path(), str(error))) <NEW_LINE> return JsonResponse(error.to_json(), status=404) <NEW_LINE> <DEDENT> merch_json = merch.get_json() <NEW_LINE> if merch.response_status == 'error': <NEW_LINE> <INDENT> return JsonResponse(merch_json, status=404) <NEW_LINE> <DEDENT> end_time = time.time() <NEW_LINE> ms_elapsed = (end_time - start_time)*1000 <NEW_LINE> result = parse_merchandise(merch_json,request, start_date, end_date, ms_elapsed) <NEW_LINE> logger.info("HTTP 200 OK: Request '{}' successfully returned. Time taken: {}ms".format(request.get_full_path(), ms_elapsed)) <NEW_LINE> return JsonResponse(result) | get the request, return merchandise data
:param request: contain date
:param categories: Categories string
:param states: str, List of states
:return: JSON of merch data | 625941ba287bf620b61d3908 |
def forwards(migrator, model): <NEW_LINE> <INDENT> model.Rating.create_table() | Create rating table. | 625941ba31939e2706e4cd0a |
def __init__(self, capacity): <NEW_LINE> <INDENT> self._size = 0 <NEW_LINE> self._capacity = capacity <NEW_LINE> self._node = dict() <NEW_LINE> self._dlist = DLinkedList() | :type capacity: int | 625941bae5267d203edcdb3b |
def _sympysage_integral(self): <NEW_LINE> <INDENT> from sage.misc.functional import integral <NEW_LINE> f, limits = self.function._sage_(), list(self.limits) <NEW_LINE> for limit in limits: <NEW_LINE> <INDENT> if len(limit) == 1: <NEW_LINE> <INDENT> x = limit[0] <NEW_LINE> f = integral(f, x._sage_(), hold=True) <NEW_LINE> <DEDENT> elif len(limit) == 2: <NEW_LINE> <INDENT> x, b = limit <NEW_LINE> f = integral(f, x._sage_(), b._sage_(), hold=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x, a, b = limit <NEW_LINE> f = integral(f, (x._sage_(), a._sage_(), b._sage_()), hold=True) <NEW_LINE> <DEDENT> <DEDENT> return f | EXAMPLES::
sage: from sympy import Symbol, Integral
sage: sx = Symbol('x')
sage: assert integral(x, x, hold=True)._sympy_() == Integral(sx, sx)
sage: assert integral(x, x, hold=True) == Integral(sx, sx)._sage_()
sage: assert integral(x, x, 0, 1, hold=True)._sympy_() == Integral(sx, (sx,0,1))
sage: assert integral(x, x, 0, 1, hold=True) == Integral(sx, (sx,0,1))._sage_() | 625941ba6e29344779a624af |
def init_website_management_client( experiment_secrets: Secrets, experiment_configuration: Configuration) -> WebSiteManagementClient: <NEW_LINE> <INDENT> secrets = load_secrets(experiment_secrets) <NEW_LINE> configuration = load_configuration(experiment_configuration) <NEW_LINE> with auth(secrets) as authentication: <NEW_LINE> <INDENT> base_url = secrets.get('cloud').endpoints.resource_manager <NEW_LINE> client = WebSiteManagementClient( credential=authentication, subscription_id=configuration.get('subscription_id'), base_url=base_url) <NEW_LINE> return client | Initializes Website management client for webapp resource under Azure
Resource manager. | 625941ba7d847024c06be15b |
def load(path, module_name=None, include_dir=None): <NEW_LINE> <INDENT> real_module = bool(module_name) <NEW_LINE> thrift = parse(path, module_name, include_dir=include_dir) <NEW_LINE> if real_module: <NEW_LINE> <INDENT> sys.modules[module_name] = thrift <NEW_LINE> <DEDENT> return thrift | Load the thrift file at path as a module
The module loaded and objects inside may only be pickled if module_name
was provided. | 625941ba283ffb24f3c557a6 |
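A hedged usage sketch (the .thrift file and the struct name are hypothetical):

pingpong = load("pingpong.thrift", module_name="pingpong_thrift")  # picklable, since module_name is given
req = pingpong.PingRequest()  # hypothetical struct defined in the IDL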
def clean_name(self): <NEW_LINE> <INDENT> name = self.cleaned_data['name'] <NEW_LINE> if self.job and self.job.name == name: <NEW_LINE> <INDENT> return name <NEW_LINE> <DEDENT> if Job.objects.filter( user=self.request.user, name=self.cleaned_data.get('name') ).exists(): <NEW_LINE> <INDENT> logger.info("You already have a job with the same name") <NEW_LINE> raise forms.ValidationError( "You already have a job with the same name" ) <NEW_LINE> <DEDENT> return name | Validates the name of the job. For a user, each job must have a unique name.
:return: String value of the name field | 625941ba8a349b6b435e800f |
def validate_product_data(product): <NEW_LINE> <INDENT> if product['product_name'] == '': <NEW_LINE> <INDENT> return {'warning': 'product_name is a required field'}, 400 <NEW_LINE> <DEDENT> elif product['product_category'] == '': <NEW_LINE> <INDENT> return {'warning': 'product_category is a required field'}, 400 <NEW_LINE> <DEDENT> if product['product_name'].strip(' ').isdigit(): <NEW_LINE> <INDENT> return {'warning': 'Enter a non digit product_name'}, 400 <NEW_LINE> <DEDENT> if not product["product_name"].strip(): <NEW_LINE> <INDENT> return {"warning": "Enter a valid product_name"}, 400 <NEW_LINE> <DEDENT> if product['product_category'].strip(' ').isdigit(): <NEW_LINE> <INDENT> return {'warning': 'Enter non digit product_category'}, 400 <NEW_LINE> <DEDENT> if not product["product_category"].strip(): <NEW_LINE> <INDENT> return {"warning": "Enter valid product_category"}, 400 <NEW_LINE> <DEDENT> if len(product['product_name']) > 50: <NEW_LINE> <INDENT> return {'warning': 'product_name is too long'}, 400 | This function validates the product data | 625941bacb5e8a47e48b7949
def _on_open_all_cards(self): <NEW_LINE> <INDENT> for member in self._model.get_members(): <NEW_LINE> <INDENT> packet = outcoming.SetHandValuePacket(member, member.get_hand_value()) <NEW_LINE> self._send_to_all(packet) | When the cards are revealed, send everyone the hand totals of the other players. | 625941ba004d5f362079a1d1
def _count_values(self): <NEW_LINE> <INDENT> indices = {yi: [i] for i, yi in enumerate(self.y) if self.status[i]} <NEW_LINE> return indices | Return dict mapping relevance level to sample index | 625941bab7558d58953c4db5 |
def post_init(cr, registry): <NEW_LINE> <INDENT> from openerp import SUPERUSER_ID <NEW_LINE> from openerp.addons.base.ir.ir_config_parameter import _default_parameters <NEW_LINE> ICP = registry['ir.config_parameter'] <NEW_LINE> for k, func in _default_parameters.items(): <NEW_LINE> <INDENT> v = ICP.get_param(cr, SUPERUSER_ID, k) <NEW_LINE> _, g = func() <NEW_LINE> ICP.set_param(cr, SUPERUSER_ID, k, v, g) | Rewrite ICP's to force groups | 625941ba15fb5d323cde09a5 |
def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if obj == request.user: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if request.method == DELETE: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if request.method in SAFE_METHODS: <NEW_LINE> <INDENT> return True | Show/edit/delete object permission.
:param request: django request instance.
:type request: django.http.request.HttpRequest.
:param view: view set.
:type view: mk42.apps.users.api.viewsets.user.UserViewset.
:param obj: user model instance.
:type obj: mk42.apps.users.models.user.User.
:return: permission is granted.
:rtype: bool. | 625941ba92d797404e304024 |
def get_profile(self, account_id=None, web_property_id=None, name=None, id=None, **kwargs): <NEW_LINE> <INDENT> profile_store = self.service.management().profiles() <NEW_LINE> kwds = {} <NEW_LINE> if account_id is not None: <NEW_LINE> <INDENT> kwds['accountId'] = account_id <NEW_LINE> <DEDENT> if web_property_id is not None: <NEW_LINE> <INDENT> kwds['webPropertyId'] = web_property_id <NEW_LINE> <DEDENT> profiles = profile_store.list(**kwds).execute() <NEW_LINE> return _get_match(profiles, name, id, **kwargs) | Retrieve the right profile for the given account, web property, and
profile attribute (name, id, or arbitrary parameter in kwargs)
Parameters
----------
account_id : str, optional
web_property_id : str, optional
name : str, optional
id : str, optional | 625941bade87d2750b85fc29 |
def test(): <NEW_LINE> <INDENT> data = np.genfromtxt("NiCr-xcr-tem.txt") <NEW_LINE> m, n = data.shape <NEW_LINE> output = open("./sigma-NiCr.txt", "w") <NEW_LINE> for i in range(m): <NEW_LINE> <INDENT> T = data[i][1] <NEW_LINE> x0 = [data[i][0]] <NEW_LINE> db = Database("NiAlCrHuang1999.tdb") <NEW_LINE> comps = ["NI", "CR", "VA"] <NEW_LINE> phasenames = ["FCC_A1", "LIQUID"] <NEW_LINE> vni = "6.718*10.0**(-6.0) + (2.936*10.0**(-5)*10.0**(-6.0))*T**1.355" <NEW_LINE> vcr = "7.23*10.0**(-6.0)" <NEW_LINE> purevms = [[vni, vcr], [vni, vcr]] <NEW_LINE> limit = [10 ** (-20), 0.6] <NEW_LINE> dx = 0.01 <NEW_LINE> sigma = SigmaSolLiq( T=T, x0=x0, db=db, comps=comps, phasenames=phasenames, purevms=purevms, limit=limit, dx=dx, ) <NEW_LINE> InitialAlloyCompositionValue = sigma.Initial_Alloy_Composition.values <NEW_LINE> InterfacialCompositionValue = sigma.Interfacial_Composition.values <NEW_LINE> PartialSigmaValue = sigma.Partial_Interfacial_Energy.values <NEW_LINE> SigmaValue = sigma.Interfacial_Energy.values <NEW_LINE> output.write("%s%s" % ("%.12e\t"*8, "\n") % ( T, InitialAlloyCompositionValue[0], InitialAlloyCompositionValue[1], InterfacialCompositionValue[0], InterfacialCompositionValue[1], PartialSigmaValue[0], PartialSigmaValue[1], SigmaValue )) <NEW_LINE> <DEDENT> output.close() | Calculate solid/liquid interfacial energies of the Ni-Cr system. | 625941ba30bbd722463cbc5e |
def get_terminal_width(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import termios <NEW_LINE> import fcntl <NEW_LINE> import struct <NEW_LINE> call = fcntl.ioctl(0, termios.TIOCGWINSZ, struct.pack('hhhh', 0, 0, 0, 0)) <NEW_LINE> height, width = struct.unpack('hhhh', call)[:2] <NEW_LINE> terminal_width = width <NEW_LINE> <DEDENT> except (SystemExit, KeyboardInterrupt): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> terminal_width = int(os.environ.get('COLUMNS', 80)) - 1 <NEW_LINE> <DEDENT> return terminal_width | Borrowed from the py lib. | 625941baa934411ee3751535 |
def reset_password(self, new_password): <NEW_LINE> <INDENT> self.set_password(new_password) <NEW_LINE> self.reset_validation_token() | Resets the password for this user to the given `new_password`. | 625941ba3539df3088e2e1e6 |
def list( self, resource_group_name: str, load_balancer_name: str, **kwargs: Any ) -> AsyncIterable["_models.InboundNatRuleListResult"]: <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> api_version = "2018-12-01" <NEW_LINE> accept = "application/json" <NEW_LINE> def prepare_request(next_link=None): <NEW_LINE> <INDENT> header_parameters = {} <NEW_LINE> header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') <NEW_LINE> if not next_link: <NEW_LINE> <INDENT> url = self.list.metadata['url'] <NEW_LINE> path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'loadBalancerName': self._serialize.url("load_balancer_name", load_balancer_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> async def extract_data(pipeline_response): <NEW_LINE> <INDENT> deserialized = self._deserialize('InboundNatRuleListResult', pipeline_response) <NEW_LINE> list_of_elem = deserialized.value <NEW_LINE> if cls: <NEW_LINE> <INDENT> list_of_elem = cls(list_of_elem) <NEW_LINE> <DEDENT> return deserialized.next_link or None, AsyncList(list_of_elem) <NEW_LINE> <DEDENT> async def get_next(next_link=None): <NEW_LINE> <INDENT> request = prepare_request(next_link) <NEW_LINE> pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> raise HttpResponseError(response=response, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> return pipeline_response <NEW_LINE> <DEDENT> return AsyncItemPaged( get_next, extract_data ) | Gets all the inbound nat rules in a load balancer.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param load_balancer_name: The name of the load balancer.
:type load_balancer_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either InboundNatRuleListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_12_01.models.InboundNatRuleListResult]
:raises: ~azure.core.exceptions.HttpResponseError | 625941bad58c6744b4257afb |
def get_preferred_max_military_portion_for_single_battle() -> float: <NEW_LINE> <INDENT> if fo.currentTurn() < 40: <NEW_LINE> <INDENT> return 1.0 <NEW_LINE> <DEDENT> best_ship_equivalents = (get_concentrated_tot_mil_rating() / cur_best_mil_ship_rating()) ** 0.5 <NEW_LINE> _MAX_SHIPS_BEFORE_PREFERRING_LESS_THAN_FULL_ENGAGEMENT = 3 <NEW_LINE> if best_ship_equivalents <= _MAX_SHIPS_BEFORE_PREFERRING_LESS_THAN_FULL_ENGAGEMENT: <NEW_LINE> <INDENT> return 1.0 <NEW_LINE> <DEDENT> ratio_exponent = 0.35 <NEW_LINE> return 1.0 / (best_ship_equivalents + 1 - _MAX_SHIPS_BEFORE_PREFERRING_LESS_THAN_FULL_ENGAGEMENT) ** ratio_exponent | Determine and return the preferred max portion of military to be allocated to a single battle.
May be used to downgrade various possible actions requiring military support if they would require an excessive
allocation of military forces. At the beginning of the game this max portion starts as 1.0, then is slightly
reduced to account for the desire to reserve some defenses for other locations, and then in mid to late game, as the
size of the military grows, this portion is further reduced to promote pursuit of multiple battlefronts in
parallel as opposed to single battlefronts against heavily defended positions.
:return: a number in range (0:1] for preferred max portion of military to be allocated to a single battle | 625941ba7b180e01f3dc469f |
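For intuition, a quick numeric check of the tapering formula used above:

best_ship_equivalents = 8
print(1.0 / (best_ship_equivalents + 1 - 3) ** 0.35)  # ~0.53, so at most ~53% of the military per battle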
def pre_order_traversal(self, node=root): <NEW_LINE> <INDENT> if node: <NEW_LINE> <INDENT> yield node <NEW_LINE> yield from self.pre_order_traversal(node.left) <NEW_LINE> yield from self.pre_order_traversal(node.right) | Pre-order traversal visits the current node before its child nodes (hence the name "pre-order"). | 625941ba4428ac0f6e5ba68d
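A hedged usage sketch (Node is a minimal hypothetical node class; tree stands for an instance of the containing class):

class Node:
    def __init__(self, value, left=None, right=None):
        self.value, self.left, self.right = value, left, right

root = Node(1, Node(2), Node(3))
[n.value for n in tree.pre_order_traversal(root)]  # -> [1, 2, 3]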
def select_similar(type: typing.Union[int, str] = 'WEIGHT', compare: typing.Union[int, str] = 'EQUAL', threshold: float = 0.1): <NEW_LINE> <INDENT> pass | Select similar curve points by property type
:param type: Type
:type type: typing.Union[int, str]
:param compare: Compare
:type compare: typing.Union[int, str]
:param threshold: Threshold
:type threshold: float | 625941ba50812a4eaa59c1bf |
def __getattr__(self, attr): <NEW_LINE> <INDENT> if self._file is None: <NEW_LINE> <INDENT> raise IOError("file '%s' is not open" % self._filename) <NEW_LINE> <DEDENT> return getattr(self._file, attr) | Proxy all other attributes of file.
Raises IOError if file is not open.
:param attr: attribute name
:type attr: str
:raises: IOError
:returns: mixed | 625941bafff4ab517eb2f2d5 |
def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> if args: <NEW_LINE> <INDENT> return RE(self, *args) <NEW_LINE> <DEDENT> return self | Calling an RE object returns a reference to that same object. This
is done to support no-ops like then() and followed_by() such that
they can be invoked as attributes or methods.
If args are passed, then call returns a new RE that has the given
args appended. This allows 'of' to work as a conjunction or a way
to add previously generated REs
Returns:
RE | 625941bab57a9660fec3371b |
def add(self, proxy, score=INITIAL_SCORE): <NEW_LINE> <INDENT> if not self.db.zscore(REDIS_KEY, proxy): <NEW_LINE> <INDENT> self.db.zadd(REDIS_KEY, score, proxy) | Add a proxy and set its score
:param proxy: proxy
:param score: score
:return: | 625941ba23849d37ff7b2f2c
def initAlgorithm(self, config): <NEW_LINE> <INDENT> self.addParameter( QgsProcessingParameterMultipleLayers( self.INPUTLAYERS, self.tr('Polygon Layers'), QgsProcessing.TypeVectorPolygon ) ) <NEW_LINE> self.addParameter( QgsProcessingParameterBoolean( self.SELECTED, self.tr('Process only selected features') ) ) <NEW_LINE> self.addParameter( QgsProcessingParameterNumber( self.TOLERANCE, self.tr('Snap radius'), minValue=0, defaultValue=1, type=QgsProcessingParameterNumber.Double ) ) <NEW_LINE> self.addParameter( QgsProcessingParameterNumber( self.MINAREA, self.tr('Minimum area'), minValue=0, defaultValue=0.0001, type=QgsProcessingParameterNumber.Double ) ) <NEW_LINE> self.addParameter( QgsProcessingParameterFeatureSink( self.FLAGS, self.tr('{0} Flags').format(self.displayName()) ) ) | Parameter setting. | 625941babe7bc26dc91cd4a0 |
def is_job_flow_running(job_flow): <NEW_LINE> <INDENT> steps = getattr(job_flow, 'steps', None) or [] <NEW_LINE> active_steps = [step for step in steps if step.state != 'CANCELLED'] <NEW_LINE> if not active_steps: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return not getattr(active_steps[-1], 'enddatetime', None) | Return ``True`` if the given job has any steps which are currently
running. | 625941ba925a0f43d2549d0f |
def create_user(self, username, email, password=None): <NEW_LINE> <INDENT> now = datetime.datetime.now() <NEW_LINE> try: <NEW_LINE> <INDENT> email_name, domain_part = email.strip().split('@', 1) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> email = '@'.join([email_name, domain_part.lower()]) <NEW_LINE> <DEDENT> user = self.model(username=username, email=email, is_staff=False, is_active=True, is_superuser=False, last_login=now, date_joined=now) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user | Creates and saves a User with the given username, email and password. | 625941ba76e4537e8c351513 |
def NCC(self, coeffs, cutoff, max_terms): <NEW_LINE> <INDENT> if max_terms is None: <NEW_LINE> <INDENT> max_terms = self.coeff_size <NEW_LINE> <DEDENT> n_terms = max_term = matrix = 0 <NEW_LINE> for p in range(max_terms): <NEW_LINE> <INDENT> if abs(coeffs[p]) >= cutoff: <NEW_LINE> <INDENT> matrix = matrix + coeffs[p]*self.Multiply(p) <NEW_LINE> n_terms += 1 <NEW_LINE> max_term = p <NEW_LINE> <DEDENT> <DEDENT> return n_terms, max_term, matrix | Build NCC multiplication matrix. | 625941ba71ff763f4b54952a |
def findMedianSortedArrays(nums1, nums2): <NEW_LINE> <INDENT> merged = nums1 + nums2 <NEW_LINE> merged.sort() <NEW_LINE> middle = len(merged) // 2 <NEW_LINE> if len(merged) % 2 > 0: <NEW_LINE> <INDENT> return float(merged[middle]) <NEW_LINE> <DEDENT> return float((merged[middle-1] + merged[middle]) / 2.0) | :type nums1: List[int]
:type nums2: List[int]
:rtype: float | 625941ba26238365f5f0ed05 |
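Two quick checks of the expected behavior:

assert findMedianSortedArrays([1, 3], [2]) == 2.0
assert findMedianSortedArrays([1, 2], [3, 4]) == 2.5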
def _read_tokens(treebank_dir: str) -> List[str]: <NEW_LINE> <INDENT> def _extract_tokens_from(line: str) -> Generator[str, None, None]: <NEW_LINE> <INDENT> if line.isspace(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> column = line.split() <NEW_LINE> if not len(column) >= 2: <NEW_LINE> <INDENT> raise EvaluationError( f"Illformed line in source CoNLL data, only {len(column)}" f" whitespace separated columns found but word form is expected" f" to be on the second column.") <NEW_LINE> <DEDENT> token = column[1] <NEW_LINE> if token != "_": <NEW_LINE> <INDENT> yield from token.split("_") <NEW_LINE> <DEDENT> <DEDENT> def _read_tokens_from(path: str) -> Generator[str, None, None]: <NEW_LINE> <INDENT> logging.info(f"Reading tokens from '{path}'") <NEW_LINE> with open(path, "r", encoding="utf-8") as reader: <NEW_LINE> <INDENT> line_tokens = (_extract_tokens_from(line) for line in reader) <NEW_LINE> yield from itertools.chain.from_iterable(line_tokens) <NEW_LINE> <DEDENT> <DEDENT> paths = glob.iglob(f"{treebank_dir}/*.conll") <NEW_LINE> file_tokens = (_read_tokens_from(p) for p in paths) <NEW_LINE> tokens = list(itertools.chain.from_iterable(file_tokens)) <NEW_LINE> if not tokens: <NEW_LINE> <INDENT> raise EvaluationError( f"No tokens found in treebank files that are under '{treebank_dir}'.") <NEW_LINE> <DEDENT> return tokens | Reads tokens from CoNLL data and returns them in a list. | 625941ba090684286d50eb7c |
def end(self): <NEW_LINE> <INDENT> while self._current_workunit: <NEW_LINE> <INDENT> self.report.end_workunit(self._current_workunit) <NEW_LINE> self._current_workunit.end() <NEW_LINE> self._current_workunit = self._current_workunit.parent <NEW_LINE> <DEDENT> self.report.close() <NEW_LINE> try: <NEW_LINE> <INDENT> if self.run_info.get_info('outcome') is None: <NEW_LINE> <INDENT> self.run_info.add_info('outcome', self.root_workunit.outcome_string()) <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.upload_stats() | This pants run is over, so stop tracking it.
Note: If end() has been called once, subsequent calls are no-ops. | 625941bae1aae11d1e749b4f |
def slash_right(url='', index=1): <NEW_LINE> <INDENT> return url.rstrip('/').rsplit('/', index)[1] | Get the index-th-from-last segment of the /-separated string url, ignoring a trailing empty segment
:param url: string
:param index: which segment, counted from the end
:return: str | 625941ba379a373c97cfa9e5
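A few illustrative calls:

slash_right('a/b/c', 1)   # -> 'c'
slash_right('a/b/c/', 1)  # -> 'c' (trailing slash ignored)
slash_right('a/b/c', 2)   # -> 'b'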
def start(self): <NEW_LINE> <INDENT> self.to_index() <NEW_LINE> self.login(self.get_key()) | Start up
:return: | 625941ba167d2b6e31218a35
def enrich_logging(self): <NEW_LINE> <INDENT> if self.input.args.get('log_to_file') or self.input.environment.get('log_to_file') or self.cfg.pyt.get('config.log_to_file'): <NEW_LINE> <INDENT> set_root_logger(log_to_file=self.path.run() / "pyterraform.logs") <NEW_LINE> logger.info("Enabled logging to file \"%s\"", self.path.run() / "pyterraform.logs") <NEW_LINE> log.info("Enabled log to file for verbose analysis") | Based on cli inputs, enrich logging | 625941baa4f1c619b28afedc |
def populate_env(self): <NEW_LINE> <INDENT> os.putenv('PCOCC_JOB_ID', str(self.batchid)) <NEW_LINE> os.putenv('PCOCC_JOB_NAME', os.environ.get('SLURM_JOB_NAME', '')) | Populate environment variables with batch related info to propagate | 625941bad53ae8145f87a111 |
def test_geocode_esri_eu_soap(self): <NEW_LINE> <INDENT> candidates = self.g_esri_eu_soap.get_candidates(PlaceQuery( address='31 Maiden Lane', city='London', country='UK')) <NEW_LINE> self.assertEqual(len(candidates) > 0, True, 'No candidates returned.') | Test ESRI Europe SOAP geocoder | 625941bad6c5a10208143ee3 |
def trace(self, iter, reset=False): <NEW_LINE> <INDENT> Es = [] <NEW_LINE> States = [] <NEW_LINE> if reset is True: <NEW_LINE> <INDENT> self.acccnt = 0 <NEW_LINE> <DEDENT> for it in tqdm(range(iter)): <NEW_LINE> <INDENT> self.mcstep() <NEW_LINE> Es.append(self.energy) <NEW_LINE> States.append(np.array(self.s)) <NEW_LINE> <DEDENT> self.logs = {'energy': np.array(Es), 'state': np.array(States)} <NEW_LINE> return self.logs | Iterate multiple MC steps, logging energy and state at each step | 625941ba462c4b4f79d1d56c
def _getMinPossibleValue(self): <NEW_LINE> <INDENT> decmax = 0 <NEW_LINE> for i in range(self.getDecDigitCount()): <NEW_LINE> <INDENT> decmax += 9 * math.pow(10, -(i + 1)) <NEW_LINE> <DEDENT> return -math.pow(10.0, self.getIntDigitCount()) + 1 - decmax | _getMinPossibleValue(self) -> None
Determines which is the minimum possible value that can be represented
with the current total number of digits.
@return (float) the minimum possible value | 625941ba0383005118ecf480 |
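Worked example of the formula:

# With 3 integer digits and 2 decimal digits:
# decmax = 9*10**-1 + 9*10**-2 = 0.99
# minimum value = -(10**3) + 1 - 0.99 = -999.99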
def create_user_account(request): <NEW_LINE> <INDENT> if request.method == "GET": <NEW_LINE> <INDENT> name = request.GET.get("name", "") <NEW_LINE> age = request.GET.get("age", "") <NEW_LINE> Class = request.GET.get("class", "") <NEW_LINE> if name and age: <NEW_LINE> <INDENT> user_obj = UserAccount() <NEW_LINE> user_obj.name = name <NEW_LINE> try: <NEW_LINE> <INDENT> age = int(age) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return HttpResponse('Age should be in numbers') <NEW_LINE> <DEDENT> user_obj.age = age <NEW_LINE> user_obj.Class = Class <NEW_LINE> user_obj.save() <NEW_LINE> return HttpResponse('Account created successfully') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponse('Insufficient parameters provided') <NEW_LINE> <DEDENT> <DEDENT> return HttpResponse('Invalid url') | Take parameters from the URL and create a user_account in the database | 625941ba50485f2cf553cc34
def has_edge(self, v_vals): <NEW_LINE> <INDENT> return v_vals in self._vals_to_edges_map | Checks if a certain edge already exists in this graph | 625941ba3317a56b86939b04 |
def testAccessors(self): <NEW_LINE> <INDENT> self.attr_statem.attribute.append(saml.Attribute()) <NEW_LINE> self.attr_statem.attribute.append(saml.Attribute()) <NEW_LINE> self.attr_statem.attribute[0].name = "testAttribute" <NEW_LINE> self.attr_statem.attribute[0].name_format = saml.NAME_FORMAT_URI <NEW_LINE> self.attr_statem.attribute[0].friendly_name = "test attribute" <NEW_LINE> self.attr_statem.attribute[0].attribute_value.append( saml.AttributeValue()) <NEW_LINE> self.attr_statem.attribute[0].attribute_value[0].text = self.text[0] <NEW_LINE> self.attr_statem.attribute[1].name = "testAttribute2" <NEW_LINE> self.attr_statem.attribute[1].name_format = saml.NAME_FORMAT_UNSPECIFIED <NEW_LINE> self.attr_statem.attribute[1].friendly_name = self.text[2] <NEW_LINE> self.attr_statem.attribute[1].attribute_value.append( saml.AttributeValue()) <NEW_LINE> self.attr_statem.attribute[1].attribute_value[0].text = self.text[2] <NEW_LINE> new_as = saml.attribute_statement_from_string( self.attr_statem.to_string()) <NEW_LINE> assert new_as.attribute[0].name == "testAttribute" <NEW_LINE> assert new_as.attribute[0].name_format == saml.NAME_FORMAT_URI <NEW_LINE> assert new_as.attribute[0].friendly_name == "test attribute" <NEW_LINE> assert new_as.attribute[0].attribute_value[0].text.strip() == self.text[ 0] <NEW_LINE> assert new_as.attribute[1].name == "testAttribute2" <NEW_LINE> assert new_as.attribute[1].name_format == saml.NAME_FORMAT_UNSPECIFIED <NEW_LINE> assert new_as.attribute[1].friendly_name == "value2 of test attribute" <NEW_LINE> assert new_as.attribute[1].attribute_value[0].text.strip() == self.text[ 2] | Test for Attribute accessors | 625941ba91af0d3eaac9b8b0 |
def copy(self): <NEW_LINE> <INDENT> copy = type(self)() <NEW_LINE> copy.name = self.name <NEW_LINE> copy.contents = [content.copy() for content in self.contents] <NEW_LINE> copy.exclusions = list(self.exclusions) <NEW_LINE> return copy | Return a copy of the package. | 625941ba099cdd3c635f0af8 |
@tf.keras.utils.register_keras_serializable(package='Text') <NEW_LINE> def simple_swish(features): <NEW_LINE> <INDENT> features = tf.convert_to_tensor(features) <NEW_LINE> return features * tf.nn.sigmoid(features) | Computes the Swish activation function.
The tf.nn.swish operation uses a custom gradient to reduce memory usage.
Since saving custom gradients in SavedModel is currently not supported, and
one would not be able to use an exported TF-Hub module for fine-tuning, we
provide this wrapper that can allow to select whether to use the native
TensorFlow swish operation, or whether to use a customized operation that
has uses default TensorFlow gradient computation.
Args:
features: A `Tensor` representing preactivation values.
Returns:
The activation value. | 625941ba1b99ca400220a94c |
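A minimal sketch of the activation values (assuming TensorFlow is installed):

import tensorflow as tf
y = simple_swish(tf.constant([-1.0, 0.0, 1.0]))
# y ~ [-0.2689, 0.0, 0.7311], since swish(x) = x * sigmoid(x)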
def get_margin_position(self, pair=None): <NEW_LINE> <INDENT> if pair: <NEW_LINE> <INDENT> return self.private_api({'command': 'getMarginPosition', 'currencyPair': self.format_pair(pair) }) <NEW_LINE> <DEDENT> return self.private_api({'command': 'getMarginPosition'}) | get margin position for <pair> or for all pairs | 625941bad486a94d0b98dfe8 |
def set_default_init_cli_cmds(self): <NEW_LINE> <INDENT> init_cli_cmds = [] <NEW_LINE> init_cli_cmds.append("set --retcode true") <NEW_LINE> init_cli_cmds.append("echo off") <NEW_LINE> init_cli_cmds.append("set --vt100 off") <NEW_LINE> init_cli_cmds.append('set dut "'+self.name+'"') <NEW_LINE> init_cli_cmds.append(['set testcase "' + self.testcase + '"', True]) <NEW_LINE> return init_cli_cmds | Default init commands are set --retcode true, echo off, set --vt100 off, set dut <dut name>
and set testcase <tc name>
:return: List of default cli initialization commands. | 625941ba4527f215b584c2f6 |
def _fields(self): <NEW_LINE> <INDENT> cmd = "-e {0}" <NEW_LINE> return " ".join([cmd.format(field) for field in self.fields]) | construct fields that tshark should output | 625941ba1d351010ab8559b9 |
def blueprints(app): <NEW_LINE> <INDENT> manage.utils.load_blueprints( app, 'manage', app.config.get('MANAGE_BLUEPRINTS', []) ) | Register the blueprints for the application | 625941ba507cdc57c6306b6f |
def check_valid_port(p): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> p = int(p) <NEW_LINE> assert 0 < p < 65536 <NEW_LINE> return p <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise argparse.ArgumentTypeError('invalid port [1, 65535]: %s' % p) | Return p as an int if it is a valid port number [1, 65535], otherwise raise argparse.ArgumentTypeError | 625941ba7cff6e4e81117821
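A usage sketch as an argparse type validator:

import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--port', type=check_valid_port, default=8080)
args = parser.parse_args(['--port', '8443'])  # args.port == 8443 (int)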
def eval(self, expr): <NEW_LINE> <INDENT> context = Context(_globals=self.globals, engine=self, doc_vars=self.doc_vars, loaded_excel=self.loaded_excel) <NEW_LINE> self.actions = [] <NEW_LINE> e = expression.parseString(expr)[0] <NEW_LINE> if (log.getEffectiveLevel() == logging.DEBUG): <NEW_LINE> <INDENT> log.debug('e=%r - type=%s' % (e, type(e))) <NEW_LINE> <DEDENT> value = e.eval(context=context) <NEW_LINE> return value | Parse and evaluate a single VBA expression
:param expr: str, expression to be evaluated
:return: value of the evaluated expression | 625941baf548e778e58cd418 |
def get_history(self): <NEW_LINE> <INDENT> return self.train_batch_history, self.valid_batch_history | get history | 625941ba4e696a04525c92e8 |
def loadyaml(fin): <NEW_LINE> <INDENT> with open(fin, 'r') as f: <NEW_LINE> <INDENT> hdr = yaml.safe_load(f) <NEW_LINE> <DEDENT> return hdr | Loads a yaml file into the assigned variable:
hdr = loadyaml(fin)
hdr.keys() gives the dictionary | 625941ba82261d6c526ab33e |
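Usage sketch (the file name and contents are hypothetical): write a small YAML mapping, read it back, and inspect the keys.

```python
import yaml

with open("observation_header.yml", "w") as f:   # hypothetical file
    yaml.safe_dump({"object": "M31", "exptime": 300}, f)

hdr = loadyaml("observation_header.yml")
print(sorted(hdr.keys()))   # ['exptime', 'object']
```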
def deserialize(self, str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.header is None: <NEW_LINE> <INDENT> self.header = std_msgs.msg._Header.Header() <NEW_LINE> <DEDENT> end = 0 <NEW_LINE> _x = self <NEW_LINE> start = end <NEW_LINE> end += 12 <NEW_LINE> (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += 4 <NEW_LINE> (length,) = _struct_I.unpack(str[start:end]) <NEW_LINE> start = end <NEW_LINE> end += length <NEW_LINE> self.header.frame_id = str[start:end] <NEW_LINE> start = end <NEW_LINE> end += 1 <NEW_LINE> (self.color,) = _struct_B.unpack(str[start:end]) <NEW_LINE> return self <NEW_LINE> <DEDENT> except struct.error as e: <NEW_LINE> <INDENT> raise roslib.message.DeserializationError(e) | unpack serialized message in str into this message instance
@param str: byte array of serialized message
@type str: str | 625941ba4d74a7450ccd405f |
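The fixed-width unpacking in miniature (a stdlib-only sketch): three unsigned 32-bit ints for the header, a length-prefixed frame_id string, then one byte for color — the same layout the row walks through.

```python
import struct

buf = (struct.pack("<3I", 7, 1700000000, 250)    # seq, stamp.secs, stamp.nsecs
       + struct.pack("<I", 4) + b"base"          # length-prefixed frame_id
       + struct.pack("<B", 2))                   # color

seq, secs, nsecs = struct.unpack("<3I", buf[0:12])
(strlen,) = struct.unpack("<I", buf[12:16])
frame_id = buf[16:16 + strlen]
(color,) = struct.unpack("<B", buf[16 + strlen:17 + strlen])
print(seq, secs, nsecs, frame_id, color)   # 7 1700000000 250 b'base' 2
```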
def __init__(self, data_manager: DataManager, attribute: WithingsAttribute) -> None: <NEW_LINE> <INDENT> self._data_manager = data_manager <NEW_LINE> self._attribute = attribute <NEW_LINE> self._profile = self._data_manager.profile <NEW_LINE> self._user_id = self._data_manager.user_id <NEW_LINE> self._name = f"Withings {self._attribute.measurement.value} {self._profile}" <NEW_LINE> self._unique_id = get_attribute_unique_id(self._attribute, self._user_id) <NEW_LINE> self._state_data: Any | None = None | Initialize the Withings sensor. | 625941ba7d43ff24873a2b3c |
def candle_check(self, df, candle_period): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> indicator_conf = {} <NEW_LINE> if 'candle_recognition' in self.indicator_config: <NEW_LINE> <INDENT> for config in self.indicator_config['candle_recognition']: <NEW_LINE> <INDENT> if config['enabled'] and config['candle_period'] == candle_period and config['chart']: <NEW_LINE> <INDENT> indicator_conf = config <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if bool(indicator_conf): <NEW_LINE> <INDENT> signal = indicator_conf['signal'] <NEW_LINE> notification = indicator_conf['notification'] if 'notification' in indicator_conf else 'hot' <NEW_LINE> candle_check = indicator_conf['candle_check'] if 'candle_check' in indicator_conf else 1 <NEW_LINE> hot_tresh = indicator_conf['hot'] <NEW_LINE> cold_tresh = indicator_conf['cold'] <NEW_LINE> historical_data = df <NEW_LINE> cdl = candle_recognition.Candle_recognition() <NEW_LINE> candle_pattern = cdl.analyze( historical_data, signal, notification, candle_check, hot_tresh, cold_tresh) <NEW_LINE> candle_pattern = candle_pattern.drop( ['is_hot', 'is_cold'], axis=1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> candle_pattern = pd.DataFrame() <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.logger.info( 'error in indicator config for candle pattern: {}'.format(sys.exc_info()[0])) <NEW_LINE> <DEDENT> return candle_pattern | df : dataframe with ohlcv values
candle_period : period for candles
return : dataframe with candle patterns | 625941ba097d151d1a222cf8 |
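The configuration shape the method expects, reconstructed from the keys it reads (the signal value format is an assumption):

```python
indicator_config = {
    "candle_recognition": [
        {
            "enabled": True,
            "candle_period": "1h",
            "chart": True,
            "signal": ["doji"],      # assumed: list of pattern names
            "notification": "hot",   # optional, defaults to "hot"
            "candle_check": 1,       # optional, defaults to 1
            "hot": 1,
            "cold": 0,
        }
    ]
}
```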
def card_ranks(hand): <NEW_LINE> <INDENT> return sorted([dict_card.get(x[0]) for x in hand], reverse=True) | Returns the list of ranks (their numeric equivalents), sorted from highest to lowest | 625941ba01c39578d7e74cdf
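The function depends on a module-level dict_card mapping rank characters to values; a sketch with an assumed mapping:

```python
dict_card = {"2": 2, "3": 3, "4": 4, "5": 5, "6": 6, "7": 7, "8": 8,
             "9": 9, "T": 10, "J": 11, "Q": 12, "K": 13, "A": 14}

def card_ranks(hand):
    return sorted([dict_card.get(x[0]) for x in hand], reverse=True)

print(card_ranks(["AC", "3D", "4S", "TH", "JS"]))   # [14, 11, 10, 4, 3]
```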
def objdump_file( self, filepath=None, syntax='intel', sections=None, ignore_sections=None): <NEW_LINE> <INDENT> filepath = filepath or self.filepath <NEW_LINE> if not os.path.exists(filepath): <NEW_LINE> <INDENT> print_err(f'File doesn\'t exist: {filepath}') <NEW_LINE> return None <NEW_LINE> <DEDENT> cmd = ['objdump', '-M', syntax.lower(), '-d', filepath] <NEW_LINE> debug('Running {}'.format(' '.join(cmd))) <NEW_LINE> with ProcessOutput(cmd) as objdump_proc: <NEW_LINE> <INDENT> if objdump_proc.stderr: <NEW_LINE> <INDENT> print_err(objdump_proc.stderr.decode()) <NEW_LINE> return None <NEW_LINE> <DEDENT> if not objdump_proc.stdout: <NEW_LINE> <INDENT> print_err('Can\'t decode, objdump had no output.') <NEW_LINE> return None <NEW_LINE> <DEDENT> debug(objdump_proc.stdout.decode()) <NEW_LINE> return self.format_objdump( objdump_proc.stdout.decode(), show_addr=True, sections=sections, ignore_sections=ignore_sections, ) | Run objdump on an executable file, and format it's output.
Arguments:
filepath : Executable to dump.
syntax : Syntax for objdump -M
sections : Only include section names in this list.
ignore_sections : Ignore any sections starting/ending with
strings in this list. | 625941ba6aa9bd52df036c3e |
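A stdlib analogue of the objdump invocation (ProcessOutput is the row's own helper; subprocess.run is the equivalent here, and the target binary is just an example):

```python
import subprocess

cmd = ["objdump", "-M", "intel", "-d", "/bin/true"]
proc = subprocess.run(cmd, capture_output=True)
if proc.returncode == 0 and proc.stdout:
    print(proc.stdout.decode()[:200])   # start of the disassembly
else:
    print(proc.stderr.decode())
```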
def isDone(self): <NEW_LINE> <INDENT> return self.offset >= len(self.msgset_ids_to_remove) | See `TunableLoop`. | 625941ba796e427e537b045e |
@csrf_exempt <NEW_LINE> def handle_xmlrpc(request): <NEW_LINE> <INDENT> if request.method == "POST": <NEW_LINE> <INDENT> if DEBUG: <NEW_LINE> <INDENT> print(request.raw_post_data) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> response = HttpResponse(content_type='text/xml') <NEW_LINE> result = rpc.xmlrpcdispatcher._marshaled_dispatch(request.raw_post_data) <NEW_LINE> response.write(result) <NEW_LINE> if DEBUG: <NEW_LINE> <INDENT> print(result) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> traceback.print_exc() <NEW_LINE> return HttpResponseServerError() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> methods = rpc.xmlrpcdispatcher.system_listMethods() <NEW_LINE> categories = {} <NEW_LINE> for method in methods: <NEW_LINE> <INDENT> sig_ = rpc.xmlrpcdispatcher.system_methodSignature(method) <NEW_LINE> sig = { 'returns': sig_[0], 'args': ", ".join(sig_[1:]), } <NEW_LINE> method_help = rpc.xmlrpcdispatcher.system_methodHelp(method) <NEW_LINE> category = "General" <NEW_LINE> if method in rpc.xmlrpcdispatcher.funcs: <NEW_LINE> <INDENT> func = rpc.xmlrpcdispatcher.funcs[method] <NEW_LINE> if hasattr(func, "_xmlrpc_signature"): <NEW_LINE> <INDENT> category = func._xmlrpc_signature["category"] <NEW_LINE> category = category if category else "General" <NEW_LINE> <DEDENT> elif method.find("system.") == 0: <NEW_LINE> <INDENT> category = "System" <NEW_LINE> <DEDENT> <DEDENT> categories.setdefault(category, []).append((method, sig, method_help)) <NEW_LINE> <DEDENT> if hasattr(settings, 'XMLRPC_GET_TEMPLATE'): <NEW_LINE> <INDENT> if settings.DEBUG: <NEW_LINE> <INDENT> print("Use of settings.XMLRPC_GET_TEMPLATE is deprecated " + "Please update your code to use django_xmlrpc/templates") <NEW_LINE> <DEDENT> template = settings.XMLRPC_GET_TEMPLATE <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> template = 'xmlrpc_get.html' <NEW_LINE> <DEDENT> return render_to_response(template, {'categories': categories}) | Handles XML-RPC requests. All XML-RPC calls should be forwarded here
request
The HttpRequest object that carries the XML-RPC call. If this is a
GET request, nothing will happen (we only accept POST requests) | 625941ba63b5f9789fde6f81 |
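Client-side view of the endpoint (a sketch; the URL is assumed): the GET page is built from the same introspection call shown here.

```python
import xmlrpc.client

proxy = xmlrpc.client.ServerProxy("http://localhost:8000/xmlrpc/")
print(proxy.system.listMethods())   # the method names the dispatcher knows about
```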
def getStatus(self): <NEW_LINE> <INDENT> return self.__status | return job status | 625941babe7bc26dc91cd4a1 |
def strings2symbols(strings, go2type = None): <NEW_LINE> <INDENT> symbols = {} <NEW_LINE> if go2type is None: <NEW_LINE> <INDENT> raise TypeError("The type of keys in the dictionary to be transferred " "into must be supplied") <NEW_LINE> <DEDENT> elif go2type == "orsymbols": <NEW_LINE> <INDENT> for key, value in strings.items(): <NEW_LINE> <INDENT> symbols.update(dict(zip([sym.symbols(key)], [value]))) <NEW_LINE> <DEDENT> <DEDENT> elif go2type == "dysymbols": <NEW_LINE> <INDENT> for key, value in strings.items(): <NEW_LINE> <INDENT> symbols.update(dict(zip([mec.dynamicsymbols(key)], [value]))) <NEW_LINE> <DEDENT> <DEDENT> return symbols | Returns a dictionary with keys being symbols instead of strings.
Symbols here can be ordinary symbols or dynamic symbols.
Parameter
---------
strings: a dictionary
A dictionary with keys being strings.
go2type: string
A type of keys of the dictionary to be transfered into.
Two options only: "orsymbols" for ordinary symbols,
"dysymbols" for dynamic symbols.
Return
------
symbols: a dictionary
A dictionary with keys being symbols. | 625941ba656771135c3eb70e |
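Usage sketch, with sympy imported as sym and sympy.physics.mechanics as mec to match the aliases the row relies on:

```python
import sympy as sym
import sympy.physics.mechanics as mec

ordinary = strings2symbols({"m": 1.0, "g": 9.81}, go2type="orsymbols")
print(ordinary)   # {m: 1.0, g: 9.81} -- keys are sympy Symbols

dynamic = strings2symbols({"q1": 0.0}, go2type="dysymbols")
print(dynamic)    # {q1(t): 0.0} -- key is a dynamicsymbol
```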
@csrf.csrf_protect <NEW_LINE> def subscribe_for_tags(request): <NEW_LINE> <INDENT> tag_names = getattr(request,request.method).get('tags','').strip().split() <NEW_LINE> pure_tag_names, wildcards = forms.clean_marked_tagnames(tag_names) <NEW_LINE> if request.user.is_authenticated: <NEW_LINE> <INDENT> if request.method == 'POST': <NEW_LINE> <INDENT> if 'ok' in request.POST: <NEW_LINE> <INDENT> request.user.mark_tags( pure_tag_names, wildcards, reason = 'good', action = 'add' ) <NEW_LINE> request.user.message_set.create( message = _('Your tag subscription was saved, thanks!') ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message = _( 'Tag subscription was canceled (<a href="%(url)s">undo</a>).' ) % {'url': escape(request.path) + '?tags=' + getattr(request,request.method)['tags']} <NEW_LINE> request.user.message_set.create(message = message) <NEW_LINE> <DEDENT> return HttpResponseRedirect(reverse('index')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = {'tags': tag_names} <NEW_LINE> return render(request, 'subscribe_for_tags.html', data) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> all_tag_names = pure_tag_names + wildcards <NEW_LINE> message = _('Please sign in to subscribe for: %(tags)s') % {'tags': ', '.join(all_tag_names)} <NEW_LINE> request.user.message_set.create(message = message) <NEW_LINE> request.session['subscribe_for_tags'] = (pure_tag_names, wildcards) <NEW_LINE> return HttpResponseRedirect(url_utils.get_login_url()) | process subscription of users by tags | 625941ba442bda511e8be2c1 |